Nov 27 06:52:40 crc systemd[1]: Starting Kubernetes Kubelet...
Nov 27 06:52:40 crc restorecon[4815]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 27 06:52:40 crc restorecon[4815]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:40 crc restorecon[4815]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 27 06:52:40 crc 
restorecon[4815]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 27 06:52:40 crc restorecon[4815]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 27 06:52:40 crc restorecon[4815]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 27 06:52:40 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 27 06:52:41 crc 
restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc 
restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc 
restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 27 06:52:41 
crc restorecon[4815]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 27 
06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 27 06:52:41 crc restorecon[4815]: 
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 
06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc 
restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 27 06:52:41 crc restorecon[4815]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 27 06:52:41 crc restorecon[4815]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Nov 27 06:52:42 crc kubenswrapper[4971]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 27 06:52:42 crc kubenswrapper[4971]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Nov 27 06:52:42 crc kubenswrapper[4971]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 27 06:52:42 crc kubenswrapper[4971]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
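The bulk of the log up to the kubelet start above is a single restorecon pass over /var/lib/kubelet. Each "not reset as customized by admin" entry names a file whose SELinux context had been deliberately customized, here container_file_t carrying a per-pod MCS category pair such as s0:c7,c13 assigned by the container runtime, so restorecon leaves it in place rather than resetting it to the policy default; the "Relabeled" entries are the files it actually changed. One convenient way to summarize a pass of this size is to tally the skipped entries by context. Below is a minimal sketch, assuming the journal excerpt has been saved to a local file (the name kubelet.log is a placeholder); entries that the capture hard-wrapped mid-message will not match.

#!/usr/bin/env python3
"""Tally restorecon "not reset" entries by SELinux context.

Minimal sketch: reads a saved copy of this journal excerpt and relies
only on the message format visible in the log itself.
"""
import re
import sys
from collections import Counter

# One restorecon message looks like:
#   ... restorecon[4815]: <path> not reset as customized by admin to <context>
ENTRY = re.compile(
    r"restorecon\[\d+\]: (?P<path>\S+) not reset as customized by admin to (?P<context>\S+)"
)

def tally(log_path: str) -> Counter:
    counts: Counter = Counter()
    with open(log_path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            # finditer, not match: this capture fuses several entries per line.
            for m in ENTRY.finditer(line):
                counts[m.group("context")] += 1
    return counts

if __name__ == "__main__":
    log_file = sys.argv[1] if len(sys.argv) > 1 else "kubelet.log"  # placeholder name
    for context, n in tally(log_file).most_common():
        print(f"{n:6d}  {context}")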
Nov 27 06:52:42 crc kubenswrapper[4971]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. 
Nov 27 06:52:42 crc kubenswrapper[4971]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.283466 4971 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294413 4971 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294454 4971 feature_gate.go:330] unrecognized feature gate: PlatformOperators 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294459 4971 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294464 4971 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294469 4971 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294474 4971 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294478 4971 feature_gate.go:330] unrecognized feature gate: OnClusterBuild 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294482 4971 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294486 4971 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294490 4971 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294495 4971 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294499 4971 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294503 4971 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294507 4971 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294510 4971 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294514 4971 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294519 4971 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294525 4971 feature_gate.go:330] unrecognized feature gate: Example 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294544 4971 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294549 4971 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294553 4971 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294557 4971 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294563 4971 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294568 4971 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294575 4971 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294583 4971 feature_gate.go:330] unrecognized feature gate: SignatureStores 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294589 4971 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294595 4971 feature_gate.go:330] unrecognized feature gate: InsightsConfig 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294609 4971 feature_gate.go:330] unrecognized feature gate: GatewayAPI 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294614 4971 feature_gate.go:330] unrecognized feature gate: PinnedImages 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294618 4971 feature_gate.go:330] unrecognized feature gate: HardwareSpeed 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294623 4971 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294627 4971 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294631 4971 feature_gate.go:330] unrecognized feature gate: OVNObservability 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294636 4971 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294643 4971 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294649 4971 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294654 4971 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294658 4971 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294662 4971 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294666 4971 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294670 4971 feature_gate.go:330] unrecognized feature gate: ManagedBootImages 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294674 4971 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294678 4971 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294682 4971 feature_gate.go:330] unrecognized feature gate: NewOLM 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294687 4971 feature_gate.go:330] unrecognized feature gate: DNSNameResolver 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294691 4971 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294695 4971 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294699 4971 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294703 4971 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294707 4971 feature_gate.go:330] unrecognized feature gate: ExternalOIDC 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294711 4971 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294715 4971 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294719 4971 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294723 4971 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294727 4971 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294731 4971 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294735 4971 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294739 4971 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294743 4971 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294746 4971 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294750 4971 feature_gate.go:330] unrecognized feature gate: 
GCPClusterHostedDNS Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294754 4971 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294759 4971 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294763 4971 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294768 4971 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294772 4971 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294776 4971 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294780 4971 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294783 4971 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.294788 4971 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.294899 4971 flags.go:64] FLAG: --address="0.0.0.0" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.294911 4971 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.294920 4971 flags.go:64] FLAG: --anonymous-auth="true" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.294927 4971 flags.go:64] FLAG: --application-metrics-count-limit="100" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.294933 4971 flags.go:64] FLAG: --authentication-token-webhook="false" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.294937 4971 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.294943 4971 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.294950 4971 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.294956 4971 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.294962 4971 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.294968 4971 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.294974 4971 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.294979 4971 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.294984 4971 flags.go:64] FLAG: --cgroup-root="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.294988 4971 flags.go:64] FLAG: --cgroups-per-qos="true" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.294993 4971 flags.go:64] FLAG: --client-ca-file="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.294997 4971 flags.go:64] FLAG: --cloud-config="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295002 4971 flags.go:64] FLAG: --cloud-provider="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295006 4971 flags.go:64] FLAG: 
--cluster-dns="[]" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295011 4971 flags.go:64] FLAG: --cluster-domain="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295015 4971 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295020 4971 flags.go:64] FLAG: --config-dir="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295024 4971 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295029 4971 flags.go:64] FLAG: --container-log-max-files="5" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295036 4971 flags.go:64] FLAG: --container-log-max-size="10Mi" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295041 4971 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295045 4971 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295051 4971 flags.go:64] FLAG: --containerd-namespace="k8s.io" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295056 4971 flags.go:64] FLAG: --contention-profiling="false" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295060 4971 flags.go:64] FLAG: --cpu-cfs-quota="true" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295065 4971 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295069 4971 flags.go:64] FLAG: --cpu-manager-policy="none" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295073 4971 flags.go:64] FLAG: --cpu-manager-policy-options="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295084 4971 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295089 4971 flags.go:64] FLAG: --enable-controller-attach-detach="true" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295093 4971 flags.go:64] FLAG: --enable-debugging-handlers="true" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295097 4971 flags.go:64] FLAG: --enable-load-reader="false" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295101 4971 flags.go:64] FLAG: --enable-server="true" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295105 4971 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295112 4971 flags.go:64] FLAG: --event-burst="100" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295117 4971 flags.go:64] FLAG: --event-qps="50" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295121 4971 flags.go:64] FLAG: --event-storage-age-limit="default=0" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295125 4971 flags.go:64] FLAG: --event-storage-event-limit="default=0" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295129 4971 flags.go:64] FLAG: --eviction-hard="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295135 4971 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295139 4971 flags.go:64] FLAG: --eviction-minimum-reclaim="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295145 4971 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295150 4971 flags.go:64] FLAG: --eviction-soft="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295154 4971 
flags.go:64] FLAG: --eviction-soft-grace-period="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295159 4971 flags.go:64] FLAG: --exit-on-lock-contention="false" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295162 4971 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295167 4971 flags.go:64] FLAG: --experimental-mounter-path="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295170 4971 flags.go:64] FLAG: --fail-cgroupv1="false" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295175 4971 flags.go:64] FLAG: --fail-swap-on="true" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295179 4971 flags.go:64] FLAG: --feature-gates="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295184 4971 flags.go:64] FLAG: --file-check-frequency="20s" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295188 4971 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295193 4971 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295197 4971 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295202 4971 flags.go:64] FLAG: --healthz-port="10248" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295206 4971 flags.go:64] FLAG: --help="false" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295210 4971 flags.go:64] FLAG: --hostname-override="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295215 4971 flags.go:64] FLAG: --housekeeping-interval="10s" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295219 4971 flags.go:64] FLAG: --http-check-frequency="20s" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295223 4971 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295228 4971 flags.go:64] FLAG: --image-credential-provider-config="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295232 4971 flags.go:64] FLAG: --image-gc-high-threshold="85" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295236 4971 flags.go:64] FLAG: --image-gc-low-threshold="80" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295241 4971 flags.go:64] FLAG: --image-service-endpoint="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295245 4971 flags.go:64] FLAG: --kernel-memcg-notification="false" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295249 4971 flags.go:64] FLAG: --kube-api-burst="100" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295254 4971 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295259 4971 flags.go:64] FLAG: --kube-api-qps="50" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295263 4971 flags.go:64] FLAG: --kube-reserved="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295267 4971 flags.go:64] FLAG: --kube-reserved-cgroup="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295272 4971 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295276 4971 flags.go:64] FLAG: --kubelet-cgroups="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295281 4971 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295285 4971 flags.go:64] 
FLAG: --lock-file="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295289 4971 flags.go:64] FLAG: --log-cadvisor-usage="false" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295294 4971 flags.go:64] FLAG: --log-flush-frequency="5s" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295298 4971 flags.go:64] FLAG: --log-json-info-buffer-size="0" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295305 4971 flags.go:64] FLAG: --log-json-split-stream="false" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295310 4971 flags.go:64] FLAG: --log-text-info-buffer-size="0" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295314 4971 flags.go:64] FLAG: --log-text-split-stream="false" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295319 4971 flags.go:64] FLAG: --logging-format="text" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295323 4971 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295327 4971 flags.go:64] FLAG: --make-iptables-util-chains="true" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295332 4971 flags.go:64] FLAG: --manifest-url="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295336 4971 flags.go:64] FLAG: --manifest-url-header="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295343 4971 flags.go:64] FLAG: --max-housekeeping-interval="15s" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295347 4971 flags.go:64] FLAG: --max-open-files="1000000" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295353 4971 flags.go:64] FLAG: --max-pods="110" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295357 4971 flags.go:64] FLAG: --maximum-dead-containers="-1" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295362 4971 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295366 4971 flags.go:64] FLAG: --memory-manager-policy="None" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295371 4971 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295375 4971 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295380 4971 flags.go:64] FLAG: --node-ip="192.168.126.11" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295384 4971 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295397 4971 flags.go:64] FLAG: --node-status-max-images="50" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295402 4971 flags.go:64] FLAG: --node-status-update-frequency="10s" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295406 4971 flags.go:64] FLAG: --oom-score-adj="-999" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295410 4971 flags.go:64] FLAG: --pod-cidr="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295414 4971 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295422 4971 flags.go:64] FLAG: --pod-manifest-path="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295426 4971 flags.go:64] FLAG: --pod-max-pids="-1" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 
06:52:42.295431 4971 flags.go:64] FLAG: --pods-per-core="0" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295435 4971 flags.go:64] FLAG: --port="10250" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295439 4971 flags.go:64] FLAG: --protect-kernel-defaults="false" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295444 4971 flags.go:64] FLAG: --provider-id="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295449 4971 flags.go:64] FLAG: --qos-reserved="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295454 4971 flags.go:64] FLAG: --read-only-port="10255" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295459 4971 flags.go:64] FLAG: --register-node="true" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295463 4971 flags.go:64] FLAG: --register-schedulable="true" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295467 4971 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295476 4971 flags.go:64] FLAG: --registry-burst="10" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295481 4971 flags.go:64] FLAG: --registry-qps="5" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295485 4971 flags.go:64] FLAG: --reserved-cpus="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295490 4971 flags.go:64] FLAG: --reserved-memory="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295496 4971 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295501 4971 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295505 4971 flags.go:64] FLAG: --rotate-certificates="false" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295510 4971 flags.go:64] FLAG: --rotate-server-certificates="false" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295514 4971 flags.go:64] FLAG: --runonce="false" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295518 4971 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295522 4971 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295527 4971 flags.go:64] FLAG: --seccomp-default="false" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295550 4971 flags.go:64] FLAG: --serialize-image-pulls="true" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295554 4971 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295558 4971 flags.go:64] FLAG: --storage-driver-db="cadvisor" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295563 4971 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295568 4971 flags.go:64] FLAG: --storage-driver-password="root" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295572 4971 flags.go:64] FLAG: --storage-driver-secure="false" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295577 4971 flags.go:64] FLAG: --storage-driver-table="stats" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295581 4971 flags.go:64] FLAG: --storage-driver-user="root" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295585 4971 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295590 4971 flags.go:64] FLAG: 
--sync-frequency="1m0s" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295594 4971 flags.go:64] FLAG: --system-cgroups="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295598 4971 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295607 4971 flags.go:64] FLAG: --system-reserved-cgroup="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295611 4971 flags.go:64] FLAG: --tls-cert-file="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295616 4971 flags.go:64] FLAG: --tls-cipher-suites="[]" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295622 4971 flags.go:64] FLAG: --tls-min-version="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295631 4971 flags.go:64] FLAG: --tls-private-key-file="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295635 4971 flags.go:64] FLAG: --topology-manager-policy="none" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295641 4971 flags.go:64] FLAG: --topology-manager-policy-options="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295645 4971 flags.go:64] FLAG: --topology-manager-scope="container" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295650 4971 flags.go:64] FLAG: --v="2" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295657 4971 flags.go:64] FLAG: --version="false" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295663 4971 flags.go:64] FLAG: --vmodule="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295670 4971 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.295675 4971 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295790 4971 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295795 4971 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295806 4971 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295811 4971 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295815 4971 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295819 4971 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295823 4971 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295827 4971 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295830 4971 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295834 4971 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295838 4971 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295842 4971 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295846 4971 feature_gate.go:330] unrecognized feature gate: 
ManagedBootImages Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295849 4971 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295853 4971 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295857 4971 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295861 4971 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295865 4971 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295868 4971 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295872 4971 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295878 4971 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295883 4971 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295888 4971 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295894 4971 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295898 4971 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295903 4971 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295907 4971 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295911 4971 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295915 4971 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295919 4971 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295923 4971 feature_gate.go:330] unrecognized feature gate: Example Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295927 4971 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295931 4971 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295935 4971 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295940 4971 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295946 4971 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295950 4971 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295954 4971 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295960 4971 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295965 4971 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295969 4971 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295973 4971 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295977 4971 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295981 4971 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295985 4971 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295989 4971 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295994 4971 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.295997 4971 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296001 4971 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296006 4971 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296010 4971 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296014 4971 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296018 4971 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296023 4971 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296027 4971 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296032 4971 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296037 4971 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296041 4971 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296045 4971 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296050 4971 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296055 4971 
feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296060 4971 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296065 4971 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296070 4971 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296074 4971 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296079 4971 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296084 4971 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296089 4971 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296094 4971 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296099 4971 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.296105 4971 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.296113 4971 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.308583 4971 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.308635 4971 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308761 4971 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308775 4971 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308782 4971 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308788 4971 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308795 4971 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308801 4971 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308806 4971 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308812 4971 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308818 4971 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 27 
06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308824 4971 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308830 4971 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308835 4971 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308840 4971 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308846 4971 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308852 4971 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308857 4971 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308862 4971 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308869 4971 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308877 4971 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308884 4971 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308889 4971 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308895 4971 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308900 4971 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308906 4971 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308912 4971 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308918 4971 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308923 4971 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308929 4971 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308934 4971 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308939 4971 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308946 4971 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308952 4971 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308957 4971 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308962 4971 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 27 06:52:42 crc kubenswrapper[4971]: 
W1127 06:52:42.308969 4971 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308975 4971 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308980 4971 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308986 4971 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308991 4971 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.308997 4971 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309002 4971 feature_gate.go:330] unrecognized feature gate: Example Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309008 4971 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309013 4971 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309018 4971 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309023 4971 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309028 4971 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309034 4971 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309039 4971 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309044 4971 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309051 4971 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309057 4971 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309064 4971 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309069 4971 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309074 4971 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309079 4971 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309084 4971 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309093 4971 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309098 4971 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309104 4971 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309109 4971 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309114 4971 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309119 4971 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309127 4971 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309134 4971 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309141 4971 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309147 4971 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309153 4971 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309159 4971 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309164 4971 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309172 4971 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309188 4971 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.309199 4971 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309437 4971 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309447 4971 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309454 4971 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309460 4971 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309466 4971 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309472 4971 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309478 4971 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309484 4971 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309489 4971 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309495 4971 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309501 4971 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309506 4971 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309511 4971 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309517 4971 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309522 4971 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309528 4971 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309550 4971 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309555 4971 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309561 4971 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309568 4971 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309575 4971 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309580 4971 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309586 4971 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309592 4971 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309597 4971 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309603 4971 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309609 4971 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309614 4971 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309619 4971 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309626 4971 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309634 4971 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309641 4971 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309649 4971 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309657 4971 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309676 4971 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309683 4971 feature_gate.go:330] unrecognized feature gate: Example Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309688 4971 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309694 4971 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309700 4971 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309705 4971 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309712 4971 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309718 4971 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309723 4971 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309731 4971 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309737 4971 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309743 4971 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309749 4971 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309754 4971 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309759 4971 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309764 4971 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309769 4971 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309775 4971 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309781 4971 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309787 4971 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309793 4971 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309799 4971 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309805 4971 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309811 4971 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309816 4971 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309821 4971 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309827 4971 feature_gate.go:330] unrecognized feature gate: OVNObservability
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309832 4971 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309838 4971 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309843 4971 feature_gate.go:330] unrecognized feature gate: PinnedImages
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309865 4971 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309872 4971 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309879 4971 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309885 4971 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309891 4971 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309897 4971 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.309911 4971 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.309919 4971 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.310271 4971 server.go:940] "Client rotation is on, will bootstrap in background"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.316138 4971 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.316268 4971 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.318308 4971 server.go:997] "Starting client certificate rotation"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.318352 4971 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.318721 4971 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-24 20:39:52.847575792 +0000 UTC
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.318849 4971 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 661h47m10.528729637s for next certificate rotation
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.347496 4971 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.350846 4971 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.372051 4971 log.go:25] "Validated CRI v1 runtime API"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.412140 4971 log.go:25] "Validated CRI v1 image API"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.415202 4971 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.421080 4971 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-11-27-06-42-48-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.421126 4971 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:41 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:42 fsType:tmpfs blockSize:0}]
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.445396 4971 manager.go:217] Machine: {Timestamp:2025-11-27 06:52:42.440386142 +0000 UTC m=+0.632430140 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:d12f7ae2-c7c1-475e-a2cb-1f1e626e5071 BootID:4602e4e9-64d4-4227-8212-1a84a264f109 Filesystems:[{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:41 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:42 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:4b:39:d4 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:4b:39:d4 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:e9:55:a6 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:e5:72:3d Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:f1:de:30 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:1c:90:90 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:94:47:da Speed:-1 Mtu:1496} {Name:ens7.44 MacAddress:52:54:00:ca:34:f1 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:0a:b9:cc:b6:a8:3b Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:9e:26:11:a6:38:47 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.445864 4971 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.446141 4971 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.447887 4971 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.448286 4971 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.448352 4971 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.448768 4971 topology_manager.go:138] "Creating topology manager with none policy"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.448791 4971 container_manager_linux.go:303] "Creating device plugin manager"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.449422 4971 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.449493 4971 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.449928 4971 state_mem.go:36] "Initialized new in-memory state store"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.450122 4971 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.457749 4971 kubelet.go:418] "Attempting to sync node with API server"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.457786 4971 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.457831 4971 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.457855 4971 kubelet.go:324] "Adding apiserver pod source"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.457877 4971 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.466085 4971 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.466700 4971 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.50:6443: connect: connection refused
Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.466729 4971 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.50:6443: connect: connection refused
Nov 27 06:52:42 crc kubenswrapper[4971]: E1127 06:52:42.466851 4971 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.50:6443: connect: connection refused" logger="UnhandledError"
Nov 27 06:52:42 crc kubenswrapper[4971]: E1127 06:52:42.466924 4971 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.50:6443: connect: connection refused" logger="UnhandledError"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.467629 4971 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.470262 4971 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.472444 4971 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.472639 4971 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.472755 4971 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.472878 4971 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.472995 4971 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.473110 4971 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.473213 4971 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.473324 4971 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.473428 4971 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.473563 4971 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.473691 4971 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.473798 4971 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.474960 4971 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.475843 4971 server.go:1280] "Started kubelet"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.476791 4971 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.50:6443: connect: connection refused
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.478129 4971 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Nov 27 06:52:42 crc systemd[1]: Started Kubernetes Kubelet.
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.478931 4971 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.480443 4971 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.482425 4971 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.485866 4971 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.487662 4971 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 16:14:39.728786667 +0000 UTC Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.487739 4971 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 777h21m57.241052231s for next certificate rotation Nov 27 06:52:42 crc kubenswrapper[4971]: E1127 06:52:42.487745 4971 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.489596 4971 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Nov 27 06:52:42 crc kubenswrapper[4971]: E1127 06:52:42.489498 4971 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.50:6443: connect: connection refused" interval="200ms" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.489762 4971 server.go:460] "Adding debug handlers to kubelet server" Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.490857 4971 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.50:6443: connect: connection refused Nov 27 06:52:42 crc kubenswrapper[4971]: E1127 06:52:42.490930 4971 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.50:6443: connect: connection refused" logger="UnhandledError" Nov 27 06:52:42 crc kubenswrapper[4971]: E1127 06:52:42.489744 4971 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.50:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187bca8786bfeb38 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-27 06:52:42.4758014 +0000 UTC m=+0.667845358,LastTimestamp:2025-11-27 06:52:42.4758014 +0000 UTC m=+0.667845358,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.491235 4971 factory.go:55] Registering systemd factory Nov 27 06:52:42 crc 
kubenswrapper[4971]: I1127 06:52:42.491257 4971 factory.go:221] Registration of the systemd container factory successfully Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.491670 4971 factory.go:153] Registering CRI-O factory Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.491694 4971 factory.go:221] Registration of the crio container factory successfully Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.491754 4971 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.491757 4971 volume_manager.go:287] "The desired_state_of_world populator starts" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.491777 4971 factory.go:103] Registering Raw factory Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.491779 4971 volume_manager.go:289] "Starting Kubelet Volume Manager" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.491793 4971 manager.go:1196] Started watching for new ooms in manager Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.497270 4971 manager.go:319] Starting recovery of all containers Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.503049 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.503193 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.503267 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.503335 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.503392 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.503451 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.503525 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Nov 27 06:52:42 
crc kubenswrapper[4971]: I1127 06:52:42.503604 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.503678 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.503754 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.503816 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.503879 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.503945 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.504018 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.504076 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.504140 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.504201 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.504271 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.504337 
4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.504392 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.504448 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.504502 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.504610 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.504665 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.504729 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.504798 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.504902 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.504968 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.505023 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.505084 4971 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.505139 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.505192 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.505245 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.505300 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.505378 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.505433 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.505490 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.505559 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.505623 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.505693 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.506054 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.506118 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.506173 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.506227 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.506315 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.506381 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.506442 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.506497 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.506568 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.506648 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.506721 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.506778 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.506844 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.506906 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.506961 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.507022 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.507081 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.507162 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.507232 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.507292 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.507353 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.507418 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.507481 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.507556 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.507615 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.507672 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.507737 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.507793 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.507855 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.507914 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.507972 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.508030 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.508091 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.508152 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.508221 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.508280 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.508335 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.508401 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.508461 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.508517 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.508591 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.508647 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.508708 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.508765 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.508822 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.508883 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.508938 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.508998 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.509055 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.509119 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.509174 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.509233 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.509296 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.509363 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.509419 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.509472 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.509527 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.509699 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.509774 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.509837 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.509895 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.509952 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.510009 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.510079 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.510144 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.510204 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.510258 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.510342 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.510404 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.510467 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.510528 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.510604 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.510660 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.510717 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.510786 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.510881 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.510955 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.511019 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.511075 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.511136 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.511193 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.511248 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.511309 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.511366 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.511422 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.511482 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.511555 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.511621 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.511681 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" 
volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.511737 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.511794 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.511849 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.511905 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.511961 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.512015 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.512077 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.512135 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.512196 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.512254 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.512305 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.512359 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.512416 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.512470 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.512523 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.512594 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.512650 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.512712 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.512767 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.512826 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.512885 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.512939 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" 
volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.513001 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.513058 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.513115 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.513171 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.513227 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.513290 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.513344 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.513403 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.513461 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.513516 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.513609 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.513674 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.513731 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.513784 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.513844 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.513901 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.513960 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.514016 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.514074 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.514129 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.514190 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.514265 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.515303 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.515965 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.516036 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.516091 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.516147 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.516217 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.516297 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.516352 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.516411 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.516469 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.516528 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.516601 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.516668 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.516737 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.516809 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.516885 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.516954 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.517032 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.517112 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.517191 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.517265 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.518975 4971 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" 
volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.519071 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.519132 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.519195 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.519252 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.519315 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.519380 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.519446 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.519519 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.519602 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.519673 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.519794 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.519850 4971 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.519909 4971 reconstruct.go:97] "Volume reconstruction finished"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.519960 4971 reconciler.go:26] "Reconciler: start to sync state"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.528947 4971 manager.go:324] Recovery completed
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.539251 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.542332 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.542381 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.542395 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.547012 4971 cpu_manager.go:225] "Starting CPU manager" policy="none"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.547039 4971 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.547076 4971 state_mem.go:36] "Initialized new in-memory state store"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.547007 4971 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.548951 4971 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6"
protocol="IPv6" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.549004 4971 status_manager.go:217] "Starting to sync pod status with apiserver" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.549041 4971 kubelet.go:2335] "Starting kubelet main sync loop" Nov 27 06:52:42 crc kubenswrapper[4971]: E1127 06:52:42.549223 4971 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Nov 27 06:52:42 crc kubenswrapper[4971]: W1127 06:52:42.553638 4971 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.50:6443: connect: connection refused Nov 27 06:52:42 crc kubenswrapper[4971]: E1127 06:52:42.553759 4971 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.50:6443: connect: connection refused" logger="UnhandledError" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.564891 4971 policy_none.go:49] "None policy: Start" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.565903 4971 memory_manager.go:170] "Starting memorymanager" policy="None" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.565938 4971 state_mem.go:35] "Initializing new in-memory state store" Nov 27 06:52:42 crc kubenswrapper[4971]: E1127 06:52:42.589023 4971 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.623444 4971 manager.go:334] "Starting Device Plugin manager" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.623774 4971 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.623812 4971 server.go:79] "Starting device plugin registration server" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.624763 4971 eviction_manager.go:189] "Eviction manager: starting control loop" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.624824 4971 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.625870 4971 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.626041 4971 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.626061 4971 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Nov 27 06:52:42 crc kubenswrapper[4971]: E1127 06:52:42.634157 4971 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.649358 4971 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Nov 27 06:52:42 crc kubenswrapper[4971]: 
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.651005 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.651041 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.651050 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.651175 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.651770 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.651804 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.651815 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.652718 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.652785 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.652798 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.652813 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.652737 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.653663 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.653700 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.653716 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.653937 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.653957 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.653968 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.654046 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.654073 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.654086 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.654094 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.654226 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.654255 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.655281 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.655307 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.655319 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.655321 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.655336 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.655346 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.655462 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.655491 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.655512 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.659752 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.659775 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.659784 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.659923 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.659946 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.660375 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.660399 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.660409 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.661987 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.662019 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.662030 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:42 crc kubenswrapper[4971]: E1127 06:52:42.690586 4971 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.50:6443: connect: connection refused" interval="400ms" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.722436 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.722484 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.722520 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.722560 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.722586 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: 
\"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.722653 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.722777 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.722845 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.722876 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.722911 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.722955 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.722985 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.723010 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.723031 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.723064 4971 
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.726567 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.727634 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.727666 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.727678 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.727703 4971 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Nov 27 06:52:42 crc kubenswrapper[4971]: E1127 06:52:42.728076 4971 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.50:6443: connect: connection refused" node="crc"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824067 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824130 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824162 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824187 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824211 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824235 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824266 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824288 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824310 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824306 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824304 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824352 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824385 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824331 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824365 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824388 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824388 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824412 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824473 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824522 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824526 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824551 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824602 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824669 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824718 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824763 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod 
\"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824775 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824808 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824806 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.824762 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.929080 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.931651 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.931720 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.931740 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.931784 4971 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 27 06:52:42 crc kubenswrapper[4971]: E1127 06:52:42.932496 4971 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.50:6443: connect: connection refused" node="crc" Nov 27 06:52:42 crc kubenswrapper[4971]: I1127 06:52:42.996472 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 27 06:52:43 crc kubenswrapper[4971]: I1127 06:52:43.001590 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 27 06:52:43 crc kubenswrapper[4971]: I1127 06:52:43.017032 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Nov 27 06:52:43 crc kubenswrapper[4971]: I1127 06:52:43.032356 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:52:43 crc kubenswrapper[4971]: I1127 06:52:43.040380 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 27 06:52:43 crc kubenswrapper[4971]: W1127 06:52:43.045158 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-8e875f05e23ca34c35d53b6b7413851fd1501176c6cd44653460aff3513af0ed WatchSource:0}: Error finding container 8e875f05e23ca34c35d53b6b7413851fd1501176c6cd44653460aff3513af0ed: Status 404 returned error can't find the container with id 8e875f05e23ca34c35d53b6b7413851fd1501176c6cd44653460aff3513af0ed Nov 27 06:52:43 crc kubenswrapper[4971]: W1127 06:52:43.049980 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-fc7cf0fdf853746b2106c3db18a88a70974a8cabc12502744a45780dc24d3d3a WatchSource:0}: Error finding container fc7cf0fdf853746b2106c3db18a88a70974a8cabc12502744a45780dc24d3d3a: Status 404 returned error can't find the container with id fc7cf0fdf853746b2106c3db18a88a70974a8cabc12502744a45780dc24d3d3a Nov 27 06:52:43 crc kubenswrapper[4971]: W1127 06:52:43.051373 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-c0d5048d904a29a56ba927d3f94cffc2d6ce41ee9a1787ffa7887072de41fbe4 WatchSource:0}: Error finding container c0d5048d904a29a56ba927d3f94cffc2d6ce41ee9a1787ffa7887072de41fbe4: Status 404 returned error can't find the container with id c0d5048d904a29a56ba927d3f94cffc2d6ce41ee9a1787ffa7887072de41fbe4 Nov 27 06:52:43 crc kubenswrapper[4971]: W1127 06:52:43.059105 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-c9e7a0f95463a7cd21337a0a01e5067b307bc65584b42271e234986a6fe4de46 WatchSource:0}: Error finding container c9e7a0f95463a7cd21337a0a01e5067b307bc65584b42271e234986a6fe4de46: Status 404 returned error can't find the container with id c9e7a0f95463a7cd21337a0a01e5067b307bc65584b42271e234986a6fe4de46 Nov 27 06:52:43 crc kubenswrapper[4971]: W1127 06:52:43.061717 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-1f0684c8e7195b43535769807499e5456ad07b24af4c61539479f92b77c27bbe WatchSource:0}: Error finding container 1f0684c8e7195b43535769807499e5456ad07b24af4c61539479f92b77c27bbe: Status 404 returned error can't find the container with id 1f0684c8e7195b43535769807499e5456ad07b24af4c61539479f92b77c27bbe Nov 27 06:52:43 crc kubenswrapper[4971]: E1127 06:52:43.091811 4971 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.50:6443: connect: connection refused" interval="800ms" Nov 27 06:52:43 crc kubenswrapper[4971]: W1127 06:52:43.323829 4971 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial 
tcp 38.102.83.50:6443: connect: connection refused Nov 27 06:52:43 crc kubenswrapper[4971]: E1127 06:52:43.323956 4971 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.50:6443: connect: connection refused" logger="UnhandledError" Nov 27 06:52:43 crc kubenswrapper[4971]: I1127 06:52:43.332665 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:43 crc kubenswrapper[4971]: I1127 06:52:43.334185 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:43 crc kubenswrapper[4971]: I1127 06:52:43.334228 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:43 crc kubenswrapper[4971]: I1127 06:52:43.334241 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:43 crc kubenswrapper[4971]: I1127 06:52:43.334272 4971 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 27 06:52:43 crc kubenswrapper[4971]: E1127 06:52:43.334658 4971 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.50:6443: connect: connection refused" node="crc" Nov 27 06:52:43 crc kubenswrapper[4971]: W1127 06:52:43.339048 4971 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.50:6443: connect: connection refused Nov 27 06:52:43 crc kubenswrapper[4971]: E1127 06:52:43.339123 4971 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.50:6443: connect: connection refused" logger="UnhandledError" Nov 27 06:52:43 crc kubenswrapper[4971]: W1127 06:52:43.398782 4971 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.50:6443: connect: connection refused Nov 27 06:52:43 crc kubenswrapper[4971]: E1127 06:52:43.398896 4971 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.50:6443: connect: connection refused" logger="UnhandledError" Nov 27 06:52:43 crc kubenswrapper[4971]: W1127 06:52:43.427942 4971 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.50:6443: connect: connection refused Nov 27 06:52:43 crc kubenswrapper[4971]: E1127 06:52:43.428031 4971 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get 
\"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.50:6443: connect: connection refused" logger="UnhandledError" Nov 27 06:52:43 crc kubenswrapper[4971]: I1127 06:52:43.478490 4971 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.50:6443: connect: connection refused Nov 27 06:52:43 crc kubenswrapper[4971]: I1127 06:52:43.554398 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1f0684c8e7195b43535769807499e5456ad07b24af4c61539479f92b77c27bbe"} Nov 27 06:52:43 crc kubenswrapper[4971]: I1127 06:52:43.556019 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c9e7a0f95463a7cd21337a0a01e5067b307bc65584b42271e234986a6fe4de46"} Nov 27 06:52:43 crc kubenswrapper[4971]: I1127 06:52:43.556789 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c0d5048d904a29a56ba927d3f94cffc2d6ce41ee9a1787ffa7887072de41fbe4"} Nov 27 06:52:43 crc kubenswrapper[4971]: I1127 06:52:43.557417 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"8e875f05e23ca34c35d53b6b7413851fd1501176c6cd44653460aff3513af0ed"} Nov 27 06:52:43 crc kubenswrapper[4971]: I1127 06:52:43.558199 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"fc7cf0fdf853746b2106c3db18a88a70974a8cabc12502744a45780dc24d3d3a"} Nov 27 06:52:43 crc kubenswrapper[4971]: E1127 06:52:43.893127 4971 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.50:6443: connect: connection refused" interval="1.6s" Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.135561 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.137484 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.138041 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.138064 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.138107 4971 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 27 06:52:44 crc kubenswrapper[4971]: E1127 06:52:44.139014 4971 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.50:6443: connect: connection refused" node="crc" Nov 27 06:52:44 crc kubenswrapper[4971]: E1127 
06:52:44.168523 4971 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.50:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187bca8786bfeb38 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-27 06:52:42.4758014 +0000 UTC m=+0.667845358,LastTimestamp:2025-11-27 06:52:42.4758014 +0000 UTC m=+0.667845358,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.478045 4971 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.50:6443: connect: connection refused Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.563848 4971 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="b47e46ba3596addbd137a1dfaa457e183bd7543340d5a0a85b61ee97f7c1a6cb" exitCode=0 Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.563919 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"b47e46ba3596addbd137a1dfaa457e183bd7543340d5a0a85b61ee97f7c1a6cb"} Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.563961 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.565345 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.565403 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.565426 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.568253 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23"} Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.568296 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.568308 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04"} Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.568335 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517"} Nov 27 
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.569349 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.569407 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.569437 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.570382 4971 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435" exitCode=0
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.570440 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435"}
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.570578 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.571686 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.571704 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.571711 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.572341 4971 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce" exitCode=0
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.572453 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce"}
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.572521 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.573628 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.573657 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.573667 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.574230 4971 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="5822703375f22f8adb588a790d4f00e78b8a2cd8fe3f902fe4732a133511d63a" exitCode=0
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.574282 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.574285 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"5822703375f22f8adb588a790d4f00e78b8a2cd8fe3f902fe4732a133511d63a"}
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.575336 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.575374 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.575388 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.575780 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.576452 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.576476 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:52:44 crc kubenswrapper[4971]: I1127 06:52:44.576486 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.479240 4971 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.50:6443: connect: connection refused
Nov 27 06:52:45 crc kubenswrapper[4971]: E1127 06:52:45.494060 4971 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.50:6443: connect: connection refused" interval="3.2s"
Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.589053 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c"}
Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.589112 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4"}
Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.589122 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1"}
Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.589132 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3"}
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3"} Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.593660 4971 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7" exitCode=0 Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.593719 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7"} Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.593787 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.595089 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.595124 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.595136 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.603487 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"f0636fb6663c0b29767d749d85154ba8c5707c5022f211d815895bfc844d6d0d"} Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.603612 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"be990eb8e6023cc976afaf7edaba3eed7bf119dd4b31378671564feaf9cdd5c1"} Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.603634 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"233f958888e7af4cbaf18ca0b743ae116a205a5d250fcb5d8d25c6c33cc6edfa"} Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.603667 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.605469 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.605523 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.605554 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.607512 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.608342 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.608366 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"7a1ad326792f4efe7ca0bbe3cb729cdb3b4047e1d142b95f425b47fd07250a7a"} Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.608782 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.608808 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.608819 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.609108 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.609129 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.609137 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.739447 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.741100 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.741152 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.741168 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:45 crc kubenswrapper[4971]: I1127 06:52:45.741207 4971 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 27 06:52:45 crc kubenswrapper[4971]: E1127 06:52:45.741838 4971 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.50:6443: connect: connection refused" node="crc" Nov 27 06:52:45 crc kubenswrapper[4971]: W1127 06:52:45.824499 4971 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.50:6443: connect: connection refused Nov 27 06:52:45 crc kubenswrapper[4971]: E1127 06:52:45.824644 4971 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.50:6443: connect: connection refused" logger="UnhandledError" Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.615232 4971 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18" exitCode=0 Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.615348 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18"} Nov 27 06:52:46 
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.618445 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.618507 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.618522 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.619847 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.619867 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54"}
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.619964 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.620522 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.620772 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.621094 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.621163 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.621191 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.621116 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.621260 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.621279 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.621482 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.621520 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.621551 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.701521 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.703267 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.705099 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.705174 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.705201 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:52:46 crc kubenswrapper[4971]: I1127 06:52:46.708613 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.296794 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.631162 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4dda9554712ec04cb4c9616ee62cb0e5e2cf106ba9c63ab411fb07fb892a1897"}
Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.631227 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c"}
Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.631244 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.631261 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.631408 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.631245 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66"}
Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.631920 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984"}
Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.631945 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7"}
Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.631968 4971 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.631988 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.632325 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.632346 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
event="NodeHasNoDiskPressure" Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.632357 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.632381 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.632396 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.632406 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.633384 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.633431 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.633445 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.633463 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.633499 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.633509 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:47 crc kubenswrapper[4971]: I1127 06:52:47.769012 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:52:48 crc kubenswrapper[4971]: I1127 06:52:48.634951 4971 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 27 06:52:48 crc kubenswrapper[4971]: I1127 06:52:48.634988 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:48 crc kubenswrapper[4971]: I1127 06:52:48.635028 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:48 crc kubenswrapper[4971]: I1127 06:52:48.637032 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:48 crc kubenswrapper[4971]: I1127 06:52:48.637103 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:48 crc kubenswrapper[4971]: I1127 06:52:48.637131 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:48 crc kubenswrapper[4971]: I1127 06:52:48.637759 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:48 crc kubenswrapper[4971]: I1127 06:52:48.637992 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:48 crc kubenswrapper[4971]: I1127 06:52:48.638164 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:48 crc kubenswrapper[4971]: I1127 06:52:48.942778 4971 kubelet_node_status.go:401] "Setting node 
annotation to enable volume controller attach/detach" Nov 27 06:52:48 crc kubenswrapper[4971]: I1127 06:52:48.944190 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:48 crc kubenswrapper[4971]: I1127 06:52:48.944230 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:48 crc kubenswrapper[4971]: I1127 06:52:48.944244 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:48 crc kubenswrapper[4971]: I1127 06:52:48.944281 4971 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 27 06:52:49 crc kubenswrapper[4971]: I1127 06:52:49.573488 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 27 06:52:49 crc kubenswrapper[4971]: I1127 06:52:49.573712 4971 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 27 06:52:49 crc kubenswrapper[4971]: I1127 06:52:49.573768 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:49 crc kubenswrapper[4971]: I1127 06:52:49.575490 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:49 crc kubenswrapper[4971]: I1127 06:52:49.575635 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:49 crc kubenswrapper[4971]: I1127 06:52:49.575662 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:49 crc kubenswrapper[4971]: I1127 06:52:49.637866 4971 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 27 06:52:49 crc kubenswrapper[4971]: I1127 06:52:49.638036 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:49 crc kubenswrapper[4971]: I1127 06:52:49.639664 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:49 crc kubenswrapper[4971]: I1127 06:52:49.639716 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:49 crc kubenswrapper[4971]: I1127 06:52:49.639737 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:50 crc kubenswrapper[4971]: I1127 06:52:50.256687 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:52:50 crc kubenswrapper[4971]: I1127 06:52:50.640723 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:50 crc kubenswrapper[4971]: I1127 06:52:50.641961 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:50 crc kubenswrapper[4971]: I1127 06:52:50.642008 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:50 crc kubenswrapper[4971]: I1127 06:52:50.642020 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:52 crc kubenswrapper[4971]: I1127 06:52:52.069676 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 27 06:52:52 crc kubenswrapper[4971]: I1127 06:52:52.069895 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:52 crc kubenswrapper[4971]: I1127 06:52:52.070960 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:52 crc kubenswrapper[4971]: I1127 06:52:52.070988 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:52 crc kubenswrapper[4971]: I1127 06:52:52.071000 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:52 crc kubenswrapper[4971]: I1127 06:52:52.344351 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Nov 27 06:52:52 crc kubenswrapper[4971]: I1127 06:52:52.344612 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:52 crc kubenswrapper[4971]: I1127 06:52:52.345851 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:52 crc kubenswrapper[4971]: I1127 06:52:52.345888 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:52 crc kubenswrapper[4971]: I1127 06:52:52.345900 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:52 crc kubenswrapper[4971]: E1127 06:52:52.634315 4971 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Nov 27 06:52:53 crc kubenswrapper[4971]: I1127 06:52:53.089818 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 27 06:52:53 crc kubenswrapper[4971]: I1127 06:52:53.090163 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:53 crc kubenswrapper[4971]: I1127 06:52:53.091915 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:53 crc kubenswrapper[4971]: I1127 06:52:53.092136 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:53 crc kubenswrapper[4971]: I1127 06:52:53.092275 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:52:53 crc kubenswrapper[4971]: I1127 06:52:53.095044 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 27 06:52:53 crc kubenswrapper[4971]: I1127 06:52:53.651148 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:52:53 crc kubenswrapper[4971]: I1127 06:52:53.652437 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:52:53 crc kubenswrapper[4971]: I1127 06:52:53.652504 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:52:53 crc kubenswrapper[4971]: I1127 06:52:53.652522 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 27 06:52:56 crc kubenswrapper[4971]: W1127 06:52:56.084820 4971 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Nov 27 06:52:56 crc kubenswrapper[4971]: I1127 06:52:56.085701 4971 trace.go:236] Trace[1946087851]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (27-Nov-2025 06:52:46.083) (total time: 10002ms): Nov 27 06:52:56 crc kubenswrapper[4971]: Trace[1946087851]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (06:52:56.084) Nov 27 06:52:56 crc kubenswrapper[4971]: Trace[1946087851]: [10.002558809s] [10.002558809s] END Nov 27 06:52:56 crc kubenswrapper[4971]: E1127 06:52:56.085775 4971 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Nov 27 06:52:56 crc kubenswrapper[4971]: I1127 06:52:56.090284 4971 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Nov 27 06:52:56 crc kubenswrapper[4971]: I1127 06:52:56.090353 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 27 06:52:56 crc kubenswrapper[4971]: W1127 06:52:56.207922 4971 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Nov 27 06:52:56 crc kubenswrapper[4971]: I1127 06:52:56.208044 4971 trace.go:236] Trace[1713278683]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (27-Nov-2025 06:52:46.205) (total time: 10002ms): Nov 27 06:52:56 crc kubenswrapper[4971]: Trace[1713278683]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (06:52:56.207) Nov 27 06:52:56 crc kubenswrapper[4971]: Trace[1713278683]: [10.002056677s] [10.002056677s] END Nov 27 06:52:56 crc kubenswrapper[4971]: E1127 06:52:56.208076 4971 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Nov 27 06:52:56 crc kubenswrapper[4971]: I1127 06:52:56.318113 4971 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: 
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]log ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]etcd ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/start-apiserver-admission-initializer ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/openshift.io-api-request-count-filter ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/openshift.io-startkubeinformers ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/generic-apiserver-start-informers ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/priority-and-fairness-config-consumer ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/priority-and-fairness-filter ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/storage-object-count-tracker-hook ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/start-apiextensions-informers ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]poststarthook/start-apiextensions-controllers failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]poststarthook/crd-informer-synced failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/start-system-namespaces-controller ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/start-cluster-authentication-info-controller ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/start-legacy-token-tracking-controller ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]poststarthook/start-service-ip-repair-controllers failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]poststarthook/priority-and-fairness-config-producer failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]poststarthook/bootstrap-controller failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/aggregator-reload-proxy-client-cert ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/start-kube-aggregator-informers ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/apiservice-status-local-available-controller ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/apiservice-status-remote-available-controller ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]poststarthook/apiservice-registration-controller failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/apiservice-wait-for-first-sync ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]poststarthook/apiservice-discovery-controller failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/kube-apiserver-autoregistration ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]autoregister-completion failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/apiservice-openapi-controller ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/apiservice-openapiv3-controller ok
Nov 27 06:52:56 crc kubenswrapper[4971]: livez check failed
Nov 27 06:52:56 crc kubenswrapper[4971]: I1127 06:52:56.318183 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 27 06:52:56 crc kubenswrapper[4971]: I1127 06:52:56.322613 4971 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]log ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]etcd ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/start-apiserver-admission-initializer ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/openshift.io-api-request-count-filter ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/openshift.io-startkubeinformers ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/generic-apiserver-start-informers ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/priority-and-fairness-config-consumer ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/priority-and-fairness-filter ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/storage-object-count-tracker-hook ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/start-apiextensions-informers ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]poststarthook/start-apiextensions-controllers failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]poststarthook/crd-informer-synced failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/start-system-namespaces-controller ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/start-cluster-authentication-info-controller ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/start-legacy-token-tracking-controller ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]poststarthook/start-service-ip-repair-controllers failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]poststarthook/priority-and-fairness-config-producer failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]poststarthook/bootstrap-controller failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/aggregator-reload-proxy-client-cert ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/start-kube-aggregator-informers ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/apiservice-status-local-available-controller ok
[+]poststarthook/apiservice-status-local-available-controller ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/apiservice-status-remote-available-controller ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]poststarthook/apiservice-registration-controller failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/apiservice-wait-for-first-sync ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]poststarthook/apiservice-discovery-controller failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/kube-apiserver-autoregistration ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [-]autoregister-completion failed: reason withheld
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/apiservice-openapi-controller ok
Nov 27 06:52:56 crc kubenswrapper[4971]: [+]poststarthook/apiservice-openapiv3-controller ok
Nov 27 06:52:56 crc kubenswrapper[4971]: livez check failed
Nov 27 06:52:56 crc kubenswrapper[4971]: I1127 06:52:56.322686 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 27 06:52:56 crc kubenswrapper[4971]: I1127 06:52:56.505174 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Nov 27 06:52:56 crc kubenswrapper[4971]: I1127 06:52:56.505364 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 27 06:52:56 crc kubenswrapper[4971]: I1127 06:52:56.506799 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:52:56 crc kubenswrapper[4971]: I1127 06:52:56.506832 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:52:56 crc kubenswrapper[4971]: I1127 06:52:56.506847 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:52:57 crc kubenswrapper[4971]: I1127 06:52:57.308686 4971 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]log ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]etcd ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/start-apiserver-admission-initializer ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/openshift.io-api-request-count-filter ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/openshift.io-startkubeinformers ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/generic-apiserver-start-informers ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/priority-and-fairness-config-consumer ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/priority-and-fairness-filter ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/storage-object-count-tracker-hook ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/start-apiextensions-informers ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/start-apiextensions-controllers ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/crd-informer-synced ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/start-system-namespaces-controller ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/start-cluster-authentication-info-controller ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/start-legacy-token-tracking-controller ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/start-service-ip-repair-controllers ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld
Nov 27 06:52:57 crc kubenswrapper[4971]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/priority-and-fairness-config-producer ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/bootstrap-controller ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/aggregator-reload-proxy-client-cert ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/start-kube-aggregator-informers ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/apiservice-status-local-available-controller ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/apiservice-status-remote-available-controller ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/apiservice-registration-controller ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/apiservice-wait-for-first-sync ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/apiservice-discovery-controller ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/kube-apiserver-autoregistration ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]autoregister-completion ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/apiservice-openapi-controller ok
Nov 27 06:52:57 crc kubenswrapper[4971]: [+]poststarthook/apiservice-openapiv3-controller ok
Nov 27 06:52:57 crc kubenswrapper[4971]: livez check failed
Nov 27 06:52:57 crc kubenswrapper[4971]: I1127 06:52:57.308750 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 27 06:52:59 crc kubenswrapper[4971]: I1127 06:52:59.562453 4971 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Nov 27 06:53:01 crc kubenswrapper[4971]: E1127 06:53:01.305383 4971 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Nov 27 06:53:01 crc kubenswrapper[4971]: E1127 06:53:01.308806 4971 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Nov 27 06:53:01 crc kubenswrapper[4971]: I1127 06:53:01.309154 4971 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Nov 27 06:53:01 crc kubenswrapper[4971]: I1127 06:53:01.309291 4971 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
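The two probe dumps above are the kube-apiserver's aggregated /livez health report, relayed verbatim by the kubelet prober. A minimal client-go sketch of fetching the same per-check detail by hand follows; it is not part of this log, and the kubeconfig path is an assumed placeholder.

package main

import (
	"context"
	"fmt"

	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumed placeholder path; any admin kubeconfig for this cluster works.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/admin.kubeconfig")
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// GET /livez?verbose returns the same [+]/[-] check list seen in the
	// probe output above; while any check fails the server answers 500,
	// which surfaces here as a non-nil error alongside the body.
	body, err := cs.Discovery().RESTClient().Get().
		AbsPath("/livez").Param("verbose", "true").
		DoRaw(context.Background())
	fmt.Println(string(body))
	if err != nil {
		fmt.Println("livez reported failure:", err)
	}
}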
Nov 27 06:53:01 crc kubenswrapper[4971]: I1127 06:53:01.310328 4971 trace.go:236] Trace[902675716]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (27-Nov-2025 06:52:46.419) (total time: 14890ms):
Nov 27 06:53:01 crc kubenswrapper[4971]: Trace[902675716]: ---"Objects listed" error: 14890ms (06:53:01.310)
Nov 27 06:53:01 crc kubenswrapper[4971]: Trace[902675716]: [14.890949468s] [14.890949468s] END
Nov 27 06:53:01 crc kubenswrapper[4971]: I1127 06:53:01.310354 4971 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Nov 27 06:53:01 crc kubenswrapper[4971]: I1127 06:53:01.470023 4971 apiserver.go:52] "Watching apiserver"
Nov 27 06:53:01 crc kubenswrapper[4971]: I1127 06:53:01.532586 4971 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.465770 4971 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.465991 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"]
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.466449 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:53:02 crc kubenswrapper[4971]: E1127 06:53:02.466505 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.466572 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.466798 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:53:02 crc kubenswrapper[4971]: E1127 06:53:02.466826 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.467137 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.467252 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.467347 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:53:02 crc kubenswrapper[4971]: E1127 06:53:02.467378 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.470672 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.473331 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.474408 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.474565 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.474633 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.476109 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.476564 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.476680 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.476906 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.476945 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.480781 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.491585 4971 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.548681 4971 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40970->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.548771 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40970->192.168.126.11:17697: read: connection reset by peer"
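Two node-level signals are failing in the records above: the kubelet cannot renew its Lease in kube-node-lease, and pod sandboxes cannot start because no CNI configuration exists under /etc/kubernetes/cni/net.d/ yet. A sketch (assumed helper, not part of the kubelet) for inspecting both from outside the node follows; the kubeconfig path is again an assumed placeholder.

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/admin.kubeconfig") // assumed path
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)
	ctx := context.Background()

	// The lease the kubelet could not renew ("Failed to ensure lease exists").
	lease, err := cs.CoordinationV1().Leases("kube-node-lease").Get(ctx, "crc", metav1.GetOptions{})
	if err != nil {
		fmt.Println("lease:", err)
	} else {
		fmt.Println("lease last renewed:", lease.Spec.RenewTime)
	}

	// Node conditions; Ready=False while the CNI plugin has not published a
	// config matches the NetworkPluginNotReady errors above.
	node, err := cs.CoreV1().Nodes().Get(ctx, "crc", metav1.GetOptions{})
	if err != nil {
		fmt.Println("node:", err)
		return
	}
	for _, c := range node.Status.Conditions {
		fmt.Printf("%s=%s (%s)\n", c.Type, c.Status, c.Reason)
	}
}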
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40970->192.168.126.11:17697: read: connection reset by peer" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.548872 4971 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40986->192.168.126.11:17697: read: connection reset by peer" start-of-body= Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.548888 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40986->192.168.126.11:17697: read: connection reset by peer" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.549139 4971 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.549158 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.559947 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.559999 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560026 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560056 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560210 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560246 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560274 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560298 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560325 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560352 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560376 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560406 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560433 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560437 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560464 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560505 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560556 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560583 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560613 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560642 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560631 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560667 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560727 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560755 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560786 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560810 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560840 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560685 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.561071 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.561086 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.561158 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.561256 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.561280 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.561342 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.561402 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560790 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560852 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.561451 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: E1127 06:53:02.560860 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:53:03.060836034 +0000 UTC m=+21.252879952 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.561500 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.561556 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.561502 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.560896 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.561004 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.561000 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.561032 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.561699 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.561891 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.562006 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.563749 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.563809 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.563837 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.563854 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.563871 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod 
\"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.563886 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.563904 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.563918 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.563935 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.563950 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.563964 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.563979 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564001 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564016 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564033 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: 
\"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564049 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564064 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564078 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564095 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564110 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564126 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564141 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564156 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564172 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564189 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564204 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564221 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564244 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564266 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564282 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564301 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564316 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564331 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564347 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564362 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564378 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564394 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564409 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564425 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564447 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564465 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564486 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564507 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564547 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564572 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" 
(UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564593 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564622 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564647 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564673 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564725 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564748 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564768 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564790 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564808 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564822 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564841 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564855 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564870 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564887 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564905 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564926 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564948 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564967 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.564988 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565014 4971 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565040 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565060 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565077 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565097 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565115 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565136 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565160 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565180 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565202 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") 
" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565225 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565262 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565285 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565306 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565326 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565357 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565381 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565403 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565425 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565447 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod 
\"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565470 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565491 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565514 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565556 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565583 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565604 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565649 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565675 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565699 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565721 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565747 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565772 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565795 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565818 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565839 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565861 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565882 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565904 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565928 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565950 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: 
\"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565971 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.565992 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566016 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566038 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566059 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566080 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566105 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566127 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566149 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566169 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566191 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566189 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566212 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566238 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566264 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566287 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566310 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566333 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566356 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566379 4971 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566401 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566423 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566443 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566463 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566479 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566498 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566514 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566544 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566564 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566583 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566601 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566618 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566635 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566651 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566667 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566686 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566705 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566713 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566722 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566779 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566808 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566833 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566857 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566883 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566908 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566936 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.566994 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.567225 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.567361 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.567423 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.567252 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.569603 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.569675 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.569703 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.569811 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.569971 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.569976 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.569984 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.570172 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.570280 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.570319 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.570462 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.570721 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.570792 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.571208 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.571219 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.571247 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.571337 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.571600 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.571604 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.571890 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.571935 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.571963 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.571989 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572000 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572015 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572041 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572064 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572076 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572088 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572113 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572138 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572167 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572192 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572221 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572255 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572279 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572308 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572334 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572360 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572384 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572408 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572433 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572484 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572512 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572578 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572614 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572660 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572689 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572749 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572778 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572807 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572835 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572864 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572895 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572921 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572949 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573014 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573030 4971 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573045 4971 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573059 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573073 4971 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573086 4971 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573101 4971 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573115 4971 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573129 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573143 4971 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573157 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573171 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573184 4971 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573199 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573213 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573226 4971 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573240 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573253 4971 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573267 4971 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573280 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573295 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573308 4971 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573323 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573336 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573351 4971 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573365 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573379 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573394 4971 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573407 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573439 4971 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573452 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573466 4971 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573481 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573495 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573509 4971 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573522 4971 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573554 4971 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573568 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573581 4971 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573595 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573609 4971 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573622 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573635 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573648 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573661 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573675 4971 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573688 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573702 4971 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.574388 4971 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.582853 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572226 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572485 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572510 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572746 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.586661 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.572867 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573028 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573111 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573288 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573309 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573597 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.573968 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.577602 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.578311 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.578568 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.578749 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.578953 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.581021 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.581507 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.581667 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.581811 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.581870 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.581927 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.582018 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.582039 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.582054 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.582145 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.582254 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.582265 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.582412 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.582436 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.582459 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.582682 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.582876 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.582904 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.582606 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.583059 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.583147 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.583163 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.583351 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.583394 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.583481 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.583515 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.583610 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.583870 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.583933 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.584002 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: E1127 06:53:02.584384 4971 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 27 06:53:02 crc kubenswrapper[4971]: E1127 06:53:02.587330 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:03.087309116 +0000 UTC m=+21.279353034 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.584420 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.583368 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.584853 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.585521 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.585620 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.586026 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.586399 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.586399 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.586516 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.586567 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.586744 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.588392 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: E1127 06:53:02.588439 4971 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.588443 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: E1127 06:53:02.588512 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:03.088494579 +0000 UTC m=+21.280538497 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.588569 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.589517 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.589684 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.589799 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.591067 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.593134 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.593193 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.593330 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.593376 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.593897 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.594604 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.594846 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.594850 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.595073 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.595189 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.595712 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.596063 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.596373 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.596767 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.597140 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.597401 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.598768 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.599010 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.599162 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). 
InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.599353 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.600550 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.600791 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.600810 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.601159 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.601280 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.601818 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.604611 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.604694 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.604715 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.604943 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.604939 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.605044 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.605172 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.605212 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.605242 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.605279 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.605399 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: E1127 06:53:02.605520 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 27 06:53:02 crc kubenswrapper[4971]: E1127 06:53:02.605552 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 27 06:53:02 crc kubenswrapper[4971]: E1127 06:53:02.605565 4971 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:02 crc kubenswrapper[4971]: E1127 06:53:02.605883 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:03.105598174 +0000 UTC m=+21.297642092 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.605884 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.606141 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.606211 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.606353 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.606551 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.606690 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: E1127 06:53:02.611035 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 27 06:53:02 crc kubenswrapper[4971]: E1127 06:53:02.611065 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 27 06:53:02 crc kubenswrapper[4971]: E1127 06:53:02.611077 4971 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:02 crc kubenswrapper[4971]: E1127 06:53:02.611138 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:03.111117521 +0000 UTC m=+21.303161439 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.613766 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.616789 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.617198 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.617813 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.618488 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.618862 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.620356 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.620383 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.620709 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.620849 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.622402 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.622959 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.623121 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.623321 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.623387 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.623591 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.624054 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.624559 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.624572 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.624814 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.624872 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.625099 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.625188 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.625222 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.625413 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.625409 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.625676 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.625982 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.626076 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.626193 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.626104 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.627189 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.627835 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.628175 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.628881 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.629076 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.629357 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.629453 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.629550 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.629823 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.629973 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.630309 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.630755 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.632046 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.632797 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.639711 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.642791 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.644237 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71
a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.656682 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.658576 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.665285 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.675832 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.675866 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.675937 4971 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.675948 4971 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.675957 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.675965 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.675974 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.675982 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.675990 4971 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.675998 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676006 4971 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676014 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676021 4971 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676029 4971 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676036 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676043 4971 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676051 4971 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676059 4971 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676067 4971 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676075 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676082 4971 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676090 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: 
\"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676098 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676107 4971 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676115 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676123 4971 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676131 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676139 4971 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676147 4971 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676155 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676162 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676170 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676179 4971 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676187 4971 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676194 4971 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676203 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676211 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676218 4971 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676226 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676235 4971 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676244 4971 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676252 4971 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676260 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676268 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676276 4971 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676283 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676291 4971 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676299 4971 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676307 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676314 4971 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676322 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676330 4971 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676338 4971 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676345 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676352 4971 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676360 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676369 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676377 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676386 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676395 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676404 4971 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676411 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676419 4971 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676426 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676435 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676442 4971 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676455 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676465 4971 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676475 4971 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676488 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676499 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676508 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676518 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: 
I1127 06:53:02.676548 4971 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676557 4971 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676565 4971 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676573 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676581 4971 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676588 4971 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676596 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676604 4971 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676612 4971 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676620 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676628 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676636 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676643 4971 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" 
DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676651 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676659 4971 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676667 4971 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676675 4971 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676682 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676689 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676697 4971 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676704 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676711 4971 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676718 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676726 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676734 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676746 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676754 4971 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676786 4971 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676795 4971 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676802 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676810 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676819 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676827 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676835 4971 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676842 4971 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676850 4971 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676858 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676866 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676874 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676881 4971 
reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676889 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676896 4971 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676904 4971 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676911 4971 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676919 4971 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676926 4971 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676934 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676941 4971 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676948 4971 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676956 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676964 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676971 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc 
kubenswrapper[4971]: I1127 06:53:02.676980 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676988 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.676996 4971 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677004 4971 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677011 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677020 4971 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677028 4971 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677035 4971 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677042 4971 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677050 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677058 4971 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677066 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677074 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" 
DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677082 4971 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677089 4971 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677097 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677105 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677118 4971 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677126 4971 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677134 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677142 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677150 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677157 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677360 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.677383 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.683695 4971 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.695019 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.705690 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.717604 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.729294 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.746913 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.757206 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.774726 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.780103 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.786302 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.787325 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 27 06:53:02 crc kubenswrapper[4971]: W1127 06:53:02.794032 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-15c921466264c6550024ea4c03349b0448778e1522417ef2b96271985ea96eda WatchSource:0}: Error finding container 15c921466264c6550024ea4c03349b0448778e1522417ef2b96271985ea96eda: Status 404 returned error can't find the container with id 15c921466264c6550024ea4c03349b0448778e1522417ef2b96271985ea96eda Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.798450 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 27 06:53:02 crc kubenswrapper[4971]: I1127 06:53:02.801781 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.056243 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-srcg7"] Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.056595 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-srcg7" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.059634 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.060088 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.060106 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.069555 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.079056 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.080230 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.080456 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:53:04.080415905 +0000 UTC m=+22.272459823 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.093720 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.094358 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.098503 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.116930 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.118615 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.128504 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.139605 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.154031 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.166187 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.176477 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.180671 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.180707 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.180725 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.180745 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.180765 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/9ea5e8b6-6a19-49f0-8107-d4cb93d52889-hosts-file\") pod \"node-resolver-srcg7\" (UID: \"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\") " pod="openshift-dns/node-resolver-srcg7" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.180782 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwcr9\" (UniqueName: \"kubernetes.io/projected/9ea5e8b6-6a19-49f0-8107-d4cb93d52889-kube-api-access-kwcr9\") pod \"node-resolver-srcg7\" (UID: \"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\") " pod="openshift-dns/node-resolver-srcg7" Nov 27 06:53:03 crc kubenswrapper[4971]: 
E1127 06:53:03.180815 4971 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.180875 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:04.180858985 +0000 UTC m=+22.372902903 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.180889 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.180909 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.180921 4971 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.180956 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:04.180943687 +0000 UTC m=+22.372987605 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.181000 4971 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.181040 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:04.181018119 +0000 UTC m=+22.373062187 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.181091 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.181102 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.181109 4971 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.181131 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:04.181125502 +0000 UTC m=+22.373169410 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.186088 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.195561 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: 
connection refused" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.202757 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.210659 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.219468 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.229109 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.239028 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.249791 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.282405 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/9ea5e8b6-6a19-49f0-8107-d4cb93d52889-hosts-file\") pod \"node-resolver-srcg7\" (UID: \"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\") " pod="openshift-dns/node-resolver-srcg7" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.282467 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwcr9\" (UniqueName: \"kubernetes.io/projected/9ea5e8b6-6a19-49f0-8107-d4cb93d52889-kube-api-access-kwcr9\") pod \"node-resolver-srcg7\" (UID: \"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\") " pod="openshift-dns/node-resolver-srcg7" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.282599 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/9ea5e8b6-6a19-49f0-8107-d4cb93d52889-hosts-file\") pod \"node-resolver-srcg7\" (UID: \"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\") " pod="openshift-dns/node-resolver-srcg7" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.300159 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwcr9\" (UniqueName: \"kubernetes.io/projected/9ea5e8b6-6a19-49f0-8107-d4cb93d52889-kube-api-access-kwcr9\") pod \"node-resolver-srcg7\" (UID: \"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\") " pod="openshift-dns/node-resolver-srcg7" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.367061 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-srcg7" Nov 27 06:53:03 crc kubenswrapper[4971]: W1127 06:53:03.376063 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9ea5e8b6_6a19_49f0_8107_d4cb93d52889.slice/crio-f9aa687b8f2453d6a00f1e3933c2edfa6558fd266ab51fdffc42782cb638b77a WatchSource:0}: Error finding container f9aa687b8f2453d6a00f1e3933c2edfa6558fd266ab51fdffc42782cb638b77a: Status 404 returned error can't find the container with id f9aa687b8f2453d6a00f1e3933c2edfa6558fd266ab51fdffc42782cb638b77a Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.438277 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-w2fnl"] Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.438980 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-lr9p6"] Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.439169 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-56nwb"] Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.439208 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.439317 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.440112 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: W1127 06:53:03.442402 4971 reflector.go:561] object-"openshift-ovn-kubernetes"/"env-overrides": failed to list *v1.ConfigMap: configmaps "env-overrides" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.442442 4971 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"env-overrides\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"env-overrides\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 27 06:53:03 crc kubenswrapper[4971]: W1127 06:53:03.442555 4971 reflector.go:561] object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz": failed to list *v1.Secret: secrets "multus-ancillary-tools-dockercfg-vnmsz" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Nov 27 06:53:03 crc kubenswrapper[4971]: W1127 06:53:03.442578 4971 reflector.go:561] object-"openshift-multus"/"default-dockercfg-2q5b6": failed to list *v1.Secret: secrets "default-dockercfg-2q5b6" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.442592 4971 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"default-dockercfg-2q5b6\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets 
\"default-dockercfg-2q5b6\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.442591 4971 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"multus-ancillary-tools-dockercfg-vnmsz\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"multus-ancillary-tools-dockercfg-vnmsz\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 27 06:53:03 crc kubenswrapper[4971]: W1127 06:53:03.442557 4971 reflector.go:561] object-"openshift-multus"/"cni-copy-resources": failed to list *v1.ConfigMap: configmaps "cni-copy-resources" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Nov 27 06:53:03 crc kubenswrapper[4971]: W1127 06:53:03.442617 4971 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovnkube-script-lib": failed to list *v1.ConfigMap: configmaps "ovnkube-script-lib" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.442624 4971 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"cni-copy-resources\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"cni-copy-resources\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.442627 4971 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovnkube-script-lib\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"ovnkube-script-lib\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 27 06:53:03 crc kubenswrapper[4971]: W1127 06:53:03.442653 4971 reflector.go:561] object-"openshift-multus"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.442663 4971 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 27 06:53:03 crc kubenswrapper[4971]: W1127 06:53:03.442673 4971 reflector.go:561] object-"openshift-ovn-kubernetes"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list 
resource "configmaps" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Nov 27 06:53:03 crc kubenswrapper[4971]: W1127 06:53:03.442689 4971 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl": failed to list *v1.Secret: secrets "ovn-kubernetes-node-dockercfg-pwtwl" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.442700 4971 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovn-kubernetes-node-dockercfg-pwtwl\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"ovn-kubernetes-node-dockercfg-pwtwl\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.442688 4971 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 27 06:53:03 crc kubenswrapper[4971]: W1127 06:53:03.442721 4971 reflector.go:561] object-"openshift-multus"/"default-cni-sysctl-allowlist": failed to list *v1.ConfigMap: configmaps "default-cni-sysctl-allowlist" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.442736 4971 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"default-cni-sysctl-allowlist\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"default-cni-sysctl-allowlist\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 27 06:53:03 crc kubenswrapper[4971]: W1127 06:53:03.442774 4971 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert": failed to list *v1.Secret: secrets "ovn-node-metrics-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.442787 4971 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovn-node-metrics-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"ovn-node-metrics-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 27 06:53:03 crc kubenswrapper[4971]: W1127 06:53:03.442793 4971 reflector.go:561] object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User 
"system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.442810 4971 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 27 06:53:03 crc kubenswrapper[4971]: W1127 06:53:03.442835 4971 reflector.go:561] object-"openshift-multus"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Nov 27 06:53:03 crc kubenswrapper[4971]: E1127 06:53:03.442851 4971 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.446020 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.446156 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-zdz6h"] Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.446435 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.447735 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.448040 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.448314 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.448365 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.448551 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.450744 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.464692 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.468037 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.470838 4971 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54" exitCode=255 Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.470896 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54"} Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.471313 4971 scope.go:117] "RemoveContainer" containerID="fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.473890 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-srcg7" event={"ID":"9ea5e8b6-6a19-49f0-8107-d4cb93d52889","Type":"ContainerStarted","Data":"f9aa687b8f2453d6a00f1e3933c2edfa6558fd266ab51fdffc42782cb638b77a"} Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 
06:53:03.476273 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"d1671ceb9609c4b456d96233633d13b82d958dc2175c01a05463feebb2d30d1f"} Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.480698 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f"} Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.480740 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"15c921466264c6550024ea4c03349b0448778e1522417ef2b96271985ea96eda"} Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.482376 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\
\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.485174 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52"} Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.485241 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9"} Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.485257 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5897f8fdb3cd879cfd8d7d65cbb14f688f3e25514ee11fa147ea86fb630248b7"} Nov 27 06:53:03 
crc kubenswrapper[4971]: I1127 06:53:03.497166 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.522976 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.539431 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.551638 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.571335 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec
8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584607 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-systemd-units\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584653 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0ab8c2ef-d82b-4396-919d-8550cc2e24d7-proxy-tls\") pod \"machine-config-daemon-zdz6h\" (UID: \"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\") " pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584675 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0ab8c2ef-d82b-4396-919d-8550cc2e24d7-mcd-auth-proxy-config\") pod \"machine-config-daemon-zdz6h\" (UID: \"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\") " pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584709 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-run-netns\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584725 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovn-node-metrics-cert\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584739 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9lhg\" (UniqueName: 
\"kubernetes.io/projected/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-kube-api-access-k9lhg\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584773 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-run-systemd\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584788 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-cni-netd\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584802 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovnkube-config\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584819 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-multus-cni-dir\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584833 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-cni-binary-copy\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584847 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-host-var-lib-cni-multus\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584862 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-run-ovn\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584878 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-node-log\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584893 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" 
(UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-cni-bin\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584909 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584924 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-os-release\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584940 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-multus-socket-dir-parent\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584959 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-var-lib-openvswitch\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584972 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-log-socket\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.584987 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-kubelet\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585001 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-host-run-netns\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585015 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-host-run-multus-certs\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585030 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-etc-kubernetes\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585051 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-system-cni-dir\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585066 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-cnibin\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585081 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-os-release\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585096 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-multus-conf-dir\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585110 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-etc-openvswitch\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585128 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-host-var-lib-kubelet\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585142 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-cni-binary-copy\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585156 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0ab8c2ef-d82b-4396-919d-8550cc2e24d7-rootfs\") pod \"machine-config-daemon-zdz6h\" (UID: \"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\") " pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 06:53:03 
crc kubenswrapper[4971]: I1127 06:53:03.585172 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-host-run-k8s-cni-cncf-io\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585565 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovnkube-script-lib\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585585 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-cnibin\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585600 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-hostroot\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585616 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-multus-daemon-config\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585640 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vc4z8\" (UniqueName: \"kubernetes.io/projected/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-kube-api-access-vc4z8\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585656 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-run-openvswitch\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585672 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-env-overrides\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585685 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-system-cni-dir\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " 
pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585700 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-tuning-conf-dir\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585717 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585741 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-slash\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585757 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-host-var-lib-cni-bin\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585773 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pmn4\" (UniqueName: \"kubernetes.io/projected/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-kube-api-access-5pmn4\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585789 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-run-ovn-kubernetes\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.585806 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxzdj\" (UniqueName: \"kubernetes.io/projected/0ab8c2ef-d82b-4396-919d-8550cc2e24d7-kube-api-access-fxzdj\") pod \"machine-config-daemon-zdz6h\" (UID: \"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\") " pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.591806 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.602657 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.614363 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.627102 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.640130 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.650901 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.661946 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.678058 4971 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\
\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\
":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2
af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686172 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxzdj\" (UniqueName: \"kubernetes.io/projected/0ab8c2ef-d82b-4396-919d-8550cc2e24d7-kube-api-access-fxzdj\") pod \"machine-config-daemon-zdz6h\" (UID: \"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\") " pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686228 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-systemd-units\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686249 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0ab8c2ef-d82b-4396-919d-8550cc2e24d7-proxy-tls\") pod \"machine-config-daemon-zdz6h\" (UID: \"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\") " pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686279 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0ab8c2ef-d82b-4396-919d-8550cc2e24d7-mcd-auth-proxy-config\") pod \"machine-config-daemon-zdz6h\" (UID: \"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\") " pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686300 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-run-netns\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686326 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovn-node-metrics-cert\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686348 4971 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-k9lhg\" (UniqueName: \"kubernetes.io/projected/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-kube-api-access-k9lhg\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686350 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-systemd-units\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686369 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovnkube-config\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686437 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-run-systemd\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686454 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-cni-netd\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686470 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-multus-cni-dir\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686486 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-cni-binary-copy\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686501 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-run-ovn\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686515 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-node-log\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686553 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-cni-bin\") pod 
\"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686572 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-host-var-lib-cni-multus\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686587 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-run-netns\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686600 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-var-lib-openvswitch\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686617 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-log-socket\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686634 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686652 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-os-release\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686666 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-multus-socket-dir-parent\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686685 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-kubelet\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686709 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-system-cni-dir\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " 
pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686724 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-host-run-netns\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686740 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-host-run-multus-certs\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686761 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-etc-kubernetes\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686782 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-cnibin\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686805 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-os-release\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686824 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-multus-conf-dir\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686842 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-etc-openvswitch\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686856 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-host-var-lib-kubelet\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686875 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovnkube-script-lib\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686890 4971 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-cni-binary-copy\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686904 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0ab8c2ef-d82b-4396-919d-8550cc2e24d7-rootfs\") pod \"machine-config-daemon-zdz6h\" (UID: \"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\") " pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686919 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-host-run-k8s-cni-cncf-io\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686935 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc4z8\" (UniqueName: \"kubernetes.io/projected/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-kube-api-access-vc4z8\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686951 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-cnibin\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686968 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-hostroot\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686982 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-multus-daemon-config\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.686998 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687017 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-slash\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687047 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-run-openvswitch\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687065 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovnkube-config\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687071 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-env-overrides\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687121 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-system-cni-dir\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687142 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-run-systemd\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687145 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-tuning-conf-dir\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687176 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-host-var-lib-cni-bin\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687195 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pmn4\" (UniqueName: \"kubernetes.io/projected/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-kube-api-access-5pmn4\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687219 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-run-ovn-kubernetes\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687271 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-system-cni-dir\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687290 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-run-ovn-kubernetes\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687310 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-cnibin\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687338 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-cni-netd\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687395 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-multus-cni-dir\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687435 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-os-release\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687442 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-run-ovn\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687463 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-multus-conf-dir\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687469 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-node-log\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687489 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-etc-openvswitch\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687497 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-cni-bin\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687512 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-host-var-lib-kubelet\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687523 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-host-var-lib-cni-multus\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687565 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-var-lib-openvswitch\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687576 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-tuning-conf-dir\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687594 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-log-socket\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687610 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0ab8c2ef-d82b-4396-919d-8550cc2e24d7-rootfs\") pod \"machine-config-daemon-zdz6h\" (UID: \"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\") " pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687625 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-kubelet\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687636 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-system-cni-dir\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687646 4971 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-host-run-k8s-cni-cncf-io\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687661 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687673 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-host-run-netns\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687708 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-host-run-multus-certs\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687715 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-os-release\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687712 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0ab8c2ef-d82b-4396-919d-8550cc2e24d7-mcd-auth-proxy-config\") pod \"machine-config-daemon-zdz6h\" (UID: \"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\") " pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687731 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-etc-kubernetes\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687763 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-multus-socket-dir-parent\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687788 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-run-openvswitch\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687796 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-host-var-lib-cni-bin\") pod 
\"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687812 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-cnibin\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687818 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-slash\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.687831 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-hostroot\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.688283 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-multus-daemon-config\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.689014 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.691813 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0ab8c2ef-d82b-4396-919d-8550cc2e24d7-proxy-tls\") pod \"machine-config-daemon-zdz6h\" (UID: \"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\") " pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.704021 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxzdj\" (UniqueName: \"kubernetes.io/projected/0ab8c2ef-d82b-4396-919d-8550cc2e24d7-kube-api-access-fxzdj\") pod \"machine-config-daemon-zdz6h\" (UID: \"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\") " pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.703983 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.720484 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.732573 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.743220 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.753706 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.764033 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.777984 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec
8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.787438 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 06:53:03 crc kubenswrapper[4971]: I1127 06:53:03.792059 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:03Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:03 crc kubenswrapper[4971]: W1127 06:53:03.804491 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ab8c2ef_d82b_4396_919d_8550cc2e24d7.slice/crio-c0952dbb7b0e92070e9b73bd404b8a4e6c1082ab2d347210d010386cf8b83384 WatchSource:0}: Error finding container c0952dbb7b0e92070e9b73bd404b8a4e6c1082ab2d347210d010386cf8b83384: Status 404 returned error can't find the container with id c0952dbb7b0e92070e9b73bd404b8a4e6c1082ab2d347210d010386cf8b83384 Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.090520 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.090796 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:53:06.090778869 +0000 UTC m=+24.282822787 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.191274 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.191326 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.191367 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.191389 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.191546 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.191563 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.191573 4971 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.191611 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:06.1915992 +0000 UTC m=+24.383643108 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.191897 4971 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.191926 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:06.191919129 +0000 UTC m=+24.383963047 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.191966 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.191975 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.191983 4971 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.192004 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:06.191998431 +0000 UTC m=+24.384042349 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.192038 4971 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.192057 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:06.192051922 +0000 UTC m=+24.384095830 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.263142 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.268376 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-env-overrides\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.398045 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.489484 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-srcg7" event={"ID":"9ea5e8b6-6a19-49f0-8107-d4cb93d52889","Type":"ContainerStarted","Data":"db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8"} Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.491096 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4"} Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.491143 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20"} Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.491161 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"c0952dbb7b0e92070e9b73bd404b8a4e6c1082ab2d347210d010386cf8b83384"} Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.492867 4971 log.go:25] "Finished parsing log file" 
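[annotation] The MountVolume.SetUp failures above are a startup-ordering problem rather than data loss: right after kubelet restart, the kube-root-ca.crt and openshift-service-ca.crt ConfigMaps are not yet "registered" in its local object cache (the nearby "Caches populated for *v1.ConfigMap" records show them arriving moments later), so each mount is parked and retried after a fixed durationBeforeRetry of 2s. A minimal Go sketch of that retry shape, assuming a hypothetical mountVolume stand-in for the real operation:

```go
// Illustrative only: mimics the fixed "durationBeforeRetry 2s" backoff seen
// in the nestedpendingoperations records above. Not kubelet source.
package main

import (
	"errors"
	"fmt"
	"time"
)

// mountVolume is a hypothetical stand-in for MountVolume.SetUp; it fails
// until its dependency (here, the kube-root-ca.crt ConfigMap) shows up.
func mountVolume(ready bool) error {
	if !ready {
		return errors.New(`object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered`)
	}
	return nil
}

func main() {
	const durationBeforeRetry = 2 * time.Second // matches the backoff in the log
	for attempt := 1; ; attempt++ {
		err := mountVolume(attempt >= 3) // pretend the cache populates on the 3rd try
		if err == nil {
			fmt.Printf("attempt %d: mount succeeded\n", attempt)
			return
		}
		fmt.Printf("attempt %d: %v; no retries permitted for %s\n", attempt, err, durationBeforeRetry)
		time.Sleep(durationBeforeRetry)
	}
}
```

The fixed 2s delay keeps the volume reconciler from hot-looping while informer caches warm up; the errors stop on their own once the ConfigMaps are cached.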
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.495240 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af"} Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.495625 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.506623 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.518338 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.538600 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.543340 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.543943 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9lhg\" (UniqueName: \"kubernetes.io/projected/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-kube-api-access-k9lhg\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 
06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.557847 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.557936 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.557973 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.558004 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.558072 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.558246 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
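[annotation] The "Error syncing pod, skipping" records just above fail for a different reason than the webhook: the runtime reports NetworkReady=false because /etc/kubernetes/cni/net.d/ has no CNI configuration yet (ovnkube-node is still in PodInitializing and has not written one). A rough Go sketch of that readiness test, assuming the runtime accepts the usual .conf/.conflist/.json extensions:

```go
// Illustrative sketch of the readiness test behind "no CNI configuration
// file in /etc/kubernetes/cni/net.d/": look for at least one config file
// in the CNI conf dir. The directory path is taken from the log.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func cniReady(confDir string) bool {
	entries, err := os.ReadDir(confDir)
	if err != nil {
		return false
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true
		}
	}
	return false
}

func main() {
	dir := "/etc/kubernetes/cni/net.d/"
	if !cniReady(dir) {
		fmt.Printf("NetworkReady=false: no CNI configuration file in %s\n", dir)
	}
}
```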
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.564100 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.565031 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.566665 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.567596 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.569008 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.569910 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.570841 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.572262 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.573199 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.574684 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.575074 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.575526 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.577118 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.578083 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.578828 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.580119 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.580919 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" 
path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.582574 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.583389 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.584649 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.586650 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.587788 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.589474 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.589734 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.590449 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.592157 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.592840 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.593756 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.595607 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.596345 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.600339 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.601300 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.602865 4971 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.603008 
4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.604975 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.606228 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.606705 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.608248 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.608504 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.609345 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.610214 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.611507 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.612441 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.613160 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.614405 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.615583 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.616344 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.617279 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.617986 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.618928 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.619698 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.620602 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.621102 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.621701 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.622398 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27
T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.622782 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.623343 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.624237 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.625563 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.629321 4971 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.634791 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.647016 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.661159 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.680795 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.687982 4971 configmap.go:193] Couldn't get configMap openshift-multus/cni-copy-resources: failed to sync configmap cache: timed out waiting for the condition Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.688027 4971 configmap.go:193] Couldn't get configMap openshift-multus/cni-copy-resources: failed to sync configmap cache: timed out waiting for the condition Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.688068 4971 configmap.go:193] Couldn't get configMap openshift-ovn-kubernetes/ovnkube-script-lib: failed to sync configmap cache: timed out waiting for the condition Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.688078 4971 secret.go:188] Couldn't get secret 
openshift-ovn-kubernetes/ovn-node-metrics-cert: failed to sync secret cache: timed out waiting for the condition Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.688077 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-cni-binary-copy podName:a2136014-aa8f-48e5-bccf-64cdd3cbc5f9 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:05.188057704 +0000 UTC m=+23.380101622 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cni-binary-copy" (UniqueName: "kubernetes.io/configmap/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-cni-binary-copy") pod "multus-lr9p6" (UID: "a2136014-aa8f-48e5-bccf-64cdd3cbc5f9") : failed to sync configmap cache: timed out waiting for the condition Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.688206 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-cni-binary-copy podName:e6ad47ab-7d74-49d8-8cd7-3261e36837a9 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:05.188171307 +0000 UTC m=+23.380215225 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cni-binary-copy" (UniqueName: "kubernetes.io/configmap/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-cni-binary-copy") pod "multus-additional-cni-plugins-w2fnl" (UID: "e6ad47ab-7d74-49d8-8cd7-3261e36837a9") : failed to sync configmap cache: timed out waiting for the condition Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.688224 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovn-node-metrics-cert podName:47c0abbf-6e9f-4bca-b3ca-bd896be15f2c nodeName:}" failed. No retries permitted until 2025-11-27 06:53:05.188216739 +0000 UTC m=+23.380260647 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovn-node-metrics-cert" (UniqueName: "kubernetes.io/secret/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovn-node-metrics-cert") pod "ovnkube-node-56nwb" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c") : failed to sync secret cache: timed out waiting for the condition Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.688259 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovnkube-script-lib podName:47c0abbf-6e9f-4bca-b3ca-bd896be15f2c nodeName:}" failed. No retries permitted until 2025-11-27 06:53:05.18825394 +0000 UTC m=+23.380297858 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "ovnkube-script-lib" (UniqueName: "kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovnkube-script-lib") pod "ovnkube-node-56nwb" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c") : failed to sync configmap cache: timed out waiting for the condition Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.690638 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.704070 4971 projected.go:288] Couldn't get configMap openshift-multus/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Nov 27 06:53:04 crc kubenswrapper[4971]: E1127 06:53:04.705130 4971 projected.go:288] Couldn't get configMap openshift-multus/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.707117 4971 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\
\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.723540 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.741183 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.758583 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resou
rces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.770335 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.779474 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.780729 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.796824 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.812600 4971 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.827845 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.849104 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.864987 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}
],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.878247 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.891644 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.903435 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:04Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.904511 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.905935 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Nov 27 06:53:04 crc kubenswrapper[4971]: I1127 06:53:04.953629 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.003356 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.035935 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.040861 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.043841 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Nov 27 06:53:05 crc kubenswrapper[4971]: E1127 06:53:05.044890 4971 projected.go:194] Error preparing data for projected volume kube-api-access-vc4z8 for pod openshift-multus/multus-additional-cni-plugins-w2fnl: failed to sync configmap cache: timed out waiting for the condition Nov 27 06:53:05 crc kubenswrapper[4971]: E1127 06:53:05.044989 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-kube-api-access-vc4z8 podName:e6ad47ab-7d74-49d8-8cd7-3261e36837a9 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:05.544960279 +0000 UTC m=+23.737004237 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-vc4z8" (UniqueName: "kubernetes.io/projected/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-kube-api-access-vc4z8") pod "multus-additional-cni-plugins-w2fnl" (UID: "e6ad47ab-7d74-49d8-8cd7-3261e36837a9") : failed to sync configmap cache: timed out waiting for the condition Nov 27 06:53:05 crc kubenswrapper[4971]: E1127 06:53:05.045265 4971 projected.go:194] Error preparing data for projected volume kube-api-access-5pmn4 for pod openshift-multus/multus-lr9p6: failed to sync configmap cache: timed out waiting for the condition Nov 27 06:53:05 crc kubenswrapper[4971]: E1127 06:53:05.045326 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-kube-api-access-5pmn4 podName:a2136014-aa8f-48e5-bccf-64cdd3cbc5f9 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:05.545310979 +0000 UTC m=+23.737354937 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-5pmn4" (UniqueName: "kubernetes.io/projected/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-kube-api-access-5pmn4") pod "multus-lr9p6" (UID: "a2136014-aa8f-48e5-bccf-64cdd3cbc5f9") : failed to sync configmap cache: timed out waiting for the condition Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.201995 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovn-node-metrics-cert\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.202047 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-cni-binary-copy\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.202111 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovnkube-script-lib\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.202131 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-cni-binary-copy\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.202893 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-cni-binary-copy\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.202958 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-cni-binary-copy\") pod \"multus-lr9p6\" (UID: 
\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.203395 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovnkube-script-lib\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.208083 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovn-node-metrics-cert\") pod \"ovnkube-node-56nwb\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.282735 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.367646 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-ckzrr"] Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.368226 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-ckzrr" Nov 27 06:53:05 crc kubenswrapper[4971]: W1127 06:53:05.371329 4971 reflector.go:561] object-"openshift-image-registry"/"image-registry-certificates": failed to list *v1.ConfigMap: configmaps "image-registry-certificates" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-image-registry": no relationship found between node 'crc' and this object Nov 27 06:53:05 crc kubenswrapper[4971]: W1127 06:53:05.371392 4971 reflector.go:561] object-"openshift-image-registry"/"node-ca-dockercfg-4777p": failed to list *v1.Secret: secrets "node-ca-dockercfg-4777p" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-image-registry": no relationship found between node 'crc' and this object Nov 27 06:53:05 crc kubenswrapper[4971]: E1127 06:53:05.371440 4971 reflector.go:158] "Unhandled Error" err="object-\"openshift-image-registry\"/\"node-ca-dockercfg-4777p\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"node-ca-dockercfg-4777p\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-image-registry\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 27 06:53:05 crc kubenswrapper[4971]: E1127 06:53:05.371396 4971 reflector.go:158] "Unhandled Error" err="object-\"openshift-image-registry\"/\"image-registry-certificates\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"image-registry-certificates\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-image-registry\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.371556 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.373050 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Nov 27 
06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.404849 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.436335 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: 
I1127 06:53:05.449630 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\
\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.463882 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.476766 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.503134 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.506046 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx7nr\" (UniqueName: \"kubernetes.io/projected/1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4-kube-api-access-xx7nr\") pod \"node-ca-ckzrr\" (UID: \"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\") " pod="openshift-image-registry/node-ca-ckzrr" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.506097 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4-serviceca\") pod \"node-ca-ckzrr\" (UID: \"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\") " pod="openshift-image-registry/node-ca-ckzrr" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.506116 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4-host\") pod \"node-ca-ckzrr\" (UID: \"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\") " pod="openshift-image-registry/node-ca-ckzrr" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.511495 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f"} Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.514152 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerStarted","Data":"8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a"} Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.514287 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerStarted","Data":"3b0bfc1c99b394f40dc7f888ce48e484b6104167251dbb2005702aac8a60e29b"} Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.516282 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.527283 4971 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.535612 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.548580 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.560639 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}
],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.605097 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.607112 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc4z8\" (UniqueName: \"kubernetes.io/projected/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-kube-api-access-vc4z8\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: 
\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.607198 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pmn4\" (UniqueName: \"kubernetes.io/projected/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-kube-api-access-5pmn4\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.607270 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx7nr\" (UniqueName: \"kubernetes.io/projected/1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4-kube-api-access-xx7nr\") pod \"node-ca-ckzrr\" (UID: \"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\") " pod="openshift-image-registry/node-ca-ckzrr" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.607333 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4-serviceca\") pod \"node-ca-ckzrr\" (UID: \"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\") " pod="openshift-image-registry/node-ca-ckzrr" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.607364 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4-host\") pod \"node-ca-ckzrr\" (UID: \"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\") " pod="openshift-image-registry/node-ca-ckzrr" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.607449 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4-host\") pod \"node-ca-ckzrr\" (UID: \"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\") " pod="openshift-image-registry/node-ca-ckzrr" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.619017 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pmn4\" (UniqueName: \"kubernetes.io/projected/a2136014-aa8f-48e5-bccf-64cdd3cbc5f9-kube-api-access-5pmn4\") pod \"multus-lr9p6\" (UID: \"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\") " pod="openshift-multus/multus-lr9p6" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.619253 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc4z8\" (UniqueName: \"kubernetes.io/projected/e6ad47ab-7d74-49d8-8cd7-3261e36837a9-kube-api-access-vc4z8\") pod \"multus-additional-cni-plugins-w2fnl\" (UID: \"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\") " pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.621137 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.621883 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xx7nr\" (UniqueName: \"kubernetes.io/projected/1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4-kube-api-access-xx7nr\") pod \"node-ca-ckzrr\" (UID: \"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\") " pod="openshift-image-registry/node-ca-ckzrr" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.643232 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.658171 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.683367 4971 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"o
vnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77
3257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\
\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.719405 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7
73257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.759338 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.800439 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.837687 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.862958 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.873045 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-lr9p6" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.875928 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource
-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: W1127 06:53:05.879192 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode6ad47ab_7d74_49d8_8cd7_3261e36837a9.slice/crio-22b1bda465b6f8b79117ecf73df6ef2713f9b2551cce3cc25865cfdacdfe977f WatchSource:0}: Error finding container 22b1bda465b6f8b79117ecf73df6ef2713f9b2551cce3cc25865cfdacdfe977f: Status 404 returned error can't find the container with id 22b1bda465b6f8b79117ecf73df6ef2713f9b2551cce3cc25865cfdacdfe977f Nov 27 06:53:05 crc kubenswrapper[4971]: W1127 06:53:05.889427 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda2136014_aa8f_48e5_bccf_64cdd3cbc5f9.slice/crio-ad1ee628e0b614ea260d9b15f0edb09954af769317b961ebcc5dd567fe5e7b30 WatchSource:0}: Error finding container ad1ee628e0b614ea260d9b15f0edb09954af769317b961ebcc5dd567fe5e7b30: Status 404 returned error can't find the container with id ad1ee628e0b614ea260d9b15f0edb09954af769317b961ebcc5dd567fe5e7b30 Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.921247 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with 
incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd36
7c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.957782 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:05 crc kubenswrapper[4971]: I1127 06:53:05.995747 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:05Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.040856 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.076548 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.112129 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:53:06 crc kubenswrapper[4971]: E1127 06:53:06.112315 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:53:10.112288949 +0000 UTC m=+28.304332867 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.119301 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.159290 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.213474 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.213515 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.213556 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.213575 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:06 crc kubenswrapper[4971]: E1127 06:53:06.213666 4971 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Nov 27 06:53:06 crc kubenswrapper[4971]: E1127 06:53:06.213692 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 27 06:53:06 crc kubenswrapper[4971]: E1127 06:53:06.213708 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 27 06:53:06 crc kubenswrapper[4971]: E1127 06:53:06.213718 4971 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:06 crc kubenswrapper[4971]: E1127 06:53:06.213748 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:10.213729077 +0000 UTC m=+28.405772995 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 27 06:53:06 crc kubenswrapper[4971]: E1127 06:53:06.213764 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:10.213758008 +0000 UTC m=+28.405801926 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:06 crc kubenswrapper[4971]: E1127 06:53:06.213777 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 27 06:53:06 crc kubenswrapper[4971]: E1127 06:53:06.213796 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 27 06:53:06 crc kubenswrapper[4971]: E1127 06:53:06.213803 4971 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:06 crc kubenswrapper[4971]: E1127 06:53:06.213832 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:10.213818839 +0000 UTC m=+28.405862757 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:06 crc kubenswrapper[4971]: E1127 06:53:06.213864 4971 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 27 06:53:06 crc kubenswrapper[4971]: E1127 06:53:06.213884 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:10.213878861 +0000 UTC m=+28.405922779 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.272951 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.279353 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4-serviceca\") pod \"node-ca-ckzrr\" (UID: \"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\") " pod="openshift-image-registry/node-ca-ckzrr" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.499800 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.504408 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-ckzrr" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.518911 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lr9p6" event={"ID":"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9","Type":"ContainerStarted","Data":"5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c"} Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.518963 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lr9p6" event={"ID":"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9","Type":"ContainerStarted","Data":"ad1ee628e0b614ea260d9b15f0edb09954af769317b961ebcc5dd567fe5e7b30"} Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.520821 4971 generic.go:334] "Generic (PLEG): container finished" podID="e6ad47ab-7d74-49d8-8cd7-3261e36837a9" containerID="2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148" exitCode=0 Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.520901 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" event={"ID":"e6ad47ab-7d74-49d8-8cd7-3261e36837a9","Type":"ContainerDied","Data":"2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148"} Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.520928 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" event={"ID":"e6ad47ab-7d74-49d8-8cd7-3261e36837a9","Type":"ContainerStarted","Data":"22b1bda465b6f8b79117ecf73df6ef2713f9b2551cce3cc25865cfdacdfe977f"} Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.525398 4971 generic.go:334] "Generic (PLEG): container finished" podID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerID="8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a" exitCode=0 Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.525467 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerDied","Data":"8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a"} Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.525551 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" 
event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerStarted","Data":"ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae"} Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.525567 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerStarted","Data":"cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57"} Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.525579 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerStarted","Data":"e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c"} Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.525591 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerStarted","Data":"f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d"} Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.525602 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerStarted","Data":"04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890"} Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.525613 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerStarted","Data":"de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc"} Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.535208 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.538045 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.548787 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.549284 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.549356 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:06 crc kubenswrapper[4971]: E1127 06:53:06.549399 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.549423 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:06 crc kubenswrapper[4971]: E1127 06:53:06.549524 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:06 crc kubenswrapper[4971]: E1127 06:53:06.549727 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.553417 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.556394 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.567002 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.580073 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.591667 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.611778 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.632473 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.648714 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.666683 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z 
is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.680950 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.692886 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.706610 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.740438 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.784771 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.818891 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9
fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.859801 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.897358 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.940463 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:06 crc kubenswrapper[4971]: I1127 06:53:06.977570 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:06Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.015646 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired 
or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.059645 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.097361 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.139451 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.176574 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/e
tc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.232653 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:5
2:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\
\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.281277 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.330525 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.348764 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.381423 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.530513 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-ckzrr" event={"ID":"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4","Type":"ContainerStarted","Data":"dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de"} Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.530577 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-ckzrr" event={"ID":"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4","Type":"ContainerStarted","Data":"4f1d28493a78f4300975e092575df2c2b2a438bb64bd2630161f18aa36b9b348"} Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.532188 4971 generic.go:334] "Generic (PLEG): container finished" podID="e6ad47ab-7d74-49d8-8cd7-3261e36837a9" containerID="16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661" exitCode=0 Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.532289 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" event={"ID":"e6ad47ab-7d74-49d8-8cd7-3261e36837a9","Type":"ContainerDied","Data":"16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661"} Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.550424 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.565376 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.579195 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.592658 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.603716 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.618319 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.656650 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.700305 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/hos
t/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\
\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.709409 4971 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.712130 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.712169 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.712180 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.712261 4971 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.740670 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.790548 4971 kubelet_node_status.go:115] "Node was previously registered" node="crc" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.790790 4971 kubelet_node_status.go:79] "Successfully registered node" node="crc" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.791759 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.791805 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.791820 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.791838 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.791853 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:07Z","lastTransitionTime":"2025-11-27T06:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:07 crc kubenswrapper[4971]: E1127 06:53:07.810193 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 
2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.814010 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.814093 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.814108 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.814126 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.814141 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:07Z","lastTransitionTime":"2025-11-27T06:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.817913 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:
05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: E1127 06:53:07.825823 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 
2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.829752 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.829789 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.829804 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.829825 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.829840 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:07Z","lastTransitionTime":"2025-11-27T06:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:07 crc kubenswrapper[4971]: E1127 06:53:07.842875 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 
2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.846141 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.846179 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.846194 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.846215 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.846230 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:07Z","lastTransitionTime":"2025-11-27T06:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:07 crc kubenswrapper[4971]: E1127 06:53:07.858139 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 
2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.863387 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/
etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025
-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.863866 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.863912 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.863922 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.863939 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.863952 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:07Z","lastTransitionTime":"2025-11-27T06:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:07 crc kubenswrapper[4971]: E1127 06:53:07.878436 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{...}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after
2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: E1127 06:53:07.878656 4971 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.880222 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.880269 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.880282 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.880302 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.880341 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:07Z","lastTransitionTime":"2025-11-27T06:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.898362 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.937121 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.982967 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.983107 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.983300 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:07Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.983451 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.983808 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:07 crc kubenswrapper[4971]: I1127 06:53:07.984217 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:07Z","lastTransitionTime":"2025-11-27T06:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.030829 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c
825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.059620 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.087842 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.087905 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.087923 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.087946 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.087963 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:08Z","lastTransitionTime":"2025-11-27T06:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.100926 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.141765 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.180992 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-rel
ease\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256
:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.190444 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.190520 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.190573 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.190606 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.190631 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:08Z","lastTransitionTime":"2025-11-27T06:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.223251 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.263309 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269
019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"
,\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.293924 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.294010 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.294025 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.294050 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.294066 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:08Z","lastTransitionTime":"2025-11-27T06:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.302035 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.342175 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.386961 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.396966 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.397027 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.397041 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.397068 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.397080 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:08Z","lastTransitionTime":"2025-11-27T06:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.421367 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c
825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.458500 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\
\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.499583 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.499629 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.499647 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.499669 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.499682 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:08Z","lastTransitionTime":"2025-11-27T06:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.502083 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.537319 4971 generic.go:334] "Generic (PLEG): container finished" podID="e6ad47ab-7d74-49d8-8cd7-3261e36837a9" containerID="21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0" exitCode=0 Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.537376 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" event={"ID":"e6ad47ab-7d74-49d8-8cd7-3261e36837a9","Type":"ContainerDied","Data":"21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0"} Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.545698 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" 
event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerStarted","Data":"d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349"} Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.545833 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\"
:[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.550892 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:08 crc kubenswrapper[4971]: E1127 06:53:08.551120 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.551559 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:08 crc kubenswrapper[4971]: E1127 06:53:08.551659 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.551715 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:08 crc kubenswrapper[4971]: E1127 06:53:08.551756 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.578652 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.603094 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.603148 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.603160 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.603181 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.603196 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:08Z","lastTransitionTime":"2025-11-27T06:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.618736 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.658279 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.700662 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.
126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.705567 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.705610 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.705621 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.705640 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.705653 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:08Z","lastTransitionTime":"2025-11-27T06:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.741608 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.781063 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.809332 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.809362 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.809370 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.809384 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.809394 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:08Z","lastTransitionTime":"2025-11-27T06:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.824264 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.874648 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"20
25-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.903018 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.911735 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.911769 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.911779 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.911795 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.911814 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:08Z","lastTransitionTime":"2025-11-27T06:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.938841 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:08 crc kubenswrapper[4971]: I1127 06:53:08.985038 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:08Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.018677 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.018754 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.018770 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.018990 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.019005 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:09Z","lastTransitionTime":"2025-11-27T06:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.020844 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.066669 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.108241 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.124438 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.124493 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.124511 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.124558 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.124579 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:09Z","lastTransitionTime":"2025-11-27T06:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.145632 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.179797 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.220896 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.227549 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.227592 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.227695 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.227722 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.227738 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:09Z","lastTransitionTime":"2025-11-27T06:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.331217 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.331263 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.331275 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.331295 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.331312 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:09Z","lastTransitionTime":"2025-11-27T06:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.435175 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.435225 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.435240 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.435263 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.435275 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:09Z","lastTransitionTime":"2025-11-27T06:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.538301 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.538367 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.538390 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.538417 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.538433 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:09Z","lastTransitionTime":"2025-11-27T06:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.553492 4971 generic.go:334] "Generic (PLEG): container finished" podID="e6ad47ab-7d74-49d8-8cd7-3261e36837a9" containerID="a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263" exitCode=0 Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.553568 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" event={"ID":"e6ad47ab-7d74-49d8-8cd7-3261e36837a9","Type":"ContainerDied","Data":"a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263"} Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.579563 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.613477 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.641331 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.642264 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.642312 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.642323 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.642343 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.642355 4971 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:09Z","lastTransitionTime":"2025-11-27T06:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.662571 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.677718 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.695587 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.709248 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.723177 4971 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.738079 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.744487 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.744583 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.744595 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.744617 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.744630 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:09Z","lastTransitionTime":"2025-11-27T06:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.760477 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/
run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\"
,\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.776246 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.790136 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.808749 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106
ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.823295 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.837036 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:09Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.847038 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.847076 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.847088 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.847105 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.847117 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:09Z","lastTransitionTime":"2025-11-27T06:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.949396 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.949441 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.949450 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.949468 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:09 crc kubenswrapper[4971]: I1127 06:53:09.949479 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:09Z","lastTransitionTime":"2025-11-27T06:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.052116 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.052176 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.052192 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.052214 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.052228 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:10Z","lastTransitionTime":"2025-11-27T06:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.154671 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.154773 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.154797 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.154830 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.154854 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:10Z","lastTransitionTime":"2025-11-27T06:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.158981 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:53:10 crc kubenswrapper[4971]: E1127 06:53:10.159217 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:53:18.159193459 +0000 UTC m=+36.351237387 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.257374 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.257434 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.257450 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.257473 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.257490 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:10Z","lastTransitionTime":"2025-11-27T06:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.260253 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.260317 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.260361 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.260410 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:10 crc kubenswrapper[4971]: E1127 06:53:10.260467 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 27 06:53:10 crc kubenswrapper[4971]: E1127 06:53:10.260494 4971 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 27 06:53:10 crc kubenswrapper[4971]: E1127 06:53:10.260621 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:18.260589866 +0000 UTC m=+36.452633844 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 27 06:53:10 crc kubenswrapper[4971]: E1127 06:53:10.260502 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 27 06:53:10 crc kubenswrapper[4971]: E1127 06:53:10.260683 4971 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:10 crc kubenswrapper[4971]: E1127 06:53:10.260737 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 27 06:53:10 crc kubenswrapper[4971]: E1127 06:53:10.260761 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:18.26072965 +0000 UTC m=+36.452773658 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:10 crc kubenswrapper[4971]: E1127 06:53:10.260505 4971 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 27 06:53:10 crc kubenswrapper[4971]: E1127 06:53:10.260813 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 27 06:53:10 crc kubenswrapper[4971]: E1127 06:53:10.260839 4971 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:10 crc kubenswrapper[4971]: E1127 06:53:10.260856 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:18.260839163 +0000 UTC m=+36.452883171 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 27 06:53:10 crc kubenswrapper[4971]: E1127 06:53:10.260901 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:18.260882244 +0000 UTC m=+36.452926202 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.361118 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.361190 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.361210 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.361239 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.361257 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:10Z","lastTransitionTime":"2025-11-27T06:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.464559 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.464943 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.464954 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.464972 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.464981 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:10Z","lastTransitionTime":"2025-11-27T06:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.549801 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:10 crc kubenswrapper[4971]: E1127 06:53:10.549985 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.550116 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:10 crc kubenswrapper[4971]: E1127 06:53:10.550233 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.550357 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:10 crc kubenswrapper[4971]: E1127 06:53:10.550587 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.561742 4971 generic.go:334] "Generic (PLEG): container finished" podID="e6ad47ab-7d74-49d8-8cd7-3261e36837a9" containerID="d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303" exitCode=0 Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.561831 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" event={"ID":"e6ad47ab-7d74-49d8-8cd7-3261e36837a9","Type":"ContainerDied","Data":"d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303"} Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.567705 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.567759 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.567777 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.567801 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.567821 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:10Z","lastTransitionTime":"2025-11-27T06:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.568726 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerStarted","Data":"e13377d592c751ef11995711e957b8d3e5f03b2bbde6a491ec87a9304406518a"} Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.569096 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.584221 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.601122 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.603156 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.621341 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":
{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.637259 4971 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.650464 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.670171 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.670213 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.670226 4971 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.670243 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.670256 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:10Z","lastTransitionTime":"2025-11-27T06:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.679650 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\"
:\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},
{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.695875 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.708643 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.723044 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.743563 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging 
kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-li
b\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\
\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.764696 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\
\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.772716 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.772753 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.772764 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.772780 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.772790 4971 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:10Z","lastTransitionTime":"2025-11-27T06:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.781279 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.795162 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.812849 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.827286 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.839608 4971 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.849696 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.864283 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.874813 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.874859 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.874869 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.874887 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.874898 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:10Z","lastTransitionTime":"2025-11-27T06:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.878566 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.891268 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.913837 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106
ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.928038 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.942195 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.956558 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.974307 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13377d592c751ef11995711e957b8d3e5f03b2b
bde6a491ec87a9304406518a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.977325 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.977368 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.977384 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.977407 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.977421 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:10Z","lastTransitionTime":"2025-11-27T06:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:10 crc kubenswrapper[4971]: I1127 06:53:10.992100 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:10Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.011499 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.031939 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.044640 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.056674 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.080297 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.080330 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.080340 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.080355 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.080366 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:11Z","lastTransitionTime":"2025-11-27T06:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.183009 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.183066 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.183083 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.183104 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.183113 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:11Z","lastTransitionTime":"2025-11-27T06:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.286791 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.286834 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.286854 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.286868 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.286878 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:11Z","lastTransitionTime":"2025-11-27T06:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.390267 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.390317 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.390329 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.390350 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.390361 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:11Z","lastTransitionTime":"2025-11-27T06:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.494104 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.494170 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.494186 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.494206 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.494219 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:11Z","lastTransitionTime":"2025-11-27T06:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.577891 4971 generic.go:334] "Generic (PLEG): container finished" podID="e6ad47ab-7d74-49d8-8cd7-3261e36837a9" containerID="d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd" exitCode=0 Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.577961 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" event={"ID":"e6ad47ab-7d74-49d8-8cd7-3261e36837a9","Type":"ContainerDied","Data":"d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd"} Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.578095 4971 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.578945 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.597280 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.597335 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.597355 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.597383 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.597402 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:11Z","lastTransitionTime":"2025-11-27T06:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.599897 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.673872 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.678379 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.693432 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-
v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.699985 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.700046 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.700068 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.700093 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.700109 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:11Z","lastTransitionTime":"2025-11-27T06:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.709738 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.723457 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.739012 4971 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.751948 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.767680 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d78
3d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.781758 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.795919 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.804023 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.804085 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.804098 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:11 crc 
kubenswrapper[4971]: I1127 06:53:11.804116 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.804127 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:11Z","lastTransitionTime":"2025-11-27T06:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.814850 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containe
rID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5ad
c8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.826810 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.837673 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.848450 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.864576 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\
"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-sock
et\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13377d592c751ef11995711e957b8d3e5f03b2bbde6a491ec87a9304406518a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mou
ntPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.876277 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.891710 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13377d592c751ef11995711e957b8d3e5f03b2b
bde6a491ec87a9304406518a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.902722 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.906399 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.906437 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.906448 4971 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.906465 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.906477 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:11Z","lastTransitionTime":"2025-11-27T06:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.914025 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.927103 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.938500 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.951724 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.965822 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:11 crc kubenswrapper[4971]: I1127 06:53:11.980576 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:11Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.008923 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.008990 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.009001 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.009023 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.009035 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:12Z","lastTransitionTime":"2025-11-27T06:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.019142 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.058863 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.097971 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.111986 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.112089 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.112109 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.112136 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.112153 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:12Z","lastTransitionTime":"2025-11-27T06:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.140427 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.189253 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\
\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f
1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.215130 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.215169 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.215180 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.215195 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.215212 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:12Z","lastTransitionTime":"2025-11-27T06:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.222335 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.317906 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.317945 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.317955 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.317973 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.317986 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:12Z","lastTransitionTime":"2025-11-27T06:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.421199 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.421261 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.421273 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.421291 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.421306 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:12Z","lastTransitionTime":"2025-11-27T06:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.526371 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.526430 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.526448 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.526470 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.526487 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:12Z","lastTransitionTime":"2025-11-27T06:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.550263 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:12 crc kubenswrapper[4971]: E1127 06:53:12.550399 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.550719 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:12 crc kubenswrapper[4971]: E1127 06:53:12.550781 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.550915 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:12 crc kubenswrapper[4971]: E1127 06:53:12.550980 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.561365 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" 
Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.592015 4971 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.592026 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" event={"ID":"e6ad47ab-7d74-49d8-8cd7-3261e36837a9","Type":"ContainerStarted","Data":"239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18"} Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.595407 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019b
ee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\
"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.613112 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.627022 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.628789 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.628822 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.628834 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.628851 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.628862 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:12Z","lastTransitionTime":"2025-11-27T06:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.643761 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13377d592c751ef11995711e957b8d3e5f03b2b
bde6a491ec87a9304406518a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.655433 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.665297 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.674130 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.688519 4971 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.698964 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.711948 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.724288 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.732681 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.732750 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.732774 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.732802 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.732821 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:12Z","lastTransitionTime":"2025-11-27T06:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.736220 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.777293 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.818937 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.837645 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.837680 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.837691 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.837704 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.837714 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:12Z","lastTransitionTime":"2025-11-27T06:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.871804 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp
-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272
e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.897090 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.935478 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.940215 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.940261 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.940271 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.940286 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.940296 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:12Z","lastTransitionTime":"2025-11-27T06:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:12 crc kubenswrapper[4971]: I1127 06:53:12.977638 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:12Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.026059 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13377d592c751ef11995711e957b8d3e5f03b2bbde6a491ec87a9304406518a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPat
h\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:13Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.043483 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.043528 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.043567 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.043589 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.043604 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:13Z","lastTransitionTime":"2025-11-27T06:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.062834 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:13Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.102781 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:13Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.141717 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:13Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.146643 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.146688 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.146701 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.146720 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.146733 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:13Z","lastTransitionTime":"2025-11-27T06:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.182610 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:13Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.215960 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:13Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.248863 4971 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.248900 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.248914 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.248930 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.248941 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:13Z","lastTransitionTime":"2025-11-27T06:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.258087 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:13Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.295817 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:13Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.341666 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:13Z is after 
2025-08-24T17:21:41Z" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.350729 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.350772 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.350785 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.350803 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.350816 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:13Z","lastTransitionTime":"2025-11-27T06:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.377397 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\
\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:13Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.415944 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:13Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.453007 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.453038 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.453046 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.453058 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.453066 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:13Z","lastTransitionTime":"2025-11-27T06:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.555386 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.555482 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.555518 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.555648 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.555692 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:13Z","lastTransitionTime":"2025-11-27T06:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.598247 4971 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.658795 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.658865 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.658884 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.658914 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.658935 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:13Z","lastTransitionTime":"2025-11-27T06:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.762477 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.762576 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.762602 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.762674 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.762694 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:13Z","lastTransitionTime":"2025-11-27T06:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.866113 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.866184 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.866205 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.866238 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.866254 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:13Z","lastTransitionTime":"2025-11-27T06:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.970715 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.970782 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.970795 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.970810 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:13 crc kubenswrapper[4971]: I1127 06:53:13.970822 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:13Z","lastTransitionTime":"2025-11-27T06:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.074142 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.074195 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.074216 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.074240 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.074255 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:14Z","lastTransitionTime":"2025-11-27T06:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.178218 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.178289 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.178305 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.178332 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.178348 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:14Z","lastTransitionTime":"2025-11-27T06:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.281985 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.282024 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.282033 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.282048 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.282058 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:14Z","lastTransitionTime":"2025-11-27T06:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.385631 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.385697 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.385714 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.385738 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.385755 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:14Z","lastTransitionTime":"2025-11-27T06:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.488649 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.488689 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.488698 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.488713 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.488725 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:14Z","lastTransitionTime":"2025-11-27T06:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.550834 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.550901 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.550833 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:14 crc kubenswrapper[4971]: E1127 06:53:14.550966 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:14 crc kubenswrapper[4971]: E1127 06:53:14.551097 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:14 crc kubenswrapper[4971]: E1127 06:53:14.551624 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.590825 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.590868 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.590877 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.590892 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.590903 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:14Z","lastTransitionTime":"2025-11-27T06:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.602421 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-56nwb_47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/ovnkube-controller/0.log" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.605428 4971 generic.go:334] "Generic (PLEG): container finished" podID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerID="e13377d592c751ef11995711e957b8d3e5f03b2bbde6a491ec87a9304406518a" exitCode=1 Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.605460 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerDied","Data":"e13377d592c751ef11995711e957b8d3e5f03b2bbde6a491ec87a9304406518a"} Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.606064 4971 scope.go:117] "RemoveContainer" containerID="e13377d592c751ef11995711e957b8d3e5f03b2bbde6a491ec87a9304406518a" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.625859 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13377d592c751ef11995711e957b8d3e5f03b2b
bde6a491ec87a9304406518a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13377d592c751ef11995711e957b8d3e5f03b2bbde6a491ec87a9304406518a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:13Z\\\",\\\"message\\\":\\\":13.718468 6253 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1127 06:53:13.718723 6253 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1127 06:53:13.718836 6253 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1127 06:53:13.719039 6253 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1127 06:53:13.719204 6253 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1127 06:53:13.719410 6253 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1127 06:53:13.719729 6253 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1127 06:53:13.720117 6253 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.641477 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.653825 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.669687 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.683254 4971 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.694433 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.694493 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.694506 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.694526 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.694556 4971 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:14Z","lastTransitionTime":"2025-11-27T06:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.698625 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.711302 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.723669 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.735611 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.738170 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns"] Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.739112 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.742577 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.743734 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.753067 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.765780 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.787974 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 
2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.797334 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.797371 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.797385 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.797407 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.797418 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:14Z","lastTransitionTime":"2025-11-27T06:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.809819 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b22ca58c-5169-4ae7-ac50-a6cfb48fa334-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-nxjns\" (UID: \"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.809854 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b22ca58c-5169-4ae7-ac50-a6cfb48fa334-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-nxjns\" (UID: \"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.809872 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzkwn\" (UniqueName: \"kubernetes.io/projected/b22ca58c-5169-4ae7-ac50-a6cfb48fa334-kube-api-access-xzkwn\") pod \"ovnkube-control-plane-749d76644c-nxjns\" (UID: \"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.809897 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b22ca58c-5169-4ae7-ac50-a6cfb48fa334-env-overrides\") pod \"ovnkube-control-plane-749d76644c-nxjns\" (UID: \"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.814490 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106
ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.826336 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.837396 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.849031 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.867626 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13377d592c751ef11995711e957b8d3e5f03b2b
bde6a491ec87a9304406518a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13377d592c751ef11995711e957b8d3e5f03b2bbde6a491ec87a9304406518a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:13Z\\\",\\\"message\\\":\\\":13.718468 6253 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1127 06:53:13.718723 6253 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1127 06:53:13.718836 6253 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1127 06:53:13.719039 6253 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1127 06:53:13.719204 6253 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1127 06:53:13.719410 6253 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1127 06:53:13.719729 6253 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1127 06:53:13.720117 6253 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.877920 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.890391 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.899470 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.899500 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.899509 4971 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.899525 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.899551 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:14Z","lastTransitionTime":"2025-11-27T06:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.904331 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.910559 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b22ca58c-5169-4ae7-ac50-a6cfb48fa334-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-nxjns\" (UID: \"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.910603 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b22ca58c-5169-4ae7-ac50-a6cfb48fa334-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-nxjns\" (UID: \"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.910627 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzkwn\" (UniqueName: \"kubernetes.io/projected/b22ca58c-5169-4ae7-ac50-a6cfb48fa334-kube-api-access-xzkwn\") pod \"ovnkube-control-plane-749d76644c-nxjns\" (UID: \"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.910657 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b22ca58c-5169-4ae7-ac50-a6cfb48fa334-env-overrides\") pod \"ovnkube-control-plane-749d76644c-nxjns\" (UID: \"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.911624 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b22ca58c-5169-4ae7-ac50-a6cfb48fa334-env-overrides\") pod \"ovnkube-control-plane-749d76644c-nxjns\" (UID: \"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.912151 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b22ca58c-5169-4ae7-ac50-a6cfb48fa334-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-nxjns\" (UID: \"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 
06:53:14.917346 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b22ca58c-5169-4ae7-ac50-a6cfb48fa334-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-nxjns\" (UID: \"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.924819 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.
168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.930092 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzkwn\" (UniqueName: \"kubernetes.io/projected/b22ca58c-5169-4ae7-ac50-a6cfb48fa334-kube-api-access-xzkwn\") pod \"ovnkube-control-plane-749d76644c-nxjns\" (UID: \"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.942244 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.960904 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:14 crc kubenswrapper[4971]: I1127 06:53:14.984714 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f43093
75ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:14Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.001731 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.001771 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.001781 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.001796 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.001806 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:15Z","lastTransitionTime":"2025-11-27T06:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.005073 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.015720 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.026543 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.036603 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.047152 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.054761 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.067973 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-
27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36
cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.082044 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.104254 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.104304 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.104314 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.104335 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.104348 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:15Z","lastTransitionTime":"2025-11-27T06:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:15 crc kubenswrapper[4971]: W1127 06:53:15.122915 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb22ca58c_5169_4ae7_ac50_a6cfb48fa334.slice/crio-c86c8dbb24c36b5efaa8127231a6dbc369307de51256416ea3ff656ece101420 WatchSource:0}: Error finding container c86c8dbb24c36b5efaa8127231a6dbc369307de51256416ea3ff656ece101420: Status 404 returned error can't find the container with id c86c8dbb24c36b5efaa8127231a6dbc369307de51256416ea3ff656ece101420 Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.207470 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.207508 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.207517 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.207544 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.207557 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:15Z","lastTransitionTime":"2025-11-27T06:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.309220 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.309257 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.309272 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.309288 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.309298 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:15Z","lastTransitionTime":"2025-11-27T06:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.411658 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.411695 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.411704 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.411717 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.411726 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:15Z","lastTransitionTime":"2025-11-27T06:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.513953 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.514019 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.514041 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.514072 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.514093 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:15Z","lastTransitionTime":"2025-11-27T06:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.610404 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" event={"ID":"b22ca58c-5169-4ae7-ac50-a6cfb48fa334","Type":"ContainerStarted","Data":"237653db3d61407da4c98734dc9cc8e349e67fabab8ad7fde3ceda2cf15e2bd1"} Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.610458 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" event={"ID":"b22ca58c-5169-4ae7-ac50-a6cfb48fa334","Type":"ContainerStarted","Data":"c86c8dbb24c36b5efaa8127231a6dbc369307de51256416ea3ff656ece101420"} Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.613052 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-56nwb_47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/ovnkube-controller/1.log" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.613591 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-56nwb_47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/ovnkube-controller/0.log" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.615908 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.615936 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.615947 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.615964 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.615975 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:15Z","lastTransitionTime":"2025-11-27T06:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.617597 4971 generic.go:334] "Generic (PLEG): container finished" podID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerID="31806e765969bacb5e0230e2f2868d6bb37910390507c99725d28c6fad548919" exitCode=1 Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.617650 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerDied","Data":"31806e765969bacb5e0230e2f2868d6bb37910390507c99725d28c6fad548919"} Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.617958 4971 scope.go:117] "RemoveContainer" containerID="e13377d592c751ef11995711e957b8d3e5f03b2bbde6a491ec87a9304406518a" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.619082 4971 scope.go:117] "RemoveContainer" containerID="31806e765969bacb5e0230e2f2868d6bb37910390507c99725d28c6fad548919" Nov 27 06:53:15 crc kubenswrapper[4971]: E1127 06:53:15.619241 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.635260 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.654912 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31806e765969bacb5e0230e2f2868d6bb37910390507c99725d28c6fad548919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13377d592c751ef11995711e957b8d3e5f03b2bbde6a491ec87a9304406518a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:13Z\\\",\\\"message\\\":\\\":13.718468 6253 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1127 06:53:13.718723 6253 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1127 06:53:13.718836 6253 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1127 06:53:13.719039 6253 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1127 06:53:13.719204 6253 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1127 06:53:13.719410 6253 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1127 06:53:13.719729 6253 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1127 06:53:13.720117 6253 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://31806e765969bacb5e0230e2f2868d6bb37910390507c99725d28c6fad548919\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:15Z\\\",\\\"message\\\":\\\"r because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z]\\\\nI1127 06:53:15.358596 6403 obj_retry.go:365] Adding new object: *v1.Pod openshift-etcd/etcd-crc\\\\nI1127 06:53:15.358594 6403 services_controller.go:356] Processing sync for service openshift-kube-scheduler/scheduler for network=default\\\\nI1127 06:53:15.358593 6403 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1127 06:53:15.358604 6403 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1127 06:53:15.358605 6403 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1127 06:53:15.358611 6403 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1127 06:53:15.358612 6403 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1127 06:53:15.358615 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.669895 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.685797 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.703550 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.718471 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.718526 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.718573 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.718598 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.718628 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:15Z","lastTransitionTime":"2025-11-27T06:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.721136 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.737106 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.753246 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.769489 4971 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.782164 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.803193 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.821431 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.821507 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:15 crc 
kubenswrapper[4971]: I1127 06:53:15.821520 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.821563 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.821579 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:15Z","lastTransitionTime":"2025-11-27T06:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.824074 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"n
ame\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.843202 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.12
6.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.873845 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8e
e7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.895101 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.908256 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.924353 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.924394 4971 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.924405 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.924424 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:15 crc kubenswrapper[4971]: I1127 06:53:15.924438 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:15Z","lastTransitionTime":"2025-11-27T06:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.027277 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.027513 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.027524 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.027561 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.027574 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:16Z","lastTransitionTime":"2025-11-27T06:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.130196 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.130238 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.130252 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.130270 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.130283 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:16Z","lastTransitionTime":"2025-11-27T06:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.232635 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.232690 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.232701 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.232721 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.232733 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:16Z","lastTransitionTime":"2025-11-27T06:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.335499 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.335556 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.335572 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.335600 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.335615 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:16Z","lastTransitionTime":"2025-11-27T06:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.437899 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.437939 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.437952 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.437968 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.437978 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:16Z","lastTransitionTime":"2025-11-27T06:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.540126 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.540175 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.540189 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.540208 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.540228 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:16Z","lastTransitionTime":"2025-11-27T06:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.549797 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.549839 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.549860 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:16 crc kubenswrapper[4971]: E1127 06:53:16.549939 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:16 crc kubenswrapper[4971]: E1127 06:53:16.550039 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:16 crc kubenswrapper[4971]: E1127 06:53:16.550119 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.622818 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-56nwb_47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/ovnkube-controller/1.log" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.626793 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.627557 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" event={"ID":"b22ca58c-5169-4ae7-ac50-a6cfb48fa334","Type":"ContainerStarted","Data":"cf345d1f97fcf7d514b9bb82e0cf811761950ad9bfe627b738b5271261ee0aa2"} Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.627581 4971 scope.go:117] "RemoveContainer" containerID="31806e765969bacb5e0230e2f2868d6bb37910390507c99725d28c6fad548919" Nov 27 06:53:16 crc kubenswrapper[4971]: E1127 06:53:16.627868 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.643387 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.643442 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.643455 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.643475 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.643487 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:16Z","lastTransitionTime":"2025-11-27T06:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.645336 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.660874 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.676601 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.695018 4971 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.708989 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.726843 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f43093
75ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.740431 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\
\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.745639 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.745690 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.745709 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.745734 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.745753 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:16Z","lastTransitionTime":"2025-11-27T06:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.750796 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.762254 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.772078 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.783828 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.802343 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106
ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.821341 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.835408 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.849260 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.849317 4971 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.849327 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.849345 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.849357 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:16Z","lastTransitionTime":"2025-11-27T06:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.853962 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31806e765969bacb5e0230e2f2868d6bb3791039
0507c99725d28c6fad548919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://31806e765969bacb5e0230e2f2868d6bb37910390507c99725d28c6fad548919\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:15Z\\\",\\\"message\\\":\\\"r because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z]\\\\nI1127 06:53:15.358596 6403 obj_retry.go:365] Adding new object: *v1.Pod openshift-etcd/etcd-crc\\\\nI1127 06:53:15.358594 6403 services_controller.go:356] Processing sync for service openshift-kube-scheduler/scheduler for network=default\\\\nI1127 06:53:15.358593 6403 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1127 06:53:15.358604 6403 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1127 06:53:15.358605 6403 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1127 06:53:15.358611 6403 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1127 06:53:15.358612 6403 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1127 06:53:15.358615 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.867561 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.888932 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106
ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.901117 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.906224 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-pdn5j"] Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.906711 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:16 crc kubenswrapper[4971]: E1127 06:53:16.906771 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.915908 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.930930 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.933379 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rj8cl\" (UniqueName: \"kubernetes.io/projected/e710c835-f600-448f-a110-4ff4cef9d5f6-kube-api-access-rj8cl\") pod \"network-metrics-daemon-pdn5j\" (UID: \"e710c835-f600-448f-a110-4ff4cef9d5f6\") " pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.933452 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs\") pod \"network-metrics-daemon-pdn5j\" (UID: \"e710c835-f600-448f-a110-4ff4cef9d5f6\") " pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.952488 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.952553 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.952569 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.952588 4971 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.952603 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:16Z","lastTransitionTime":"2025-11-27T06:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.954288 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31806e765969bacb5e0230e2f2868d6bb3791039
0507c99725d28c6fad548919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://31806e765969bacb5e0230e2f2868d6bb37910390507c99725d28c6fad548919\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:15Z\\\",\\\"message\\\":\\\"r because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z]\\\\nI1127 06:53:15.358596 6403 obj_retry.go:365] Adding new object: *v1.Pod openshift-etcd/etcd-crc\\\\nI1127 06:53:15.358594 6403 services_controller.go:356] Processing sync for service openshift-kube-scheduler/scheduler for network=default\\\\nI1127 06:53:15.358593 6403 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1127 06:53:15.358604 6403 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1127 06:53:15.358605 6403 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1127 06:53:15.358611 6403 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1127 06:53:15.358612 6403 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1127 06:53:15.358615 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.968016 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237653db3d61407da4c98734dc9cc8e349e67fabab8ad7fde3ceda2cf15e2bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf345d1f97fcf7d514b9bb82e0cf811761950ad9bfe627b738b5271261ee0aa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:16 crc kubenswrapper[4971]: I1127 06:53:16.984615 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.001210 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:16Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.016069 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.031594 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.034006 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs\") pod \"network-metrics-daemon-pdn5j\" (UID: \"e710c835-f600-448f-a110-4ff4cef9d5f6\") " pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.034122 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rj8cl\" (UniqueName: \"kubernetes.io/projected/e710c835-f600-448f-a110-4ff4cef9d5f6-kube-api-access-rj8cl\") pod \"network-metrics-daemon-pdn5j\" (UID: \"e710c835-f600-448f-a110-4ff4cef9d5f6\") " pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:17 crc kubenswrapper[4971]: E1127 06:53:17.034135 4971 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 27 06:53:17 crc kubenswrapper[4971]: E1127 06:53:17.034214 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs podName:e710c835-f600-448f-a110-4ff4cef9d5f6 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:17.534188622 +0000 UTC m=+35.726232540 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs") pod "network-metrics-daemon-pdn5j" (UID: "e710c835-f600-448f-a110-4ff4cef9d5f6") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.043459 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06
:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.058181 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.058306 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rj8cl\" (UniqueName: \"kubernetes.io/projected/e710c835-f600-448f-a110-4ff4cef9d5f6-kube-api-access-rj8cl\") pod \"network-metrics-daemon-pdn5j\" (UID: \"e710c835-f600-448f-a110-4ff4cef9d5f6\") " pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.059501 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.059605 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.059616 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.059635 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.059665 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:17Z","lastTransitionTime":"2025-11-27T06:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.071269 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.088140 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.103794 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.116634 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.131574 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.149364 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31806e765969bacb5e0230e2f2868d6bb3791039
0507c99725d28c6fad548919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://31806e765969bacb5e0230e2f2868d6bb37910390507c99725d28c6fad548919\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:15Z\\\",\\\"message\\\":\\\"r because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z]\\\\nI1127 06:53:15.358596 6403 obj_retry.go:365] Adding new object: *v1.Pod openshift-etcd/etcd-crc\\\\nI1127 06:53:15.358594 6403 services_controller.go:356] Processing sync for service openshift-kube-scheduler/scheduler for network=default\\\\nI1127 06:53:15.358593 6403 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1127 06:53:15.358604 6403 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1127 06:53:15.358605 6403 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1127 06:53:15.358611 6403 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1127 06:53:15.358612 6403 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1127 06:53:15.358615 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.160194 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237653db3d61407da4c98734dc9cc8e349e67fabab8ad7fde3ceda2cf15e2bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf345d1f97fcf7d514b9bb82e0cf811761950ad9bfe627b738b5271261ee0aa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.161746 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.161791 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.161804 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.161826 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.161837 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:17Z","lastTransitionTime":"2025-11-27T06:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.174014 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.185321 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.198327 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.211607 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.223655 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.237353 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pdn5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e710c835-f600-448f-a110-4ff4cef9d5f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pdn5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.252041 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.264047 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.264082 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.264091 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.264104 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.264114 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:17Z","lastTransitionTime":"2025-11-27T06:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.264950 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.282054 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.293890 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.303604 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.327678 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106
ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.338698 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.349275 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:17Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.366768 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.366803 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.366813 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.366827 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.366837 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:17Z","lastTransitionTime":"2025-11-27T06:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.468996 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.469039 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.469048 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.469060 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.469070 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:17Z","lastTransitionTime":"2025-11-27T06:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.539267 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs\") pod \"network-metrics-daemon-pdn5j\" (UID: \"e710c835-f600-448f-a110-4ff4cef9d5f6\") " pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:17 crc kubenswrapper[4971]: E1127 06:53:17.539482 4971 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 27 06:53:17 crc kubenswrapper[4971]: E1127 06:53:17.539583 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs podName:e710c835-f600-448f-a110-4ff4cef9d5f6 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:18.53956038 +0000 UTC m=+36.731604298 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs") pod "network-metrics-daemon-pdn5j" (UID: "e710c835-f600-448f-a110-4ff4cef9d5f6") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.571766 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.571806 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.571814 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.571831 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.571844 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:17Z","lastTransitionTime":"2025-11-27T06:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.673874 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.673913 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.673924 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.673939 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.673949 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:17Z","lastTransitionTime":"2025-11-27T06:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.775771 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.775799 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.775807 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.775821 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.775830 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:17Z","lastTransitionTime":"2025-11-27T06:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.877894 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.877938 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.877949 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.877966 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.877980 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:17Z","lastTransitionTime":"2025-11-27T06:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.980600 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.980665 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.980682 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.980699 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:17 crc kubenswrapper[4971]: I1127 06:53:17.980711 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:17Z","lastTransitionTime":"2025-11-27T06:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.057645 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.057681 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.057690 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.057704 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.057713 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:18Z","lastTransitionTime":"2025-11-27T06:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.071843 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:18Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.074914 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.074945 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.074954 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.074967 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.074977 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:18Z","lastTransitionTime":"2025-11-27T06:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.086325 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:18Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.090717 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.090760 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.090772 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.090789 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.090799 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:18Z","lastTransitionTime":"2025-11-27T06:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.101552 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:18Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.104234 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.104260 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.104272 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.104291 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.104303 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:18Z","lastTransitionTime":"2025-11-27T06:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.114612 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:18Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.117682 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.117731 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.117742 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.117759 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.117771 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:18Z","lastTransitionTime":"2025-11-27T06:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.128959 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:18Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.129103 4971 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.130623 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.130660 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.130669 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.130686 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.130697 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:18Z","lastTransitionTime":"2025-11-27T06:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.233889 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.233927 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.233935 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.233950 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.233959 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:18Z","lastTransitionTime":"2025-11-27T06:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.245512 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.245644 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:53:34.245622321 +0000 UTC m=+52.437666239 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.335758 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.335794 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.335804 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.335817 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.335826 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:18Z","lastTransitionTime":"2025-11-27T06:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.347219 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.347286 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.347342 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.347382 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.347346 4971 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.347412 4971 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.347754 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.347805 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.347825 4971 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.347725 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:34.347701457 +0000 UTC m=+52.539745395 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.347909 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:34.347894572 +0000 UTC m=+52.539938490 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.347926 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:34.347918103 +0000 UTC m=+52.539962021 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.347754 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.347948 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.347960 4971 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.347988 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:34.347981645 +0000 UTC m=+52.540025553 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.443207 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.443276 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.443295 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.443324 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.443342 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:18Z","lastTransitionTime":"2025-11-27T06:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.546175 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.546207 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.546217 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.546230 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.546241 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:18Z","lastTransitionTime":"2025-11-27T06:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.548825 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs\") pod \"network-metrics-daemon-pdn5j\" (UID: \"e710c835-f600-448f-a110-4ff4cef9d5f6\") " pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.548924 4971 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.548996 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs podName:e710c835-f600-448f-a110-4ff4cef9d5f6 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:20.548980137 +0000 UTC m=+38.741024055 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs") pod "network-metrics-daemon-pdn5j" (UID: "e710c835-f600-448f-a110-4ff4cef9d5f6") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.549233 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.549283 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.549390 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.549390 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.549468 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.549579 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.549652 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:18 crc kubenswrapper[4971]: E1127 06:53:18.549768 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.648370 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.648415 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.648432 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.648454 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.648471 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:18Z","lastTransitionTime":"2025-11-27T06:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.750637 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.750676 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.750685 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.750697 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.750706 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:18Z","lastTransitionTime":"2025-11-27T06:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.853339 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.853612 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.853622 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.853635 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.853646 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:18Z","lastTransitionTime":"2025-11-27T06:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.957117 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.957163 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.957174 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.957190 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:18 crc kubenswrapper[4971]: I1127 06:53:18.957200 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:18Z","lastTransitionTime":"2025-11-27T06:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.059895 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.059930 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.059940 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.059953 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.059965 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:19Z","lastTransitionTime":"2025-11-27T06:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.162567 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.162645 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.162656 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.162671 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.162696 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:19Z","lastTransitionTime":"2025-11-27T06:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.265172 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.265203 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.265211 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.265224 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.265233 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:19Z","lastTransitionTime":"2025-11-27T06:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.367334 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.367398 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.367415 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.367438 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.367456 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:19Z","lastTransitionTime":"2025-11-27T06:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.469521 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.469585 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.469599 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.469623 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.469671 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:19Z","lastTransitionTime":"2025-11-27T06:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.572698 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.572750 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.572766 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.572790 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.572807 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:19Z","lastTransitionTime":"2025-11-27T06:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.675125 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.675244 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.675263 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.675289 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.675306 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:19Z","lastTransitionTime":"2025-11-27T06:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.777669 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.777725 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.777741 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.777760 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.777772 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:19Z","lastTransitionTime":"2025-11-27T06:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.880309 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.880341 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.880352 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.880366 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.880374 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:19Z","lastTransitionTime":"2025-11-27T06:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.982972 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.983002 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.983010 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.983023 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:19 crc kubenswrapper[4971]: I1127 06:53:19.983034 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:19Z","lastTransitionTime":"2025-11-27T06:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.086108 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.086171 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.086189 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.086212 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.086230 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:20Z","lastTransitionTime":"2025-11-27T06:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.189282 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.189346 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.189363 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.189392 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.189411 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:20Z","lastTransitionTime":"2025-11-27T06:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.263055 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.288373 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt
\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:20Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.292654 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.292695 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.292706 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.292724 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.292736 4971 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:20Z","lastTransitionTime":"2025-11-27T06:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.310141 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:20Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.328576 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:20Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.350861 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:20Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.371249 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:20Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.392706 4971 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-pdn5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e710c835-f600-448f-a110-4ff4cef9d5f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pdn5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:20Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.396977 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.397042 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 
06:53:20.397063 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.397092 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.397113 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:20Z","lastTransitionTime":"2025-11-27T06:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.415895 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:20Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.433077 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:20Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.453576 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:20Z is after 
2025-08-24T17:21:41Z" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.473052 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"pod
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:20Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.489645 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:20Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.500409 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.500447 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:20 
crc kubenswrapper[4971]: I1127 06:53:20.500457 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.500475 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.500488 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:20Z","lastTransitionTime":"2025-11-27T06:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.521026 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:
43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:20Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.537889 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:20Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.550369 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:20 crc kubenswrapper[4971]: E1127 06:53:20.550566 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.550762 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.550900 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:20 crc kubenswrapper[4971]: E1127 06:53:20.551027 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.551085 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:20 crc kubenswrapper[4971]: E1127 06:53:20.551189 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:53:20 crc kubenswrapper[4971]: E1127 06:53:20.551260 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.556051 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:20Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.570731 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs\") pod \"network-metrics-daemon-pdn5j\" (UID: \"e710c835-f600-448f-a110-4ff4cef9d5f6\") " pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:20 crc kubenswrapper[4971]: E1127 06:53:20.570968 4971 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 27 06:53:20 crc kubenswrapper[4971]: E1127 
06:53:20.581650 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs podName:e710c835-f600-448f-a110-4ff4cef9d5f6 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:24.581614823 +0000 UTC m=+42.773658741 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs") pod "network-metrics-daemon-pdn5j" (UID: "e710c835-f600-448f-a110-4ff4cef9d5f6") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.582664 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:20Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.603083 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.603131 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.603142 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.603159 4971 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.603170 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:20Z","lastTransitionTime":"2025-11-27T06:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.603927 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31806e765969bacb5e0230e2f2868d6bb3791039
0507c99725d28c6fad548919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://31806e765969bacb5e0230e2f2868d6bb37910390507c99725d28c6fad548919\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:15Z\\\",\\\"message\\\":\\\"r because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z]\\\\nI1127 06:53:15.358596 6403 obj_retry.go:365] Adding new object: *v1.Pod openshift-etcd/etcd-crc\\\\nI1127 06:53:15.358594 6403 services_controller.go:356] Processing sync for service openshift-kube-scheduler/scheduler for network=default\\\\nI1127 06:53:15.358593 6403 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1127 06:53:15.358604 6403 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1127 06:53:15.358605 6403 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1127 06:53:15.358611 6403 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1127 06:53:15.358612 6403 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1127 06:53:15.358615 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:20Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.619955 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237653db3d61407da4c98734dc9cc8e349e67fabab8ad7fde3ceda2cf15e2bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf345d1f97fcf7d514b9bb82e0cf811761950ad9bfe627b738b5271261ee0aa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:20Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.705692 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.705736 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.705745 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.705763 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.705772 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:20Z","lastTransitionTime":"2025-11-27T06:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.808386 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.808429 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.808443 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.808462 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.808475 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:20Z","lastTransitionTime":"2025-11-27T06:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.911376 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.911434 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.911447 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.911468 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:20 crc kubenswrapper[4971]: I1127 06:53:20.911482 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:20Z","lastTransitionTime":"2025-11-27T06:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.014815 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.015056 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.015086 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.015117 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.015139 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:21Z","lastTransitionTime":"2025-11-27T06:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.117932 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.117977 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.117987 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.118001 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.118011 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:21Z","lastTransitionTime":"2025-11-27T06:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.220630 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.220688 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.220702 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.220721 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.220731 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:21Z","lastTransitionTime":"2025-11-27T06:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.323164 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.323201 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.323210 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.323222 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.323231 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:21Z","lastTransitionTime":"2025-11-27T06:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.426369 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.426426 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.426444 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.426464 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.426480 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:21Z","lastTransitionTime":"2025-11-27T06:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.529924 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.530004 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.530028 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.530057 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.530083 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:21Z","lastTransitionTime":"2025-11-27T06:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.633771 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.633840 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.633859 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.633887 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.633909 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:21Z","lastTransitionTime":"2025-11-27T06:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.737046 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.737094 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.737108 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.737125 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.737138 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:21Z","lastTransitionTime":"2025-11-27T06:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.840135 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.840214 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.840239 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.840271 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.840295 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:21Z","lastTransitionTime":"2025-11-27T06:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.943389 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.943436 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.943451 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.943470 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:21 crc kubenswrapper[4971]: I1127 06:53:21.943484 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:21Z","lastTransitionTime":"2025-11-27T06:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.045990 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.046035 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.046047 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.046064 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.046076 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:22Z","lastTransitionTime":"2025-11-27T06:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.148785 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.148862 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.148887 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.148918 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.148940 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:22Z","lastTransitionTime":"2025-11-27T06:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.251785 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.251823 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.251835 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.251852 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.251862 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:22Z","lastTransitionTime":"2025-11-27T06:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.358974 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.359044 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.359061 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.359098 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.359137 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:22Z","lastTransitionTime":"2025-11-27T06:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.461660 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.461741 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.461771 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.461797 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.461815 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:22Z","lastTransitionTime":"2025-11-27T06:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.550351 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:22 crc kubenswrapper[4971]: E1127 06:53:22.550518 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.551410 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:22 crc kubenswrapper[4971]: E1127 06:53:22.551611 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.551707 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:22 crc kubenswrapper[4971]: E1127 06:53:22.551830 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.551928 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:22 crc kubenswrapper[4971]: E1127 06:53:22.552016 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.563625 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.563665 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.563686 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.563708 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.563719 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:22Z","lastTransitionTime":"2025-11-27T06:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.573669 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:22Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.590083 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:22Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.606306 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pdn5j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e710c835-f600-448f-a110-4ff4cef9d5f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pdn5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:22Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.624919 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:22Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.638294 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:22Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.655116 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:22Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.665483 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.665591 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:22 crc 
kubenswrapper[4971]: I1127 06:53:22.665616 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.665647 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.665671 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:22Z","lastTransitionTime":"2025-11-27T06:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.678713 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:22Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.690811 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:22Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.701695 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:22Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.723827 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31806e765969bacb5e0230e2f2868d6bb37910390507c99725d28c6fad548919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://31806e765969bacb5e0230e2f2868d6bb37910390507c99725d28c6fad548919\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:15Z\\\",\\\"message\\\":\\\"r because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z]\\\\nI1127 06:53:15.358596 6403 obj_retry.go:365] Adding new object: *v1.Pod openshift-etcd/etcd-crc\\\\nI1127 06:53:15.358594 6403 services_controller.go:356] Processing sync for service openshift-kube-scheduler/scheduler for network=default\\\\nI1127 06:53:15.358593 6403 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1127 06:53:15.358604 6403 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1127 06:53:15.358605 6403 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1127 06:53:15.358611 6403 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1127 06:53:15.358612 6403 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1127 06:53:15.358615 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:22Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.738557 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237653db3d61407da4c98734dc9cc8e349e67fabab8ad7fde3ceda2cf15e2bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf345d1f97fcf7d514b9bb82e0cf811761950ad9bfe627b738b5271261ee0aa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:22Z is after 2025-08-24T17:21:41Z" Nov 27 
06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.755040 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:22Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.768329 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:22Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.768991 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.769022 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.769031 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.769045 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.769055 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:22Z","lastTransitionTime":"2025-11-27T06:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.781148 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:22Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.793344 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:22Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.806595 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:22Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.819773 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:22Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.871322 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.871961 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.872087 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.872188 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.872272 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:22Z","lastTransitionTime":"2025-11-27T06:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.975406 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.975451 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.975464 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.975514 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:22 crc kubenswrapper[4971]: I1127 06:53:22.975527 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:22Z","lastTransitionTime":"2025-11-27T06:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.077938 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.078011 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.078033 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.078067 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.078094 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:23Z","lastTransitionTime":"2025-11-27T06:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.182000 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.182064 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.182078 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.182097 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.182112 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:23Z","lastTransitionTime":"2025-11-27T06:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.285317 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.285389 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.285413 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.285444 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.285465 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:23Z","lastTransitionTime":"2025-11-27T06:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.388722 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.388769 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.388782 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.388802 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.388814 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:23Z","lastTransitionTime":"2025-11-27T06:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.491819 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.491878 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.491894 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.491919 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.491951 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:23Z","lastTransitionTime":"2025-11-27T06:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.594292 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.594339 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.594355 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.594374 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.594388 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:23Z","lastTransitionTime":"2025-11-27T06:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.697065 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.697104 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.697117 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.697145 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.697171 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:23Z","lastTransitionTime":"2025-11-27T06:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.799345 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.799420 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.799442 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.799482 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.799506 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:23Z","lastTransitionTime":"2025-11-27T06:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.902107 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.902178 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.902200 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.902233 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:23 crc kubenswrapper[4971]: I1127 06:53:23.902254 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:23Z","lastTransitionTime":"2025-11-27T06:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.004096 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.004144 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.004157 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.004179 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.004192 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:24Z","lastTransitionTime":"2025-11-27T06:53:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.106643 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.106675 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.106683 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.106702 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.106710 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:24Z","lastTransitionTime":"2025-11-27T06:53:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.209154 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.209222 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.209239 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.209264 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.209281 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:24Z","lastTransitionTime":"2025-11-27T06:53:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.311333 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.311381 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.311397 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.311420 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.311436 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:24Z","lastTransitionTime":"2025-11-27T06:53:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.414299 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.414388 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.414420 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.414451 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.414475 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:24Z","lastTransitionTime":"2025-11-27T06:53:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.516919 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.516953 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.516967 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.516984 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.516993 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:24Z","lastTransitionTime":"2025-11-27T06:53:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.553841 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:24 crc kubenswrapper[4971]: E1127 06:53:24.553997 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.554471 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:24 crc kubenswrapper[4971]: E1127 06:53:24.554605 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.554858 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:24 crc kubenswrapper[4971]: E1127 06:53:24.554935 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.555000 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:24 crc kubenswrapper[4971]: E1127 06:53:24.555140 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.619657 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.619694 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.619703 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.619717 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.619726 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:24Z","lastTransitionTime":"2025-11-27T06:53:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:24 crc kubenswrapper[4971]: I1127 06:53:24.624314 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs\") pod \"network-metrics-daemon-pdn5j\" (UID: \"e710c835-f600-448f-a110-4ff4cef9d5f6\") " pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:24 crc kubenswrapper[4971]: E1127 06:53:24.624592 4971 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 27 06:53:24 crc kubenswrapper[4971]: E1127 06:53:24.624737 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs podName:e710c835-f600-448f-a110-4ff4cef9d5f6 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:32.624711975 +0000 UTC m=+50.816755893 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs") pod "network-metrics-daemon-pdn5j" (UID: "e710c835-f600-448f-a110-4ff4cef9d5f6") : object "openshift-multus"/"metrics-daemon-secret" not registered
[... 16 identical node-status loops from 06:53:24.722260 through 06:53:26.269398 elided ...]
Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.371842 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.371917 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.371940 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.371973 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.372001 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:26Z","lastTransitionTime":"2025-11-27T06:53:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.475490 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.475564 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.475583 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.475610 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.475627 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:26Z","lastTransitionTime":"2025-11-27T06:53:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.550108 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.550143 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.550337 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:26 crc kubenswrapper[4971]: E1127 06:53:26.550321 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.550413 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:26 crc kubenswrapper[4971]: E1127 06:53:26.550479 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:53:26 crc kubenswrapper[4971]: E1127 06:53:26.551083 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:26 crc kubenswrapper[4971]: E1127 06:53:26.551067 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.578959 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.579031 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.579054 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.579078 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.579095 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:26Z","lastTransitionTime":"2025-11-27T06:53:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.681782 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.681927 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.681998 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.682080 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.682111 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:26Z","lastTransitionTime":"2025-11-27T06:53:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.785272 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.785342 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.785360 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.785383 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.785402 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:26Z","lastTransitionTime":"2025-11-27T06:53:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.888149 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.888235 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.888258 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.888288 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.888311 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:26Z","lastTransitionTime":"2025-11-27T06:53:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.991774 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.991843 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.991926 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.991954 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:26 crc kubenswrapper[4971]: I1127 06:53:26.992027 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:26Z","lastTransitionTime":"2025-11-27T06:53:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.095842 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.095980 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.096002 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.096033 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.096054 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:27Z","lastTransitionTime":"2025-11-27T06:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.198201 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.198246 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.198257 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.198273 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.198283 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:27Z","lastTransitionTime":"2025-11-27T06:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.300174 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.300230 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.300245 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.300268 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.300286 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:27Z","lastTransitionTime":"2025-11-27T06:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.403373 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.403417 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.403427 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.403444 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.403455 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:27Z","lastTransitionTime":"2025-11-27T06:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.506842 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.506905 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.506924 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.506953 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.506976 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:27Z","lastTransitionTime":"2025-11-27T06:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.610775 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.610844 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.610868 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.610898 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.610920 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:27Z","lastTransitionTime":"2025-11-27T06:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.712910 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.712977 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.713000 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.713026 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.713043 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:27Z","lastTransitionTime":"2025-11-27T06:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.815058 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.815099 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.815109 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.815125 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.815137 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:27Z","lastTransitionTime":"2025-11-27T06:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.917327 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.917363 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.917374 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.917388 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:27 crc kubenswrapper[4971]: I1127 06:53:27.917399 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:27Z","lastTransitionTime":"2025-11-27T06:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.019114 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.019146 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.019156 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.019169 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.019179 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:28Z","lastTransitionTime":"2025-11-27T06:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.122015 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.122052 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.122061 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.122078 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.122090 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:28Z","lastTransitionTime":"2025-11-27T06:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.225680 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.225732 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.225748 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.225771 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.225787 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:28Z","lastTransitionTime":"2025-11-27T06:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.328430 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.328469 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.328477 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.328508 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.328518 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:28Z","lastTransitionTime":"2025-11-27T06:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.430500 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.430566 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.430577 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.430594 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.430606 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:28Z","lastTransitionTime":"2025-11-27T06:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.460086 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.460166 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.460199 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.460230 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.460250 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:28Z","lastTransitionTime":"2025-11-27T06:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:28 crc kubenswrapper[4971]: E1127 06:53:28.475081 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.478636 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.478671 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.478680 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.478695 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.478704 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:28Z","lastTransitionTime":"2025-11-27T06:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:28 crc kubenswrapper[4971]: E1127 06:53:28.492351 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.495797 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.495838 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.495850 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.495868 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.495880 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:28Z","lastTransitionTime":"2025-11-27T06:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:28 crc kubenswrapper[4971]: E1127 06:53:28.508336 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.512228 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.512264 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.512273 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.512289 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.512303 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:28Z","lastTransitionTime":"2025-11-27T06:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:28 crc kubenswrapper[4971]: E1127 06:53:28.524014 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.527139 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.527186 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.527198 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.527218 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.527230 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:28Z","lastTransitionTime":"2025-11-27T06:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:28 crc kubenswrapper[4971]: E1127 06:53:28.538467 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: E1127 06:53:28.538650 4971 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.539965 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.540005 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.540022 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.540040 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.540052 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:28Z","lastTransitionTime":"2025-11-27T06:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.550246 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.550262 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.550324 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.550254 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:28 crc kubenswrapper[4971]: E1127 06:53:28.550372 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:28 crc kubenswrapper[4971]: E1127 06:53:28.550705 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:28 crc kubenswrapper[4971]: E1127 06:53:28.550788 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:28 crc kubenswrapper[4971]: E1127 06:53:28.550872 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.551015 4971 scope.go:117] "RemoveContainer" containerID="31806e765969bacb5e0230e2f2868d6bb37910390507c99725d28c6fad548919" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.641948 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.642241 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.642249 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.642265 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.642275 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:28Z","lastTransitionTime":"2025-11-27T06:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.676122 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-56nwb_47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/ovnkube-controller/1.log" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.679348 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerStarted","Data":"2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27"} Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.681426 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.702845 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-cer
ts\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.714105 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.724931 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.734051 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237653db3d61407da4c98734dc9cc8e349e67fabab8ad7fde3ceda2cf15e2bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf345d1f97fcf7d514b9bb82e0cf811761950ad9bfe627b738b5271261ee0aa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:
14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.745085 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.745112 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.745123 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.745136 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.745145 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:28Z","lastTransitionTime":"2025-11-27T06:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.745764 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.764759 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\
\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acces
s-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://31806e765969bacb5e0230e2f2868d6bb37910390507c99725d28c6fad548919\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:15Z\\\",\\\"message\\\":\\\"r because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z]\\\\nI1127 06:53:15.358596 6403 obj_retry.go:365] Adding new object: *v1.Pod openshift-etcd/etcd-crc\\\\nI1127 06:53:15.358594 6403 services_controller.go:356] Processing sync for service openshift-kube-scheduler/scheduler for network=default\\\\nI1127 06:53:15.358593 6403 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1127 06:53:15.358604 6403 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1127 06:53:15.358605 6403 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1127 06:53:15.358611 6403 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1127 06:53:15.358612 6403 obj_retry.go:365] Adding new object: *v1.Pod 
openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1127 06:53:15.358615 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hos
tIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.775865 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.788149 4971 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e4040
2aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.799349 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.810013 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.821046 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.833822 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.847311 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.847401 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.847410 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.847431 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.847442 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:28Z","lastTransitionTime":"2025-11-27T06:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.849318 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pdn5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e710c835-f600-448f-a110-4ff4cef9d5f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pdn5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.868061 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.887038 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.908637 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 
2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.924522 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"pod
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:28Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.950260 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.950303 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.950313 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.950327 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:28 crc kubenswrapper[4971]: I1127 06:53:28.950336 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:28Z","lastTransitionTime":"2025-11-27T06:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.053108 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.053144 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.053152 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.053167 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.053178 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:29Z","lastTransitionTime":"2025-11-27T06:53:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.155437 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.155475 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.155484 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.155499 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.155509 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:29Z","lastTransitionTime":"2025-11-27T06:53:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.257455 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.257499 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.257509 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.257523 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.257554 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:29Z","lastTransitionTime":"2025-11-27T06:53:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.359476 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.359527 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.359557 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.359576 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.359588 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:29Z","lastTransitionTime":"2025-11-27T06:53:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.462504 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.462589 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.462607 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.462629 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.462645 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:29Z","lastTransitionTime":"2025-11-27T06:53:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.564742 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.564790 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.564806 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.564831 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.564846 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:29Z","lastTransitionTime":"2025-11-27T06:53:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.666746 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.666796 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.666808 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.666825 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.666838 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:29Z","lastTransitionTime":"2025-11-27T06:53:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.684871 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-56nwb_47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/ovnkube-controller/2.log" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.685462 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-56nwb_47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/ovnkube-controller/1.log" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.687714 4971 generic.go:334] "Generic (PLEG): container finished" podID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerID="2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27" exitCode=1 Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.687775 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerDied","Data":"2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27"} Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.687824 4971 scope.go:117] "RemoveContainer" containerID="31806e765969bacb5e0230e2f2868d6bb37910390507c99725d28c6fad548919" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.688650 4971 scope.go:117] "RemoveContainer" containerID="2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27" Nov 27 06:53:29 crc kubenswrapper[4971]: E1127 06:53:29.688898 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.721136 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106
ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:29Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.735667 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:29Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.747931 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:29Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.757708 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237653db3d61407da4c98734dc9cc8e349e67fabab8ad7fde3ceda2cf15e2bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf345d1f97fcf7d514b9bb82e0cf811761950ad9bfe627b738b5271261ee0aa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:29Z is after 2025-08-24T17:21:41Z" Nov 27 
06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.768773 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.768805 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.768815 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.768831 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.768841 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:29Z","lastTransitionTime":"2025-11-27T06:53:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.770857 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:29Z is after 2025-08-24T17:21:41Z"
Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 
06:53:29.786807 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\
\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77325745
3265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://31806e765969bacb5e0230e2f2868d6bb37910390507c99725d28c6fad548919\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:15Z\\\",\\\"message\\\":\\\"r because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:15Z is after 2025-08-24T17:21:41Z]\\\\nI1127 06:53:15.358596 6403 obj_retry.go:365] Adding new object: *v1.Pod openshift-etcd/etcd-crc\\\\nI1127 06:53:15.358594 6403 services_controller.go:356] Processing sync for service openshift-kube-scheduler/scheduler for network=default\\\\nI1127 06:53:15.358593 6403 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1127 06:53:15.358604 6403 ovn.go:134] Ensuring zone local for Pod openshift-etcd/etcd-crc in node crc\\\\nI1127 06:53:15.358605 6403 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1127 06:53:15.358611 6403 obj_retry.go:386] Retry successful for *v1.Pod openshift-etcd/etcd-crc after 0 failed attempt(s)\\\\nI1127 06:53:15.358612 6403 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI1127 06:53:15.358615 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:29Z\\\",\\\"message\\\":\\\"ncer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1127 06:53:29.268703 6621 services_controller.go:451] Built service openshift-apiserver-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.38\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1127 06:53:29.268155 6621 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1127 06:53:29.267856 6621 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} name:Service_openshift-machine-api/machine-api-operator-machine-webhook_TCP_cluster 
options:{GoMap:map[event:f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sh
a256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:29Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.797005 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:29Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.810858 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:29Z 
is after 2025-08-24T17:21:41Z" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.823892 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:29Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.836633 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:29Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.849042 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:29Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.859187 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:29Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.868859 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pdn5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e710c835-f600-448f-a110-4ff4cef9d5f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pdn5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:29Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.870971 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.871012 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.871024 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.871040 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.871054 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:29Z","lastTransitionTime":"2025-11-27T06:53:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.880384 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:29Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.889617 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:29Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.900890 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:29Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.913370 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:29Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.973587 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.973871 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.973945 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.974006 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:29 crc kubenswrapper[4971]: I1127 06:53:29.974067 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:29Z","lastTransitionTime":"2025-11-27T06:53:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.076981 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.077054 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.077073 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.077100 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.077117 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:30Z","lastTransitionTime":"2025-11-27T06:53:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.180028 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.180090 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.180113 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.180139 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.180157 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:30Z","lastTransitionTime":"2025-11-27T06:53:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.284239 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.284294 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.284309 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.284331 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.284345 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:30Z","lastTransitionTime":"2025-11-27T06:53:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.387623 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.387657 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.387666 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.387682 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.387692 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:30Z","lastTransitionTime":"2025-11-27T06:53:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.490480 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.490524 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.490560 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.490579 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.490590 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:30Z","lastTransitionTime":"2025-11-27T06:53:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.550026 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.550078 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.550140 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:30 crc kubenswrapper[4971]: E1127 06:53:30.550252 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.550369 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:30 crc kubenswrapper[4971]: E1127 06:53:30.550500 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:30 crc kubenswrapper[4971]: E1127 06:53:30.550667 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:30 crc kubenswrapper[4971]: E1127 06:53:30.550872 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.593398 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.593459 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.593471 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.593492 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.593511 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:30Z","lastTransitionTime":"2025-11-27T06:53:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.693787 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-56nwb_47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/ovnkube-controller/2.log" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.696348 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.696388 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.696400 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.696419 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.696433 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:30Z","lastTransitionTime":"2025-11-27T06:53:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.704347 4971 scope.go:117] "RemoveContainer" containerID="2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27" Nov 27 06:53:30 crc kubenswrapper[4971]: E1127 06:53:30.704654 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.719735 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-
cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:30Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.732717 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:30Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.747509 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:30Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.761242 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:30Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.773836 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:30Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.785996 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:30Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.798106 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:30Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.799613 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.799720 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.799737 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.799760 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.799778 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:30Z","lastTransitionTime":"2025-11-27T06:53:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.815554 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1
b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\"
,\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\
\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:30Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.830121 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\
"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:30Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.841453 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:30Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.855269 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pdn5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e710c835-f600-448f-a110-4ff4cef9d5f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pdn5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:30Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.877835 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106
ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:30Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.890944 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:30Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.901969 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.902016 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.902029 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.902046 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.902060 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:30Z","lastTransitionTime":"2025-11-27T06:53:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.903884 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:30Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.923111 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:30Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.944523 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f979974fc6abc523b9818905ba1547fe4da8a7c
17e5a149838788bb91940a27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:29Z\\\",\\\"message\\\":\\\"ncer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1127 06:53:29.268703 6621 services_controller.go:451] Built service openshift-apiserver-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.38\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1127 06:53:29.268155 6621 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1127 06:53:29.267856 6621 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} name:Service_openshift-machine-api/machine-api-operator-machine-webhook_TCP_cluster options:{GoMap:map[event:f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:30Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:30 crc kubenswrapper[4971]: I1127 06:53:30.961244 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237653db3d61407da4c98734dc9cc8e349e67fabab8ad7fde3ceda2cf15e2bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf345d1f97fcf7d514b9bb82e0cf811761950ad9bfe627b738b5271261ee0aa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:30Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.004641 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.004678 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.004691 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.004708 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.004721 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:31Z","lastTransitionTime":"2025-11-27T06:53:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.108790 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.108857 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.108881 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.108909 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.108933 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:31Z","lastTransitionTime":"2025-11-27T06:53:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.212633 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.212702 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.212725 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.212751 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.212772 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:31Z","lastTransitionTime":"2025-11-27T06:53:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.315754 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.315806 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.315822 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.315845 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.315860 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:31Z","lastTransitionTime":"2025-11-27T06:53:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.418004 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.418041 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.418055 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.418070 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.418082 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:31Z","lastTransitionTime":"2025-11-27T06:53:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.520455 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.520513 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.520570 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.520597 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.520615 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:31Z","lastTransitionTime":"2025-11-27T06:53:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.623072 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.623126 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.623141 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.623162 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.623175 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:31Z","lastTransitionTime":"2025-11-27T06:53:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.725105 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.725152 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.725168 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.725186 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.725200 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:31Z","lastTransitionTime":"2025-11-27T06:53:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.827366 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.827432 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.827456 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.827487 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.827510 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:31Z","lastTransitionTime":"2025-11-27T06:53:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.930740 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.930823 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.930849 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.930875 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:31 crc kubenswrapper[4971]: I1127 06:53:31.930891 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:31Z","lastTransitionTime":"2025-11-27T06:53:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.033708 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.033773 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.033798 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.033826 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.033846 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:32Z","lastTransitionTime":"2025-11-27T06:53:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.137457 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.137508 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.137525 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.137584 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.137608 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:32Z","lastTransitionTime":"2025-11-27T06:53:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.240715 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.240767 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.240786 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.240809 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.240826 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:32Z","lastTransitionTime":"2025-11-27T06:53:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.343669 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.343795 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.343821 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.343850 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.343869 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:32Z","lastTransitionTime":"2025-11-27T06:53:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.447754 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.447792 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.447803 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.447820 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.447830 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:32Z","lastTransitionTime":"2025-11-27T06:53:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.549820 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.549862 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:32 crc kubenswrapper[4971]: E1127 06:53:32.549934 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.549820 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.550075 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.551062 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.551101 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.551114 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.551133 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.551146 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:32Z","lastTransitionTime":"2025-11-27T06:53:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:32 crc kubenswrapper[4971]: E1127 06:53:32.552612 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:32 crc kubenswrapper[4971]: E1127 06:53:32.552671 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:53:32 crc kubenswrapper[4971]: E1127 06:53:32.552956 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.563203 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:32Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.584649 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:32Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.604401 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:32Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.616311 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:32Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.629251 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs\") pod \"network-metrics-daemon-pdn5j\" (UID: \"e710c835-f600-448f-a110-4ff4cef9d5f6\") " pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:32 crc kubenswrapper[4971]: E1127 06:53:32.629470 4971 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object 
"openshift-multus"/"metrics-daemon-secret" not registered Nov 27 06:53:32 crc kubenswrapper[4971]: E1127 06:53:32.629591 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs podName:e710c835-f600-448f-a110-4ff4cef9d5f6 nodeName:}" failed. No retries permitted until 2025-11-27 06:53:48.629563872 +0000 UTC m=+66.821607890 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs") pod "network-metrics-daemon-pdn5j" (UID: "e710c835-f600-448f-a110-4ff4cef9d5f6") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.630198 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pdn5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e710c835-f600-448f-a110-4ff4cef9d5f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pdn5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:32Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.646927 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:32Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.653871 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.653936 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.653945 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.653960 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.653971 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:32Z","lastTransitionTime":"2025-11-27T06:53:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.662622 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:32Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.679879 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:32Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.702041 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106
ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:32Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.715847 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:32Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.738412 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"r
eadOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"
containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:29Z\\\",\\\"message\\\":\\\"ncer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1127 06:53:29.268703 6621 services_controller.go:451] Built service openshift-apiserver-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.38\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1127 06:53:29.268155 6621 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1127 06:53:29.267856 6621 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} 
name:Service_openshift-machine-api/machine-api-operator-machine-webhook_TCP_cluster options:{GoMap:map[event:f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kube
rnetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:32Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.756470 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.756514 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.756547 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.756562 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.756572 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:32Z","lastTransitionTime":"2025-11-27T06:53:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.757267 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237653db3d61407da4c98734dc9cc8e349e67fabab8ad7fde3ceda2cf15e2bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf345d1f97fcf7d514b9bb82e0cf811761950ad9bfe627b738b5271261ee0aa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:32Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.771804 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:32Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.786826 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:32Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.801939 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:32Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.815166 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:32Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.830139 4971 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e4040
2aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:32Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.859322 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.859515 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.859687 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.859846 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.859993 4971 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:32Z","lastTransitionTime":"2025-11-27T06:53:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.962782 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.963035 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.963130 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.963261 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:32 crc kubenswrapper[4971]: I1127 06:53:32.963345 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:32Z","lastTransitionTime":"2025-11-27T06:53:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.066205 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.066240 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.066249 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.066263 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.066273 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:33Z","lastTransitionTime":"2025-11-27T06:53:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.169079 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.170016 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.170162 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.170345 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.170476 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:33Z","lastTransitionTime":"2025-11-27T06:53:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.274344 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.274390 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.274406 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.274423 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.274433 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:33Z","lastTransitionTime":"2025-11-27T06:53:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.377718 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.377797 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.377825 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.377857 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.377882 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:33Z","lastTransitionTime":"2025-11-27T06:53:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.481504 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.481557 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.481567 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.481583 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.481592 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:33Z","lastTransitionTime":"2025-11-27T06:53:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.554960 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.562777 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.567720 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pdn5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e710c835-f600-448f-a110-4ff4cef9d5f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pdn5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:33Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.578372 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:33Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.583830 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.583856 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.583865 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.583877 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.583886 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:33Z","lastTransitionTime":"2025-11-27T06:53:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.591442 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:33Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.608454 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:33Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.625756 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:33Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.639694 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:33Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.664589 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106
ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:33Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.677068 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:33Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.686413 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.686447 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.686455 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.686468 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.686478 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:33Z","lastTransitionTime":"2025-11-27T06:53:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.689569 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:33Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.702512 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:33Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.720347 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f979974fc6abc523b9818905ba1547fe4da8a7c
17e5a149838788bb91940a27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:29Z\\\",\\\"message\\\":\\\"ncer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1127 06:53:29.268703 6621 services_controller.go:451] Built service openshift-apiserver-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.38\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1127 06:53:29.268155 6621 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1127 06:53:29.267856 6621 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} name:Service_openshift-machine-api/machine-api-operator-machine-webhook_TCP_cluster options:{GoMap:map[event:f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:33Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.733353 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237653db3d61407da4c98734dc9cc8e349e67fabab8ad7fde3ceda2cf15e2bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf345d1f97fcf7d514b9bb82e0cf811761950ad9bfe627b738b5271261ee0aa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:33Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.747560 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:33Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.763908 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:33Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.776861 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:33Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.788642 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.788667 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.788676 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.788689 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.788699 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:33Z","lastTransitionTime":"2025-11-27T06:53:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.790405 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:33Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.804828 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:33Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.891171 4971 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.891206 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.891214 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.891228 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.891237 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:33Z","lastTransitionTime":"2025-11-27T06:53:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.994826 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.994897 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.994915 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.994941 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:33 crc kubenswrapper[4971]: I1127 06:53:33.994960 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:33Z","lastTransitionTime":"2025-11-27T06:53:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.097263 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.097304 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.097312 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.097327 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.097336 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:34Z","lastTransitionTime":"2025-11-27T06:53:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.200064 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.200143 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.200166 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.200199 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.200224 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:34Z","lastTransitionTime":"2025-11-27T06:53:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.246898 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:53:34 crc kubenswrapper[4971]: E1127 06:53:34.247011 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:54:06.246987189 +0000 UTC m=+84.439031117 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.302678 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.302718 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.302730 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.302746 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.302758 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:34Z","lastTransitionTime":"2025-11-27T06:53:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.349141 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.349225 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.349309 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.349362 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:34 crc kubenswrapper[4971]: E1127 06:53:34.349466 4971 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Nov 27 06:53:34 crc kubenswrapper[4971]: E1127 06:53:34.349466 4971 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 27 06:53:34 crc kubenswrapper[4971]: E1127 06:53:34.349589 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 27 06:53:34 crc kubenswrapper[4971]: E1127 06:53:34.349469 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 27 06:53:34 crc kubenswrapper[4971]: E1127 06:53:34.349621 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 27 06:53:34 crc kubenswrapper[4971]: E1127 06:53:34.349645 4971 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:34 crc kubenswrapper[4971]: E1127 06:53:34.349665 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 27 06:53:34 crc kubenswrapper[4971]: E1127 06:53:34.349689 4971 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:34 crc kubenswrapper[4971]: E1127 06:53:34.349599 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-27 06:54:06.349570169 +0000 UTC m=+84.541614097 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 27 06:53:34 crc kubenswrapper[4971]: E1127 06:53:34.349736 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-27 06:54:06.349713253 +0000 UTC m=+84.541757211 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 27 06:53:34 crc kubenswrapper[4971]: E1127 06:53:34.349764 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-27 06:54:06.349748784 +0000 UTC m=+84.541792742 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:34 crc kubenswrapper[4971]: E1127 06:53:34.349821 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-27 06:54:06.349802306 +0000 UTC m=+84.541846264 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.405124 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.405179 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.405199 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.405228 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.405278 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:34Z","lastTransitionTime":"2025-11-27T06:53:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.550158 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.550197 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.550247 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:53:34 crc kubenswrapper[4971]: E1127 06:53:34.550290 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 27 06:53:34 crc kubenswrapper[4971]: I1127 06:53:34.550306 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:53:34 crc kubenswrapper[4971]: E1127 06:53:34.550360 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 27 06:53:34 crc kubenswrapper[4971]: E1127 06:53:34.550447 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 27 06:53:34 crc kubenswrapper[4971]: E1127 06:53:34.550508 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6"
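From 06:53:34 until 06:53:38 the kubelet keeps re-recording the same five node-status events roughly every 100 ms, each time with the identical KubeletNotReady condition, and retries the same four pod sandboxes every two seconds with the same result: the container runtime network stays unready because /etc/kubernetes/cni/net.d/ contains no CNI configuration file. A minimal standalone Go sketch of the check the message implies (the directory path is taken from the log; the accepted extensions are an assumption based on common CNI config loaders):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Path quoted in the NetworkPluginNotReady records above.
	dir := "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	found := false
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions CNI loaders typically accept
			fmt.Println("found CNI config:", e.Name())
			found = true
		}
	}
	if !found {
		fmt.Println("no CNI configuration file in", dir, "- node stays NotReady")
	}
}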
Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.521310 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.521364 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.521375 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.521392 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.521403 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:38Z","lastTransitionTime":"2025-11-27T06:53:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.549934 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.549988 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.550041 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.550045 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:53:38 crc kubenswrapper[4971]: E1127 06:53:38.550224 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6"
Nov 27 06:53:38 crc kubenswrapper[4971]: E1127 06:53:38.550378 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 27 06:53:38 crc kubenswrapper[4971]: E1127 06:53:38.550500 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 27 06:53:38 crc kubenswrapper[4971]: E1127 06:53:38.550693 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.598423 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.598468 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.598481 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.598503 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.598516 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:38Z","lastTransitionTime":"2025-11-27T06:53:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:53:38 crc kubenswrapper[4971]: E1127 06:53:38.617135 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in
/etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeByt
es\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:38Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.620974 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.621024 4971 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.621038 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.621059 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.621075 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:38Z","lastTransitionTime":"2025-11-27T06:53:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:38 crc kubenswrapper[4971]: E1127 06:53:38.635181 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:38Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.640699 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.640731 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.640744 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.640762 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.640776 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:38Z","lastTransitionTime":"2025-11-27T06:53:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:38 crc kubenswrapper[4971]: E1127 06:53:38.656235 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:38Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.660133 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.660202 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.660217 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.660234 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.660246 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:38Z","lastTransitionTime":"2025-11-27T06:53:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:38 crc kubenswrapper[4971]: E1127 06:53:38.674043 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:38Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.677731 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.677834 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.677851 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.677871 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.677887 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:38Z","lastTransitionTime":"2025-11-27T06:53:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:38 crc kubenswrapper[4971]: E1127 06:53:38.692312 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:38Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:38 crc kubenswrapper[4971]: E1127 06:53:38.692721 4971 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.694317 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.694344 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.694353 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.694366 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.694376 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:38Z","lastTransitionTime":"2025-11-27T06:53:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.796907 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.796952 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.796965 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.796981 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.796994 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:38Z","lastTransitionTime":"2025-11-27T06:53:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.899840 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.899884 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.899932 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.899952 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:38 crc kubenswrapper[4971]: I1127 06:53:38.899965 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:38Z","lastTransitionTime":"2025-11-27T06:53:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.002305 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.002348 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.002360 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.002378 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.002391 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:39Z","lastTransitionTime":"2025-11-27T06:53:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.105266 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.105313 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.105329 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.105351 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.105368 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:39Z","lastTransitionTime":"2025-11-27T06:53:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.207544 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.207599 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.207615 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.207641 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.207662 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:39Z","lastTransitionTime":"2025-11-27T06:53:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.310263 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.310341 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.310427 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.310504 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.310521 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:39Z","lastTransitionTime":"2025-11-27T06:53:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.414379 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.414463 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.414486 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.414524 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.414584 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:39Z","lastTransitionTime":"2025-11-27T06:53:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.516642 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.516708 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.516727 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.516756 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.516776 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:39Z","lastTransitionTime":"2025-11-27T06:53:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.619603 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.619646 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.619655 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.619670 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.619679 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:39Z","lastTransitionTime":"2025-11-27T06:53:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.722694 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.722748 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.722762 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.722786 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.722798 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:39Z","lastTransitionTime":"2025-11-27T06:53:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.824852 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.824897 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.824909 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.824927 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.824941 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:39Z","lastTransitionTime":"2025-11-27T06:53:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.927362 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.927401 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.927412 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.927428 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:39 crc kubenswrapper[4971]: I1127 06:53:39.927437 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:39Z","lastTransitionTime":"2025-11-27T06:53:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.030911 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.030986 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.031012 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.031041 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.031066 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:40Z","lastTransitionTime":"2025-11-27T06:53:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.134055 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.134118 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.134138 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.134163 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.134181 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:40Z","lastTransitionTime":"2025-11-27T06:53:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.237394 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.237437 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.237450 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.237466 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.237477 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:40Z","lastTransitionTime":"2025-11-27T06:53:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.340827 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.340863 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.340877 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.340891 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.340901 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:40Z","lastTransitionTime":"2025-11-27T06:53:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.444331 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.444380 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.444395 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.444421 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.444437 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:40Z","lastTransitionTime":"2025-11-27T06:53:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.546202 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.546242 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.546253 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.546269 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.546279 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:40Z","lastTransitionTime":"2025-11-27T06:53:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.549573 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.549602 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.549622 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.549661 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:40 crc kubenswrapper[4971]: E1127 06:53:40.549657 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:40 crc kubenswrapper[4971]: E1127 06:53:40.549742 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:40 crc kubenswrapper[4971]: E1127 06:53:40.549810 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:40 crc kubenswrapper[4971]: E1127 06:53:40.549882 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.647851 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.647894 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.647915 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.647931 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.647943 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:40Z","lastTransitionTime":"2025-11-27T06:53:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.750510 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.750594 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.750613 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.750638 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.750656 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:40Z","lastTransitionTime":"2025-11-27T06:53:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.854109 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.854177 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.854194 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.854217 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.854236 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:40Z","lastTransitionTime":"2025-11-27T06:53:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.956909 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.957021 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.957041 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.957058 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:40 crc kubenswrapper[4971]: I1127 06:53:40.957069 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:40Z","lastTransitionTime":"2025-11-27T06:53:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.059600 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.059676 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.059700 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.059731 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.059756 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:41Z","lastTransitionTime":"2025-11-27T06:53:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.162951 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.163003 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.163030 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.163061 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.163085 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:41Z","lastTransitionTime":"2025-11-27T06:53:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.266090 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.266130 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.266139 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.266154 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.266164 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:41Z","lastTransitionTime":"2025-11-27T06:53:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.369096 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.369144 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.369179 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.369239 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.369291 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:41Z","lastTransitionTime":"2025-11-27T06:53:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.472002 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.472086 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.472110 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.472145 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.472167 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:41Z","lastTransitionTime":"2025-11-27T06:53:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.576526 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.576600 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.576617 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.576639 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.576654 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:41Z","lastTransitionTime":"2025-11-27T06:53:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.680398 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.680482 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.680505 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.680575 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.680600 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:41Z","lastTransitionTime":"2025-11-27T06:53:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.783013 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.783061 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.783075 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.783092 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.783106 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:41Z","lastTransitionTime":"2025-11-27T06:53:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.886169 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.886215 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.886225 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.886246 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.886258 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:41Z","lastTransitionTime":"2025-11-27T06:53:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.991829 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.991927 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.991951 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.991984 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:41 crc kubenswrapper[4971]: I1127 06:53:41.992015 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:41Z","lastTransitionTime":"2025-11-27T06:53:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.096039 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.096143 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.096200 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.096240 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.096266 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:42Z","lastTransitionTime":"2025-11-27T06:53:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.200366 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.200423 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.200436 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.200457 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.200477 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:42Z","lastTransitionTime":"2025-11-27T06:53:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.304641 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.304694 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.304707 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.304729 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.304745 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:42Z","lastTransitionTime":"2025-11-27T06:53:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.408297 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.408362 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.408382 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.408414 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.408437 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:42Z","lastTransitionTime":"2025-11-27T06:53:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.511165 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.511240 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.511267 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.511300 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.511320 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:42Z","lastTransitionTime":"2025-11-27T06:53:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.551876 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.551876 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:42 crc kubenswrapper[4971]: E1127 06:53:42.553806 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.552006 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:42 crc kubenswrapper[4971]: E1127 06:53:42.553930 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.551927 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:42 crc kubenswrapper[4971]: E1127 06:53:42.553981 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.552906 4971 scope.go:117] "RemoveContainer" containerID="2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27" Nov 27 06:53:42 crc kubenswrapper[4971]: E1127 06:53:42.554043 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:53:42 crc kubenswrapper[4971]: E1127 06:53:42.554408 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.565030 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c5d5ac-1172-4d70-95cc-09a985cdccf9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://233f958888e7af4cbaf18ca0b743ae116a205a5d250fcb5d8d25c6c33cc6edfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be990eb8e6023cc976afaf7edaba3eed7bf119dd4b31378671564feaf9cdd5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0636fb6663c0b29767d749d85154ba8c5707c5022f211d815895bfc844d6d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5822703375f22f8adb588a790d4f00e78b8a2cd8fe3f902fe4732a133511d63a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5822703375f22f8adb588a790d4f00e78b8a2cd8fe3f902fe4732a133511d63a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:42Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.585088 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:42Z is after 
2025-08-24T17:21:41Z" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.613404 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e
e9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:29Z\\\",\\\"message\\\":\\\"ncer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1127 06:53:29.268703 6621 services_controller.go:451] Built service openshift-apiserver-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.38\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1127 06:53:29.268155 6621 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1127 06:53:29.267856 6621 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} name:Service_openshift-machine-api/machine-api-operator-machine-webhook_TCP_cluster 
options:{GoMap:map[event:f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:42Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.614601 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.614647 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.614662 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.614685 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.614701 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:42Z","lastTransitionTime":"2025-11-27T06:53:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.625114 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237653db3d61407da4c98734dc9cc8e349e67fabab8ad7fde3ceda2cf15e2bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf345d1f97fcf7d514b9bb82e0cf811761950ad9bfe627b738b5271261ee0aa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:42Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.645842 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:42Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.658993 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:42Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.681561 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:42Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.696473 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:42Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.708502 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:42Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.718418 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.719038 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.719215 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.719371 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.719571 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:42Z","lastTransitionTime":"2025-11-27T06:53:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.719519 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pdn5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e710c835-f600-448f-a110-4ff4cef9d5f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pdn5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:42Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.733476 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:42Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.747813 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:42Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.764685 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:42Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.779245 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:42Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.793390 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:42Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.818833 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106
ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:42Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.823473 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.823508 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.823520 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.823557 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.823571 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:42Z","lastTransitionTime":"2025-11-27T06:53:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.833338 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:42Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.848204 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:42Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.926525 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.926622 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.926639 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.926665 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:42 crc kubenswrapper[4971]: I1127 06:53:42.926682 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:42Z","lastTransitionTime":"2025-11-27T06:53:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.030975 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.031035 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.031052 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.031080 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.031104 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:43Z","lastTransitionTime":"2025-11-27T06:53:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.134784 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.134862 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.134877 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.134897 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.134910 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:43Z","lastTransitionTime":"2025-11-27T06:53:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.237386 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.237435 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.237447 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.237464 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.237477 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:43Z","lastTransitionTime":"2025-11-27T06:53:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.340050 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.340086 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.340104 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.340120 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.340131 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:43Z","lastTransitionTime":"2025-11-27T06:53:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.442903 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.442965 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.442978 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.443001 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.443015 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:43Z","lastTransitionTime":"2025-11-27T06:53:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.546378 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.546431 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.546444 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.546464 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.546479 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:43Z","lastTransitionTime":"2025-11-27T06:53:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.649925 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.649968 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.649982 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.649999 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.650009 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:43Z","lastTransitionTime":"2025-11-27T06:53:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.752559 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.752594 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.752614 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.752632 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.752642 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:43Z","lastTransitionTime":"2025-11-27T06:53:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.855733 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.855787 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.855797 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.855814 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.855824 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:43Z","lastTransitionTime":"2025-11-27T06:53:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.960920 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.960958 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.960967 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.960982 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:43 crc kubenswrapper[4971]: I1127 06:53:43.960991 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:43Z","lastTransitionTime":"2025-11-27T06:53:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.063460 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.063513 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.063522 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.063555 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.063565 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:44Z","lastTransitionTime":"2025-11-27T06:53:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.166587 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.166638 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.166649 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.166666 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.166710 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:44Z","lastTransitionTime":"2025-11-27T06:53:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.269156 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.269207 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.269220 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.269241 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.269257 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:44Z","lastTransitionTime":"2025-11-27T06:53:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.372890 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.372937 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.372950 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.372970 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.372984 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:44Z","lastTransitionTime":"2025-11-27T06:53:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.475069 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.475119 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.475129 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.475145 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.475156 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:44Z","lastTransitionTime":"2025-11-27T06:53:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.550034 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.550095 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.550046 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.550646 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:44 crc kubenswrapper[4971]: E1127 06:53:44.550737 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:44 crc kubenswrapper[4971]: E1127 06:53:44.550960 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:53:44 crc kubenswrapper[4971]: E1127 06:53:44.551230 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:44 crc kubenswrapper[4971]: E1127 06:53:44.551282 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.577926 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.577967 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.577981 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.578003 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.578015 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:44Z","lastTransitionTime":"2025-11-27T06:53:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.680217 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.680266 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.680280 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.680299 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.680313 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:44Z","lastTransitionTime":"2025-11-27T06:53:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.782238 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.782272 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.782282 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.782296 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.782305 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:44Z","lastTransitionTime":"2025-11-27T06:53:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.884868 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.884897 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.884905 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.884918 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.884926 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:44Z","lastTransitionTime":"2025-11-27T06:53:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.989118 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.989168 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.989179 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.989194 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:44 crc kubenswrapper[4971]: I1127 06:53:44.989207 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:44Z","lastTransitionTime":"2025-11-27T06:53:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.092154 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.092204 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.092220 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.092238 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.092253 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:45Z","lastTransitionTime":"2025-11-27T06:53:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.195757 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.195834 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.195864 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.195898 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.195919 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:45Z","lastTransitionTime":"2025-11-27T06:53:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.300338 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.300413 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.300432 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.300461 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.300480 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:45Z","lastTransitionTime":"2025-11-27T06:53:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.404524 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.404619 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.404644 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.404678 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.404706 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:45Z","lastTransitionTime":"2025-11-27T06:53:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.509085 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.509179 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.509214 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.509252 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.509279 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:45Z","lastTransitionTime":"2025-11-27T06:53:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.612697 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.612752 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.612767 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.612851 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.612912 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:45Z","lastTransitionTime":"2025-11-27T06:53:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.715986 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.716029 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.716038 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.716054 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.716067 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:45Z","lastTransitionTime":"2025-11-27T06:53:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.819131 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.819195 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.819208 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.819229 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.819245 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:45Z","lastTransitionTime":"2025-11-27T06:53:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.921913 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.921993 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.922036 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.922091 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:45 crc kubenswrapper[4971]: I1127 06:53:45.922106 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:45Z","lastTransitionTime":"2025-11-27T06:53:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.024988 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.025063 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.025081 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.025115 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.025133 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:46Z","lastTransitionTime":"2025-11-27T06:53:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.127593 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.127645 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.127659 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.127677 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.127690 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:46Z","lastTransitionTime":"2025-11-27T06:53:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.231398 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.231460 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.231473 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.231492 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.231502 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:46Z","lastTransitionTime":"2025-11-27T06:53:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.334000 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.334054 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.334063 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.334079 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.334090 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:46Z","lastTransitionTime":"2025-11-27T06:53:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.436515 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.436577 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.436586 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.436601 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.436610 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:46Z","lastTransitionTime":"2025-11-27T06:53:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.540441 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.540596 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.540624 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.540683 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.540708 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:46Z","lastTransitionTime":"2025-11-27T06:53:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
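The block above is the kubelet's NotReady loop: on each status sync it re-records the four node conditions and keeps Ready=False because the container runtime reports NetworkReady=false until a network plugin writes a CNI configuration into /etc/kubernetes/cni/net.d/. On an OpenShift/CRC node that file is normally dropped by the cluster network plugin (Multus/OVN-Kubernetes) once it comes up; the sketch below is only a generic illustration of the kind of file the readiness check is waiting for, using the reference CNI bridge plugin with assumed names and addresses, not what CRC actually installs:

    {
      "cniVersion": "0.4.0",
      "name": "example-net",
      "type": "bridge",
      "bridge": "cni0",
      "isGateway": true,
      "ipMasq": true,
      "ipam": {
        "type": "host-local",
        "subnet": "10.88.0.0/16"
      }
    }

Once any valid configuration appears in that directory, the runtime should begin reporting NetworkReady=true and the Ready condition should flip on the next sync.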
Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.549582 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:53:46 crc kubenswrapper[4971]: E1127 06:53:46.549727 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.549808 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.549847 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:53:46 crc kubenswrapper[4971]: E1127 06:53:46.549940 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 27 06:53:46 crc kubenswrapper[4971]: I1127 06:53:46.549961 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:53:46 crc kubenswrapper[4971]: E1127 06:53:46.550019 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6"
Nov 27 06:53:46 crc kubenswrapper[4971]: E1127 06:53:46.550138 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[... the NodeHasSufficientMemory / NodeHasNoDiskPressure / NodeHasSufficientPID / NodeNotReady / "Node became not ready" block keeps repeating every ~100 ms from 06:53:46.644 through 06:53:48.497 ...]
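The "No sandbox for pod can be found" / "Error syncing pod, skipping" pairs above show the knock-on effect: the kubelet wants to create fresh sandboxes for these four pods, but it refuses to sync any pod that needs the cluster network while the runtime network is not ready, so only host-network pods can make progress. A hypothetical triage session for this state, using standard oc/crictl commands and the node and namespace names taken from the log, might look like:

    # Why is the node NotReady?
    oc get node crc -o jsonpath='{.status.conditions[?(@.type=="Ready")].message}{"\n"}'
    # Has any CNI config been written yet? (empty while the network plugin is down)
    ls -l /etc/kubernetes/cni/net.d/
    # Which pod sandboxes never came up?
    sudo crictl pods | grep -i notready
    # The pods named in the entries above:
    oc -n openshift-network-diagnostics get pods -o wide
    oc -n openshift-multus get pods -o wide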
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.549912 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.549939 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:53:48 crc kubenswrapper[4971]: E1127 06:53:48.550071 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.550121 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.550176 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:53:48 crc kubenswrapper[4971]: E1127 06:53:48.550324 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 27 06:53:48 crc kubenswrapper[4971]: E1127 06:53:48.550667 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6"
Nov 27 06:53:48 crc kubenswrapper[4971]: E1127 06:53:48.550809 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[... the NodeNotReady status block repeats at 06:53:48.600 and 06:53:48.703 ...]
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.707010 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs\") pod \"network-metrics-daemon-pdn5j\" (UID: \"e710c835-f600-448f-a110-4ff4cef9d5f6\") " pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:53:48 crc kubenswrapper[4971]: E1127 06:53:48.707175 4971 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 27 06:53:48 crc kubenswrapper[4971]: E1127 06:53:48.707247 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs podName:e710c835-f600-448f-a110-4ff4cef9d5f6 nodeName:}" failed. No retries permitted until 2025-11-27 06:54:20.707226155 +0000 UTC m=+98.899270073 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs") pod "network-metrics-daemon-pdn5j" (UID: "e710c835-f600-448f-a110-4ff4cef9d5f6") : object "openshift-multus"/"metrics-daemon-secret" not registered
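The mount failure above is a separate symptom of the same startup state: the kubelet's volume manager tries to mount the Secret openshift-multus/metrics-daemon-secret into network-metrics-daemon-pdn5j, but "object ... not registered" means the secret has not yet reached the kubelet's object cache, so the operation goes into exponential backoff (durationBeforeRetry 32s, next attempt scheduled for 06:54:20). The Secret the volume plugin is waiting on would have roughly the shape sketched below; the type and key names are assumptions inferred from the volume name "metrics-certs", and the data values are placeholders:

    apiVersion: v1
    kind: Secret
    metadata:
      name: metrics-daemon-secret
      namespace: openshift-multus
    type: kubernetes.io/tls
    data:
      tls.crt: <base64-encoded certificate>  # placeholder
      tls.key: <base64-encoded key>          # placeholder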
[... the NodeNotReady status block repeats at 06:53:48.806 ...]
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.813526 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.813631 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.813652 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.813673 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.813689 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:48Z","lastTransitionTime":"2025-11-27T06:53:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Nov 27 06:53:48 crc kubenswrapper[4971]: E1127 06:53:48.833951 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:48Z is after 
2025-08-24T17:21:41Z"
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.840793    4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.840881    4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.840910    4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.840947    4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.840970    4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:48Z","lastTransitionTime":"2025-11-27T06:53:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:53:48 crc kubenswrapper[4971]: E1127 06:53:48.924091    4971 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.926582    4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.926625    4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.926638    4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.926658    4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:53:48 crc kubenswrapper[4971]: I1127 06:53:48.926671    4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:48Z","lastTransitionTime":"2025-11-27T06:53:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.030090    4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.030136    4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.030149    4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.030167    4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.030177    4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:49Z","lastTransitionTime":"2025-11-27T06:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.132832 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.132881 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.132893 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.132914 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.132926 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:49Z","lastTransitionTime":"2025-11-27T06:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.235497 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.235572 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.235585 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.235602 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.235613 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:49Z","lastTransitionTime":"2025-11-27T06:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.337957 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.337992 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.338002 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.338019 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.338027 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:49Z","lastTransitionTime":"2025-11-27T06:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.441060 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.441152 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.441166 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.441186 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.441202 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:49Z","lastTransitionTime":"2025-11-27T06:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.544562 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.544632 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.544644 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.544662 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.544674 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:49Z","lastTransitionTime":"2025-11-27T06:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.647070 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.647140 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.647159 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.647184 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.647203 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:49Z","lastTransitionTime":"2025-11-27T06:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.751205 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.751270 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.751288 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.751321 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.751342 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:49Z","lastTransitionTime":"2025-11-27T06:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.854326 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.854372 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.854380 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.854396 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.854407 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:49Z","lastTransitionTime":"2025-11-27T06:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.957108 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.957144 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.957153 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.957170 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:49 crc kubenswrapper[4971]: I1127 06:53:49.957181 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:49Z","lastTransitionTime":"2025-11-27T06:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.059603 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.059679 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.059692 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.059715 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.059730 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:50Z","lastTransitionTime":"2025-11-27T06:53:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.162439 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.162480 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.162488 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.162522 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.162557 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:50Z","lastTransitionTime":"2025-11-27T06:53:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.265035 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.265083 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.265096 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.265112 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.265125 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:50Z","lastTransitionTime":"2025-11-27T06:53:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.368334 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.368393 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.368409 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.368432 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.368448 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:50Z","lastTransitionTime":"2025-11-27T06:53:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.471699 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.471749 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.471789 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.471809 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.471822 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:50Z","lastTransitionTime":"2025-11-27T06:53:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.549579 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.549719 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:50 crc kubenswrapper[4971]: E1127 06:53:50.550304 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.550368 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.550331 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:50 crc kubenswrapper[4971]: E1127 06:53:50.551141 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:53:50 crc kubenswrapper[4971]: E1127 06:53:50.551603 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:50 crc kubenswrapper[4971]: E1127 06:53:50.551720 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.566104 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.574809 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.574857 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.574874 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.574897 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.574915 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:50Z","lastTransitionTime":"2025-11-27T06:53:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.677771 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.677842 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.677859 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.677883 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.677902 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:50Z","lastTransitionTime":"2025-11-27T06:53:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.780840 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.780884 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.780897 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.780916 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.780928 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:50Z","lastTransitionTime":"2025-11-27T06:53:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.883428 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.883471 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.883481 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.883498 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.883509 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:50Z","lastTransitionTime":"2025-11-27T06:53:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.985659 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.985719 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.985738 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.985764 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:50 crc kubenswrapper[4971]: I1127 06:53:50.985784 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:50Z","lastTransitionTime":"2025-11-27T06:53:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.088762 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.088864 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.088881 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.088901 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.088916 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:51Z","lastTransitionTime":"2025-11-27T06:53:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.191083 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.191125 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.191136 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.191151 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.191165 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:51Z","lastTransitionTime":"2025-11-27T06:53:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.293333 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.293375 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.293388 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.293405 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.293417 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:51Z","lastTransitionTime":"2025-11-27T06:53:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.395875 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.395940 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.395951 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.395975 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.395987 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:51Z","lastTransitionTime":"2025-11-27T06:53:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.498473 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.498522 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.498600 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.498621 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.498636 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:51Z","lastTransitionTime":"2025-11-27T06:53:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.600460 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.600496 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.600507 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.600524 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.600587 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:51Z","lastTransitionTime":"2025-11-27T06:53:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.702928 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.703016 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.703033 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.703055 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.703068 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:51Z","lastTransitionTime":"2025-11-27T06:53:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.804886 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.804924 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.804933 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.804948 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.804957 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:51Z","lastTransitionTime":"2025-11-27T06:53:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.907804 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.907872 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.907885 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.907900 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:51 crc kubenswrapper[4971]: I1127 06:53:51.907912 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:51Z","lastTransitionTime":"2025-11-27T06:53:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.009900 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.009942 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.009952 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.009969 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.009985 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:52Z","lastTransitionTime":"2025-11-27T06:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.113021 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.113063 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.113071 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.113086 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.113097 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:52Z","lastTransitionTime":"2025-11-27T06:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.215403 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.215448 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.215460 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.215477 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.215488 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:52Z","lastTransitionTime":"2025-11-27T06:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.318615 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.318664 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.318676 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.318694 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.318712 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:52Z","lastTransitionTime":"2025-11-27T06:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.421214 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.421267 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.421277 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.421297 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.421310 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:52Z","lastTransitionTime":"2025-11-27T06:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.523750 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.523807 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.523823 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.523845 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.523859 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:52Z","lastTransitionTime":"2025-11-27T06:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.549954 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.550034 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.550063 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.550069 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:52 crc kubenswrapper[4971]: E1127 06:53:52.550469 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:53:52 crc kubenswrapper[4971]: E1127 06:53:52.550688 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:52 crc kubenswrapper[4971]: E1127 06:53:52.550754 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:52 crc kubenswrapper[4971]: E1127 06:53:52.550837 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.560544 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0c2c36-71a4-49a3-b0a3-cd73bc42b9b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a1ad326792f4efe7ca0bbe3cb729cdb3b4047e1d142b95f425b47fd07250a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47e46ba3596addbd137a1dfaa457e183bd7543340d5a0a85b61ee97f7c1a6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47e46ba3596addbd137a1dfaa457e183bd7543340d5a0a85b61ee97f7c1a6cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.570929 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c5d5ac-1172-4d70-95cc-09a985cdccf9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://233f958888e7af4cbaf18ca0b743ae116a205a5d250fcb5d8d25c6c33cc6edfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be990eb8e6023cc976afaf7edaba3eed7bf119dd4b31378671564feaf9cdd5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0636fb6663c0b29767d749d85154ba8c5707c5022f211d815895bfc844d6d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35
a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5822703375f22f8adb588a790d4f00e78b8a2cd8fe3f902fe4732a133511d63a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5822703375f22f8adb588a790d4f00e78b8a2cd8fe3f902fe4732a133511d63a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.585378 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.604481 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f979974fc6abc523b9818905ba1547fe4da8a7c
17e5a149838788bb91940a27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:29Z\\\",\\\"message\\\":\\\"ncer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1127 06:53:29.268703 6621 services_controller.go:451] Built service openshift-apiserver-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.38\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1127 06:53:29.268155 6621 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1127 06:53:29.267856 6621 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} name:Service_openshift-machine-api/machine-api-operator-machine-webhook_TCP_cluster options:{GoMap:map[event:f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.615229 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237653db3d61407da4c98734dc9cc8e349e67fabab8ad7fde3ceda2cf15e2bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf345d1f97fcf7d514b9bb82e0cf811761950ad9bfe627b738b5271261ee0aa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.626809 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.626865 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.626878 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.626897 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.626912 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:52Z","lastTransitionTime":"2025-11-27T06:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.629574 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.642582 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.655469 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.666876 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.676663 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.686229 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.695247 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.707846 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 
2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.719849 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"pod
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.729067 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.729312 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.729430 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.729615 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.729730 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:52Z","lastTransitionTime":"2025-11-27T06:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.730929 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.741647 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pdn5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e710c835-f600-448f-a110-4ff4cef9d5f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pdn5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.758642 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106
ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.770010 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lr9p6_a2136014-aa8f-48e5-bccf-64cdd3cbc5f9/kube-multus/0.log" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.770068 4971 generic.go:334] "Generic (PLEG): container finished" podID="a2136014-aa8f-48e5-bccf-64cdd3cbc5f9" containerID="5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c" exitCode=1 Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.770099 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lr9p6" event={"ID":"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9","Type":"ContainerDied","Data":"5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c"} Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.770464 4971 scope.go:117] "RemoveContainer" containerID="5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.772067 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.782959 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.799917 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.820226 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.831202 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.832714 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.832746 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.832756 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.832772 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.832784 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:52Z","lastTransitionTime":"2025-11-27T06:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.845752 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0c2c36-71a4-49a3-b0a3-cd73bc42b9b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a1ad326792f4efe7ca0bbe3cb729cdb3b4047e1d142b95f425b47fd07250a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47e46ba3596addbd137a1dfaa457e183bd7543340d5a0a85b61ee97f7c1a6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47e46ba3596addbd137a1dfaa457e183bd7543340d5a0a85b61ee97f7c1a6cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27
T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.862076 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c5d5ac-1172-4d70-95cc-09a985cdccf9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://233f958888e7af4cbaf18ca0b743ae116a205a5d250fcb5d8d25c6c33cc6edfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be990eb8e6023cc976afaf7edaba3eed7bf119dd4b31378671564feaf9cdd5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0636fb6663c0b29767d749d85154ba8c5707c5022f211d815895b
fc844d6d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5822703375f22f8adb588a790d4f00e78b8a2cd8fe3f902fe4732a133511d63a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5822703375f22f8adb588a790d4f00e78b8a2cd8fe3f902fe4732a133511d63a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.875995 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.896929 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f979974fc6abc523b9818905ba1547fe4da8a7c
17e5a149838788bb91940a27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:29Z\\\",\\\"message\\\":\\\"ncer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1127 06:53:29.268703 6621 services_controller.go:451] Built service openshift-apiserver-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.38\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1127 06:53:29.268155 6621 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1127 06:53:29.267856 6621 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} name:Service_openshift-machine-api/machine-api-operator-machine-webhook_TCP_cluster options:{GoMap:map[event:f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.908309 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237653db3d61407da4c98734dc9cc8e349e67fabab8ad7fde3ceda2cf15e2bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf345d1f97fcf7d514b9bb82e0cf811761950ad9bfe627b738b5271261ee0aa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.921161 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.934230 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.934617 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.934669 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.934681 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.934699 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.934712 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:52Z","lastTransitionTime":"2025-11-27T06:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.948340 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.958852 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.969417 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.980164 4971 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:52 crc kubenswrapper[4971]: I1127 06:53:52.988818 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.001178 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:52Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.013047 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:52Z\\\",\\\"message\\\":\\\"2025-11-27T06:53:07+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_94bc62df-014f-47cf-be41-454f276551dd\\\\n2025-11-27T06:53:07+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_94bc62df-014f-47cf-be41-454f276551dd to /host/opt/cni/bin/\\\\n2025-11-27T06:53:07Z [verbose] multus-daemon started\\\\n2025-11-27T06:53:07Z [verbose] Readiness Indicator file check\\\\n2025-11-27T06:53:52Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.022020 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.030344 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pdn5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e710c835-f600-448f-a110-4ff4cef9d5f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pdn5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.037145 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.037187 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.037200 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.037219 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.037231 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:53Z","lastTransitionTime":"2025-11-27T06:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.139624 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.139669 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.139677 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.139694 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.139703 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:53Z","lastTransitionTime":"2025-11-27T06:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.241897 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.241944 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.241957 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.241975 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.241990 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:53Z","lastTransitionTime":"2025-11-27T06:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.344821 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.344866 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.344875 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.344890 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.344900 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:53Z","lastTransitionTime":"2025-11-27T06:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.447208 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.447249 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.447259 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.447275 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.447286 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:53Z","lastTransitionTime":"2025-11-27T06:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.549282 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.549336 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.549353 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.549379 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.549402 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:53Z","lastTransitionTime":"2025-11-27T06:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.652023 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.652069 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.652083 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.652104 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.652120 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:53Z","lastTransitionTime":"2025-11-27T06:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.754482 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.754522 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.754563 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.754580 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.754591 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:53Z","lastTransitionTime":"2025-11-27T06:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.773922 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lr9p6_a2136014-aa8f-48e5-bccf-64cdd3cbc5f9/kube-multus/0.log" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.773975 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lr9p6" event={"ID":"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9","Type":"ContainerStarted","Data":"a2536efee5c523594e02d64a0d1113af4945a84c32d06c78f840ef292143578e"} Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.792977 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106
ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.805258 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.817656 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.829062 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0c2c36-71a4-49a3-b0a3-cd73bc42b9b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a1ad326792f4efe7ca0bbe3cb729cdb3b4047e1d142b95f425b47fd07250a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47e46ba3596addbd137a1dfaa457e183bd7543340d5a0a85b61ee97f7c1a6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47e46ba3596addbd137a1dfaa457e183bd7543340d5a0a85b61ee97f7c1a6cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.840373 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c5d5ac-1172-4d70-95cc-09a985cdccf9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://233f958888e7af4cbaf18ca0b743ae116a205a5d250fcb5d8d25c6c33cc6edfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be990eb8e6023cc976afaf7edaba3eed7bf119dd4b31378671564feaf9cdd5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0636fb6663c0b29767d749d85154ba8c5707c5022f211d815895bfc844d6d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5822703375f22f8adb588a790d4f00e78b8a2cd8fe3f902fe4732a133511d63a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5822703375f22f8adb588a790d4f00e78b8a2cd8fe3f902fe4732a133511d63a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.856398 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.856436 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.856447 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.856464 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.856485 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:53Z","lastTransitionTime":"2025-11-27T06:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.859344 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.876724 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:29Z\\\",\\\"message\\\":\\\"ncer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1127 06:53:29.268703 6621 services_controller.go:451] Built service openshift-apiserver-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.38\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1127 06:53:29.268155 6621 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1127 06:53:29.267856 6621 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} name:Service_openshift-machine-api/machine-api-operator-machine-webhook_TCP_cluster options:{GoMap:map[event:f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.889035 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237653db3d61407da4c98734dc9cc8e349e67fabab8ad7fde3ceda2cf15e2bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf345d1f97fcf7d514b9bb82e0cf811761950ad9bfe627b738b5271261ee0aa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.903056 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.915213 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.927509 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.940723 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.951275 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.958469 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.958505 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.958516 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.958543 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.958555 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:53Z","lastTransitionTime":"2025-11-27T06:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.960647 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pdn5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e710c835-f600-448f-a110-4ff4cef9d5f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pdn5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.970561 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.979162 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:53 crc kubenswrapper[4971]: I1127 06:53:53.990096 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:53Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.001639 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2536efee5c523594e02d64a0d1113af4945a84c32d06c78f840ef292143578e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:52Z\\\",\\\"message\\\":\\\"2025-11-27T06:53:07+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_94bc62df-014f-47cf-be41-454f276551dd\\\\n2025-11-27T06:53:07+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_94bc62df-014f-47cf-be41-454f276551dd to /host/opt/cni/bin/\\\\n2025-11-27T06:53:07Z [verbose] multus-daemon started\\\\n2025-11-27T06:53:07Z [verbose] Readiness Indicator file check\\\\n2025-11-27T06:53:52Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:54Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.010245 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:54Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.061222 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.061251 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.061264 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.061281 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.061295 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:54Z","lastTransitionTime":"2025-11-27T06:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.164287 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.164351 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.164375 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.164406 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.164428 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:54Z","lastTransitionTime":"2025-11-27T06:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.267494 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.267704 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.267725 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.267751 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.267769 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:54Z","lastTransitionTime":"2025-11-27T06:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.369635 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.369671 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.369681 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.369695 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.369703 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:54Z","lastTransitionTime":"2025-11-27T06:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.472214 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.472257 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.472266 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.472281 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.472295 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:54Z","lastTransitionTime":"2025-11-27T06:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.550094 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.550188 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:54 crc kubenswrapper[4971]: E1127 06:53:54.550228 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.550258 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.550265 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:54 crc kubenswrapper[4971]: E1127 06:53:54.550363 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:53:54 crc kubenswrapper[4971]: E1127 06:53:54.550418 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:54 crc kubenswrapper[4971]: E1127 06:53:54.550477 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.573834 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.573880 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.573891 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.573908 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.573918 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:54Z","lastTransitionTime":"2025-11-27T06:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.675889 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.675922 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.675932 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.675945 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.675956 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:54Z","lastTransitionTime":"2025-11-27T06:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.777175 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.777206 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.777216 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.777228 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.777237 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:54Z","lastTransitionTime":"2025-11-27T06:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.879235 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.879274 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.879287 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.879305 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.879316 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:54Z","lastTransitionTime":"2025-11-27T06:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.981568 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.981622 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.981642 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.981666 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:54 crc kubenswrapper[4971]: I1127 06:53:54.981684 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:54Z","lastTransitionTime":"2025-11-27T06:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.083843 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.083876 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.083885 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.083899 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.083910 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:55Z","lastTransitionTime":"2025-11-27T06:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.186543 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.186589 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.186599 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.186617 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.186627 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:55Z","lastTransitionTime":"2025-11-27T06:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.288778 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.288835 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.288851 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.288877 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.288896 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:55Z","lastTransitionTime":"2025-11-27T06:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.391523 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.391572 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.391582 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.391596 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.391608 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:55Z","lastTransitionTime":"2025-11-27T06:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.493867 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.493894 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.493903 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.493918 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.493927 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:55Z","lastTransitionTime":"2025-11-27T06:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.595951 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.595991 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.596000 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.596011 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.596020 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:55Z","lastTransitionTime":"2025-11-27T06:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.698858 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.698897 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.698906 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.698923 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.698934 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:55Z","lastTransitionTime":"2025-11-27T06:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.801858 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.801933 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.801968 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.802001 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.802022 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:55Z","lastTransitionTime":"2025-11-27T06:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.904609 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.904652 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.904685 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.904702 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:55 crc kubenswrapper[4971]: I1127 06:53:55.904714 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:55Z","lastTransitionTime":"2025-11-27T06:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.007279 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.007329 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.007340 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.007359 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.007373 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:56Z","lastTransitionTime":"2025-11-27T06:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.111118 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.111163 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.111174 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.111191 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.111203 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:56Z","lastTransitionTime":"2025-11-27T06:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.214144 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.214183 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.214194 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.214208 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.214224 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:56Z","lastTransitionTime":"2025-11-27T06:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.316994 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.317043 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.317139 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.317160 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.317175 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:56Z","lastTransitionTime":"2025-11-27T06:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.419558 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.419601 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.419617 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.419636 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.419646 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:56Z","lastTransitionTime":"2025-11-27T06:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.521453 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.521495 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.521508 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.521524 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.521558 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:56Z","lastTransitionTime":"2025-11-27T06:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.549812 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.549898 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:56 crc kubenswrapper[4971]: E1127 06:53:56.549938 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.549947 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:56 crc kubenswrapper[4971]: E1127 06:53:56.550380 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.550415 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:56 crc kubenswrapper[4971]: E1127 06:53:56.550585 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:53:56 crc kubenswrapper[4971]: E1127 06:53:56.550698 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.551406 4971 scope.go:117] "RemoveContainer" containerID="2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.624422 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.624477 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.624486 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.624513 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.624523 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:56Z","lastTransitionTime":"2025-11-27T06:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.727004 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.727030 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.727039 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.727051 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.727060 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:56Z","lastTransitionTime":"2025-11-27T06:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.784053 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-56nwb_47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/ovnkube-controller/2.log" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.786200 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerStarted","Data":"91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85"} Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.787166 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.798401 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:56Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.814156 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:29Z\\\",\\\"message\\\":\\\"ncer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1127 06:53:29.268703 6621 services_controller.go:451] Built service openshift-apiserver-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.38\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1127 06:53:29.268155 6621 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1127 06:53:29.267856 6621 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} name:Service_openshift-machine-api/machine-api-operator-machine-webhook_TCP_cluster 
options:{GoMap:map[event:f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:56Z is after 2025-08-24T17:21:41Z"
Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.825775 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237653db3d61407da4c98734dc9cc8e349e67fabab8ad7fde3ceda2cf15e2bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf345d1f97fcf7d514b9bb82e0cf811761950ad9bfe627b738b5271261ee0aa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:56Z is after 2025-08-24T17:21:41Z" Nov 27 
06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.831064 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.831088 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.831119 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.831132 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.831141 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:56Z","lastTransitionTime":"2025-11-27T06:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.834223 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0c2c36-71a4-49a3-b0a3-cd73bc42b9b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a1ad326792f4efe7ca0bbe3cb729cdb3b4047e1d142b95f425b47fd07250a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47e46ba3596addbd137a1dfaa457e183bd7543340d5a0a85b61ee97f7c1a6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08a
af09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47e46ba3596addbd137a1dfaa457e183bd7543340d5a0a85b61ee97f7c1a6cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:56Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.842862 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c5d5ac-1172-4d70-95cc-09a985cdccf9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://233f958888e7af4cbaf18ca0b743ae116a205a5d250fcb5d8d25c6c33cc6edfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be990eb8e6023cc976afaf7edaba3eed7bf119dd4b31378671564feaf9cdd5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0636fb6663c0b29767d749d85154ba8c5707c5022f211d815895bfc844d6d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5822703375f22f8adb588a790d4f00e78b8a2cd8fe3f902fe4732a133511d63a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5822703375f22f8adb588a790d4f00e78b8a2cd8fe3f902fe4732a133511d63a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:56Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.853338 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:56Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.862896 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:56Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.871778 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:56Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.883104 4971 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e4040
2aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:56Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.893452 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:56Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.906624 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f43093
75ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:56Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.924373 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2536efee5c523594e02d64a0d1113af4945a84c32d06c78f840ef292143578e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:52Z\\\",\\\"message\\\":\\\"2025-11-27T06:53:07+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_94bc62df-014f-47cf-be41-454f276551dd\\\\n2025-11-27T06:53:07+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_94bc62df-014f-47cf-be41-454f276551dd to /host/opt/cni/bin/\\\\n2025-11-27T06:53:07Z [verbose] multus-daemon started\\\\n2025-11-27T06:53:07Z [verbose] Readiness Indicator file check\\\\n2025-11-27T06:53:52Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:56Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.934464 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.934491 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.934500 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.934513 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.934522 4971 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:56Z","lastTransitionTime":"2025-11-27T06:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.936571 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:56Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.947208 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pdn5j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e710c835-f600-448f-a110-4ff4cef9d5f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pdn5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:56Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.960857 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:56Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.974938 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:56Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:56 crc kubenswrapper[4971]: I1127 06:53:56.991305 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:56Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.036750 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.036778 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.036787 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.036799 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.036809 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:57Z","lastTransitionTime":"2025-11-27T06:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.058145 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:57Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.077447 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:57Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.139048 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.139081 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.139099 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.139115 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.139125 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:57Z","lastTransitionTime":"2025-11-27T06:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.241735 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.241764 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.241771 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.241784 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.241793 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:57Z","lastTransitionTime":"2025-11-27T06:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.344504 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.344549 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.344559 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.344572 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.344580 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:57Z","lastTransitionTime":"2025-11-27T06:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.446675 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.446715 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.446726 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.446743 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.446755 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:57Z","lastTransitionTime":"2025-11-27T06:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.549101 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.549134 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.549143 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.549159 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.549168 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:57Z","lastTransitionTime":"2025-11-27T06:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.652480 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.652568 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.652580 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.652599 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.652614 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:57Z","lastTransitionTime":"2025-11-27T06:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.755798 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.755855 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.755872 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.755896 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.755914 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:57Z","lastTransitionTime":"2025-11-27T06:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.796966 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-56nwb_47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/ovnkube-controller/3.log" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.797589 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-56nwb_47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/ovnkube-controller/2.log" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.800048 4971 generic.go:334] "Generic (PLEG): container finished" podID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerID="91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85" exitCode=1 Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.800095 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerDied","Data":"91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85"} Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.800172 4971 scope.go:117] "RemoveContainer" containerID="2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.801112 4971 scope.go:117] "RemoveContainer" containerID="91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85" Nov 27 06:53:57 crc kubenswrapper[4971]: E1127 06:53:57.801349 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.815122 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:57Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.825335 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2536efee5c523594e02d64a0d1113af4945a84c32d06c78f840ef292143578e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:52Z\\\",\\\"message\\\":\\\"2025-11-27T06:53:07+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_94bc62df-014f-47cf-be41-454f276551dd\\\\n2025-11-27T06:53:07+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_94bc62df-014f-47cf-be41-454f276551dd to /host/opt/cni/bin/\\\\n2025-11-27T06:53:07Z [verbose] multus-daemon started\\\\n2025-11-27T06:53:07Z [verbose] Readiness Indicator file check\\\\n2025-11-27T06:53:52Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:57Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.835374 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:57Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.844735 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pdn5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e710c835-f600-448f-a110-4ff4cef9d5f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pdn5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:57Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.857425 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.857458 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.857474 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.857499 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.857509 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:57Z","lastTransitionTime":"2025-11-27T06:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.858089 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:57Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.867990 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:57Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.879410 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:57Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.896726 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106
ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:57Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.911322 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:57Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.924808 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:57Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.941865 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f979974fc6abc523b9818905ba1547fe4da8a7c17e5a149838788bb91940a27\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:29Z\\\",\\\"message\\\":\\\"ncer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1127 06:53:29.268703 6621 services_controller.go:451] Built service openshift-apiserver-operator/metrics cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.38\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1127 06:53:29.268155 6621 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1127 06:53:29.267856 6621 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} name:Service_openshift-machine-api/machine-api-operator-machine-webhook_TCP_cluster options:{GoMap:map[event:f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:57Z\\\",\\\"message\\\":\\\"reflector.go:311] Stopping reflector 
*v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1127 06:53:57.442154 6982 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1127 06:53:57.442265 6982 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1127 06:53:57.443053 6982 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1127 06:53:57.443079 6982 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1127 06:53:57.443123 6982 handler.go:208] Removed *v1.Node event handler 2\\\\nI1127 06:53:57.443650 6982 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1127 06:53:57.443742 6982 factory.go:656] Stopping watch factory\\\\nI1127 06:53:57.449072 6982 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1127 06:53:57.449088 6982 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1127 06:53:57.449139 6982 ovnkube.go:599] Stopped ovnkube\\\\nI1127 06:53:57.449161 6982 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1127 06:53:57.449225 6982 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:57Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.952975 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237653db3d61407da4c98734dc9cc8e349e67fabab8ad7fde3ceda2cf15e2bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf345d1f97fcf7d514b9bb82e0cf811761950ad9bfe627b738b5271261ee0aa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:57Z is after 2025-08-24T17:21:41Z" Nov 27 
06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.959926 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.959954 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.959963 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.959976 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.959985 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:57Z","lastTransitionTime":"2025-11-27T06:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.962946 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0c2c36-71a4-49a3-b0a3-cd73bc42b9b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a1ad326792f4efe7ca0bbe3cb729cdb3b4047e1d142b95f425b47fd07250a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47e46ba3596addbd137a1dfaa457e183bd7543340d5a0a85b61ee97f7c1a6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08a
af09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47e46ba3596addbd137a1dfaa457e183bd7543340d5a0a85b61ee97f7c1a6cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:57Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.973245 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c5d5ac-1172-4d70-95cc-09a985cdccf9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://233f958888e7af4cbaf18ca0b743ae116a205a5d250fcb5d8d25c6c33cc6edfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be990eb8e6023cc976afaf7edaba3eed7bf119dd4b31378671564feaf9cdd5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0636fb6663c0b29767d749d85154ba8c5707c5022f211d815895bfc844d6d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5822703375f22f8adb588a790d4f00e78b8a2cd8fe3f902fe4732a133511d63a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5822703375f22f8adb588a790d4f00e78b8a2cd8fe3f902fe4732a133511d63a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:57Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.984651 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:57Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:57 crc kubenswrapper[4971]: I1127 06:53:57.998350 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:57Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.009865 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:58Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.034114 4971 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e4040
2aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:58Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.048139 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:58Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.061923 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.061963 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.061975 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.061991 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.062003 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:58Z","lastTransitionTime":"2025-11-27T06:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.164846 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.165222 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.165323 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.165409 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.165494 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:58Z","lastTransitionTime":"2025-11-27T06:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.268628 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.268684 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.268701 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.268725 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.268740 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:58Z","lastTransitionTime":"2025-11-27T06:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.371263 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.371523 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.371788 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.371897 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.371995 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:58Z","lastTransitionTime":"2025-11-27T06:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.474464 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.474509 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.474521 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.474555 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.474570 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:58Z","lastTransitionTime":"2025-11-27T06:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.549704 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.549751 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.549710 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:53:58 crc kubenswrapper[4971]: E1127 06:53:58.549824 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.549699 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:53:58 crc kubenswrapper[4971]: E1127 06:53:58.549937 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:53:58 crc kubenswrapper[4971]: E1127 06:53:58.550096 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:53:58 crc kubenswrapper[4971]: E1127 06:53:58.550126 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.577116 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.577161 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.577172 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.577190 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.577204 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:58Z","lastTransitionTime":"2025-11-27T06:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.679992 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.680038 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.680050 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.680068 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.680079 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:58Z","lastTransitionTime":"2025-11-27T06:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.782711 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.782799 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.782825 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.782856 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.782879 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:58Z","lastTransitionTime":"2025-11-27T06:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.803790 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-56nwb_47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/ovnkube-controller/3.log" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.807790 4971 scope.go:117] "RemoveContainer" containerID="91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85" Nov 27 06:53:58 crc kubenswrapper[4971]: E1127 06:53:58.807930 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.825731 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:58Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.840915 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:58Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.852156 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:58Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.863430 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:58Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.877078 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"res
tartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" 
certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:58Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.885447 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.885493 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.885505 4971 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.885523 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.885553 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:58Z","lastTransitionTime":"2025-11-27T06:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.888352 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:58Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.903900 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:68
7fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"m
ountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:58Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.917357 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2536efee5c523594e02d64a0d1113af4945a84c32d06c78f840ef292143578e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:52Z\\\",\\\"message\\\":\\\"2025-11-27T06:53:07+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_94bc62df-014f-47cf-be41-454f276551dd\\\\n2025-11-27T06:53:07+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_94bc62df-014f-47cf-be41-454f276551dd to /host/opt/cni/bin/\\\\n2025-11-27T06:53:07Z [verbose] multus-daemon started\\\\n2025-11-27T06:53:07Z [verbose] Readiness Indicator file check\\\\n2025-11-27T06:53:52Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:58Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.928863 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:58Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.938183 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pdn5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e710c835-f600-448f-a110-4ff4cef9d5f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pdn5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:58Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.950209 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:58Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.962014 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:58Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.974941 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:58Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.987652 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.987701 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.987714 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.987732 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.987744 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:58Z","lastTransitionTime":"2025-11-27T06:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:58 crc kubenswrapper[4971]: I1127 06:53:58.993132 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Complet
ed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:58Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.004238 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c5d5ac-1172-4d70-95cc-09a985cdccf9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://233f958888e7af4cbaf18ca0b743ae116a205a5d250fcb5d8d25c6c33cc6edfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be990eb8e6023cc976afaf7edaba3eed7bf119dd4b31378671564feaf9cdd5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0636fb6663c0b29767d749d85154ba8c5707c5022f211d815895bfc844d6d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5822703375f22f8adb588a790d4f00e78b8a2cd8fe3f902fe4732a133511d63a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5822703375f22f8adb588a790d4f00e78b8a2cd8fe3f902fe4732a133511d63a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:59Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.017876 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:59Z is after 
2025-08-24T17:21:41Z" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.034836 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e
e9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:57Z\\\",\\\"message\\\":\\\"reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1127 06:53:57.442154 6982 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1127 06:53:57.442265 6982 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1127 06:53:57.443053 6982 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1127 06:53:57.443079 6982 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1127 06:53:57.443123 6982 handler.go:208] Removed *v1.Node event handler 2\\\\nI1127 06:53:57.443650 6982 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1127 06:53:57.443742 6982 factory.go:656] Stopping watch factory\\\\nI1127 06:53:57.449072 6982 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1127 06:53:57.449088 6982 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1127 06:53:57.449139 6982 ovnkube.go:599] Stopped ovnkube\\\\nI1127 06:53:57.449161 6982 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1127 06:53:57.449225 6982 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:59Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.045697 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237653db3d61407da4c98734dc9cc8e349e67fabab8ad7fde3ceda2cf15e2bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf345d1f97fcf7d514b9bb82e0cf811761950ad9bfe627b738b5271261ee0aa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:59Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.057234 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0c2c36-71a4-49a3-b0a3-cd73bc42b9b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a1ad326792f4efe7ca0bbe3cb729cdb3b4047e1d142b95f425b47fd07250a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47e46ba3596addbd137a1dfaa457e183bd7543340d5a0a85b61ee97f7c1a6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47e46ba3596addbd137a1dfaa457e183bd7543340d5a0a85b61ee97f7c1a6cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:59Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.090100 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.090136 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.090147 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.090163 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.090174 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:59Z","lastTransitionTime":"2025-11-27T06:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.192581 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.192629 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.192640 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.192657 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.192665 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:59Z","lastTransitionTime":"2025-11-27T06:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.216979 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.217026 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.217045 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.217063 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.217073 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:59Z","lastTransitionTime":"2025-11-27T06:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:59 crc kubenswrapper[4971]: E1127 06:53:59.234347 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:59Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.238642 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.238701 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.238712 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.238727 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.238737 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:59Z","lastTransitionTime":"2025-11-27T06:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:59 crc kubenswrapper[4971]: E1127 06:53:59.250861 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:59Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.255116 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.255161 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.255177 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.255200 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.255217 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:59Z","lastTransitionTime":"2025-11-27T06:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:59 crc kubenswrapper[4971]: E1127 06:53:59.273424 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:59Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.277271 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.277309 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.277321 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.277339 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.277354 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:59Z","lastTransitionTime":"2025-11-27T06:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:59 crc kubenswrapper[4971]: E1127 06:53:59.290195 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:59Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.293821 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.293860 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.293875 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.293893 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.293904 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:59Z","lastTransitionTime":"2025-11-27T06:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:59 crc kubenswrapper[4971]: E1127 06:53:59.306783 4971 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4602e4e9-64d4-4227-8212-1a84a264f109\\\",\\\"systemUUID\\\":\\\"d12f7ae2-c7c1-475e-a2cb-1f1e626e5071\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:53:59Z is after 2025-08-24T17:21:41Z" Nov 27 06:53:59 crc kubenswrapper[4971]: E1127 06:53:59.306909 4971 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.308650 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
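
Both node-status patch attempts above fail for the same reason: the API server cannot reach the node.network-node-identity.openshift.io validating webhook because the serving certificate at https://127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, long before the node's current time of 2025-11-27T06:53:59Z. A minimal Go sketch of a diagnostic (hypothetical, not part of the kubelet; the endpoint is taken from the error text) that dials the webhook and prints the peer certificate's validity window:

    package main

    import (
        "crypto/tls"
        "fmt"
        "log"
        "time"
    )

    func main() {
        // Endpoint copied from the webhook error above. Verification must be
        // skipped here, because failing verification is exactly the symptom
        // we want to look past to read the certificate itself.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            log.Fatal(err)
        }
        defer conn.Close()

        cert := conn.ConnectionState().PeerCertificates[0]
        fmt.Printf("subject:   %s\n", cert.Subject)
        fmt.Printf("notBefore: %s\n", cert.NotBefore)
        fmt.Printf("notAfter:  %s\n", cert.NotAfter)
        fmt.Printf("expired:   %v\n", time.Now().After(cert.NotAfter))
    }

Against the endpoint in this log the last line would report true until the certificate is rotated, which matches the "update node status exceeds retry count" failure recorded above.
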
event="NodeHasSufficientMemory" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.308697 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.308709 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.308729 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.308741 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:59Z","lastTransitionTime":"2025-11-27T06:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.412616 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.412662 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.412674 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.412695 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.412706 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:59Z","lastTransitionTime":"2025-11-27T06:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.514390 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.514425 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.514436 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.514457 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.514468 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:59Z","lastTransitionTime":"2025-11-27T06:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.617650 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.617694 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.617707 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.617725 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.617739 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:59Z","lastTransitionTime":"2025-11-27T06:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.720981 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.721055 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.721078 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.721111 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.721133 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:59Z","lastTransitionTime":"2025-11-27T06:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.824021 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.824063 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.824071 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.824086 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.824097 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:59Z","lastTransitionTime":"2025-11-27T06:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.926891 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.927852 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.928000 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.928133 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:53:59 crc kubenswrapper[4971]: I1127 06:53:59.928272 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:53:59Z","lastTransitionTime":"2025-11-27T06:53:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.032723 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.033079 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.033264 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.033411 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.033561 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:00Z","lastTransitionTime":"2025-11-27T06:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.136059 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.136381 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.136618 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.136827 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.137025 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:00Z","lastTransitionTime":"2025-11-27T06:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.240798 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.240884 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.240907 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.240938 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.240962 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:00Z","lastTransitionTime":"2025-11-27T06:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.343802 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.343864 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.343881 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.343905 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.343927 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:00Z","lastTransitionTime":"2025-11-27T06:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.448519 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.448699 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.448722 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.448784 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.448805 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:00Z","lastTransitionTime":"2025-11-27T06:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.549788 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.549826 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.549890 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.549977 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:54:00 crc kubenswrapper[4971]: E1127 06:54:00.549941 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:54:00 crc kubenswrapper[4971]: E1127 06:54:00.550361 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:54:00 crc kubenswrapper[4971]: E1127 06:54:00.550375 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:54:00 crc kubenswrapper[4971]: E1127 06:54:00.550470 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
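
The pod sync errors for network-check-target, network-metrics-daemon, and the networking console plugin all repeat one underlying cause: nothing has written a CNI configuration into /etc/kubernetes/cni/net.d/ yet, so the kubelet keeps the runtime network NotReady. A short sketch, assuming only the directory path named in the message, that polls for a config file the way one might while waiting for the network operator to come up:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
        "time"
    )

    func main() {
        // Directory named in the NetworkPluginNotReady message above.
        const netDir = "/etc/kubernetes/cni/net.d"
        for {
            // CNI configs land here as *.conf or *.conflist files.
            matches, err := filepath.Glob(filepath.Join(netDir, "*.conf*"))
            if err != nil {
                fmt.Fprintln(os.Stderr, err)
                os.Exit(1)
            }
            if len(matches) > 0 {
                fmt.Println("CNI config present:", matches)
                return
            }
            fmt.Println("no CNI config yet; network plugin has not started")
            time.Sleep(2 * time.Second)
        }
    }
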
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.551924 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.551953 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.551963 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.551976 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.551984 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:00Z","lastTransitionTime":"2025-11-27T06:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.654763 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.654798 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.654809 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.654825 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.654835 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:00Z","lastTransitionTime":"2025-11-27T06:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.757772 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.757866 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.757890 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.757924 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.757945 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:00Z","lastTransitionTime":"2025-11-27T06:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.860109 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.860160 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.860173 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.860196 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.860209 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:00Z","lastTransitionTime":"2025-11-27T06:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.962949 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.962982 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.962992 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.963006 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:00 crc kubenswrapper[4971]: I1127 06:54:00.963017 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:00Z","lastTransitionTime":"2025-11-27T06:54:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.065814 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.066215 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.066411 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.066598 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.066946 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:01Z","lastTransitionTime":"2025-11-27T06:54:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.169362 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.169399 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.169409 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.169423 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.169432 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:01Z","lastTransitionTime":"2025-11-27T06:54:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.272122 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.272191 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.272210 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.272237 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.272255 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:01Z","lastTransitionTime":"2025-11-27T06:54:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.374909 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.374975 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.374994 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.375019 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.375037 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:01Z","lastTransitionTime":"2025-11-27T06:54:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.478277 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.478478 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.478510 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.478605 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.478638 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:01Z","lastTransitionTime":"2025-11-27T06:54:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.582278 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.582327 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.582340 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.582363 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.582375 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:01Z","lastTransitionTime":"2025-11-27T06:54:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.684648 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.684839 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.684966 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.685067 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.685180 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:01Z","lastTransitionTime":"2025-11-27T06:54:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.966832 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.966889 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.966912 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.966938 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:01 crc kubenswrapper[4971]: I1127 06:54:01.966956 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:01Z","lastTransitionTime":"2025-11-27T06:54:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.075406 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.076080 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.076126 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.076261 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.076281 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:02Z","lastTransitionTime":"2025-11-27T06:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.179339 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.179387 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.179404 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.179428 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.179444 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:02Z","lastTransitionTime":"2025-11-27T06:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.283724 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.283779 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.283799 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.283823 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.283842 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:02Z","lastTransitionTime":"2025-11-27T06:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.386064 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.386151 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.386169 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.386191 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.386207 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:02Z","lastTransitionTime":"2025-11-27T06:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.488635 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.488675 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.488688 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.488706 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.488719 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:02Z","lastTransitionTime":"2025-11-27T06:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.554252 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.554780 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:54:02 crc kubenswrapper[4971]: E1127 06:54:02.555088 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.555178 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:54:02 crc kubenswrapper[4971]: E1127 06:54:02.555279 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.555340 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:54:02 crc kubenswrapper[4971]: E1127 06:54:02.555410 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
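
Every kubenswrapper entry in this capture shares the klog header layout Lmmdd hh:mm:ss.uuuuuu pid file:line], where L is the severity (I, W, E, or F). A small sketch for pulling those fields out when post-processing this file; the regular expression is an assumption fitted to the lines above, not klog's own parser:

    package main

    import (
        "fmt"
        "regexp"
    )

    // Matches the klog header used throughout this log:
    // severity, MMDD, HH:MM:SS.micros, pid, file:line.
    var klogHeader = regexp.MustCompile(
        `^([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d{6})\s+(\d+) ([\w./-]+:\d+)\]`)

    func main() {
        // Fragment copied from an entry above.
        line := `E1127 06:54:02.555088 4971 pod_workers.go:1301] "Error syncing pod, skipping"`
        m := klogHeader.FindStringSubmatch(line)
        if m == nil {
            fmt.Println("not a klog line")
            return
        }
        fmt.Printf("severity=%s date=%s time=%s pid=%s source=%s\n",
            m[1], m[2], m[3], m[4], m[5])
    }
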
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:54:02 crc kubenswrapper[4971]: E1127 06:54:02.555677 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.575852 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c5d5ac-1172-4d70-95cc-09a985cdccf9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://233f958888e7af4cbaf18ca0b743ae116a205a5d250fcb5d8d25c6c33cc6edfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be990eb8e6023cc976afaf7edaba3eed7bf119dd4b31378671564feaf9cdd5c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0636fb6663c0b29767d749d85154ba8c5707c5022f211d815895bfc844d6d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/op
enshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5822703375f22f8adb588a790d4f00e78b8a2cd8fe3f902fe4732a133511d63a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5822703375f22f8adb588a790d4f00e78b8a2cd8fe3f902fe4732a133511d63a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:54:02Z is after 2025-08-24T17:21:41Z" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.591761 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.591850 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.591868 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.591891 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.591913 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:02Z","lastTransitionTime":"2025-11-27T06:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.599199 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e5be4571b65d35e7a2d2888f250e6cf6e0c105b598047e2236b3ec90bbdbee5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:54:02Z is after 2025-08-24T17:21:41Z" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.631516 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:57Z\\\",\\\"message\\\":\\\"reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1127 06:53:57.442154 6982 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1127 06:53:57.442265 6982 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1127 06:53:57.443053 6982 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1127 06:53:57.443079 6982 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1127 06:53:57.443123 6982 handler.go:208] Removed *v1.Node event handler 2\\\\nI1127 06:53:57.443650 6982 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1127 06:53:57.443742 6982 factory.go:656] Stopping watch factory\\\\nI1127 06:53:57.449072 6982 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1127 06:53:57.449088 6982 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1127 06:53:57.449139 6982 ovnkube.go:599] Stopped ovnkube\\\\nI1127 06:53:57.449161 6982 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1127 06:53:57.449225 6982 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9lhg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-56nwb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:54:02Z is after 2025-08-24T17:21:41Z" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.649729 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22ca58c-5169-4ae7-ac50-a6cfb48fa334\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://237653db3d61407da4c98734dc9cc8e349e67fabab8ad7fde3ceda2cf15e2bd1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf345d1f97fcf7d514b9bb82e0cf811761950ad9bfe627b738b5271261ee0aa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzkwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-nxjns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:54:02Z is after 2025-08-24T17:21:41Z" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.665414 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dd0c2c36-71a4-49a3-b0a3-cd73bc42b9b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a1ad326792f4efe7ca0bbe3cb729cdb3b4047e1d142b95f425b47fd07250a7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47e46ba3596addbd137a1dfaa457e183bd7543340d5a0a85b61ee97f7c1a6cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47e46ba3596addbd137a1dfaa457e183bd7543340d5a0a85b61ee97f7c1a6cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:54:02Z is after 2025-08-24T17:21:41Z" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.688876 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"256e133b-44a7-4c99-bf9d-0910e40231b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d44228ae00b2862f33be4bfc87be6113033094e712f6ebea6d7456ba7f3b517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e73ae6b810120c39d7af8e5c3f476f65f780ac676e96a9d8bbd780a79049ab04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://607a37892a98e94a37134aff73aab8d44d2c5dbc6054f2b6066af9a350f25b23\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:54:02Z is after 2025-08-24T17:21:41Z" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.697049 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.697119 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.697145 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.697177 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.697202 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:02Z","lastTransitionTime":"2025-11-27T06:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.710895 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c02f3806e673886030fe506e4903bfa7f383bdc7130e4f3d251b5851cb99fb52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3d1fb1c5f4949857e54218b78eb7d2a695aa1baf7fe0d4e26888cad2e2cdb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:54:02Z is after 2025-08-24T17:21:41Z" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.727935 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:54:02Z is after 2025-08-24T17:21:41Z" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.743493 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ab8c2ef-d82b-4396-919d-8550cc2e24d7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://673830b15fa2749a267b5b4e1116f8d7c2cbe419b963a87da48dfa8fe9eaf8c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxzdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdz6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:54:02Z is after 2025-08-24T17:21:41Z" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.759027 4971 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52694deb-5a3b-4131-be17-d81560fac783\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e4040
2aa4e6cca432940e624af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"ed a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\"\\\\nI1127 06:53:01.687702 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1127 06:53:02.478794 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1127 06:53:02.478820 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1127 06:53:02.478850 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1127 06:53:02.478856 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1127 06:53:02.511358 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1127 06:53:02.511475 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511499 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1127 06:53:02.511521 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1127 06:53:02.511559 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1127 06:53:02.511581 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1127 06:53:02.511603 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1127 06:53:02.511767 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1127 06:53:02.517623 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-3536070107/tls.crt::/tmp/serving-cert-3536070107/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764226365\\\\\\\\\\\\\\\" (2025-11-27 06:52:45 +0000 UTC to 2025-12-27 06:52:46 +0000 UTC (now=2025-11-27 06:53:02.517594908 +0000 UTC))\\\\\\\"\\\\nF1127 06:53:02.517671 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:54:02Z is after 2025-08-24T17:21:41Z" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.772211 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-srcg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ea5e8b6-6a19-49f0-8107-d4cb93d52889\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db8ead1e3b35d86c894bb1067d64e31f7ac2ec518180db02176574eb245fafb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kwcr9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-srcg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:54:02Z is after 2025-08-24T17:21:41Z" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.788593 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6ad47ab-7d74-49d8-8cd7-3261e36837a9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://239d7a81aa3003faaae51b9e8f67d4ba1495328340befcc2b962804e376c9c18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e3279fbc0886a82b71a15d33d4e1b93b8a975560d7579f4309375ae81164148\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://16f18437454e8fd06326fccf79f747fb63665a5595a854ad0e246b14831f2661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21272b2f6bfbbf8c74a88c2ef676bdd6837499c8940c6051e4148bf4a4ce65d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3d2cfc76671b529149df59de22f290759f63e46c14c1a3801133cbfa6058263\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d783d483a65c321ed5e4d44c8356d5bd0e8f91195e530a91fe46e326c860b303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8845851dd0df71af1679f4f62e05192d3ba8395b338b5b65504a142f443e2fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:53:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc4z8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w2fnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:54:02Z is after 2025-08-24T17:21:41Z" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.799941 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.799994 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:02 crc 
kubenswrapper[4971]: I1127 06:54:02.800012 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.800035 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.800052 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:02Z","lastTransitionTime":"2025-11-27T06:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.806569 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-lr9p6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2536efee5c523594e02d64a0d1113af4945a84c32d06c78f840ef292143578e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-27T06:53:52Z\\\",\\\"message\\\":\\\"2025-11-27T06:53:07+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_94bc62df-014f-47cf-be41-454f276551dd\\\\n2025-11-27T06:53:07+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_94bc62df-014f-47cf-be41-454f276551dd to /host/opt/cni/bin/\\\\n2025-11-27T06:53:07Z [verbose] multus-daemon started\\\\n2025-11-27T06:53:07Z [verbose] Readiness Indicator file check\\\\n2025-11-27T06:53:52Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5pmn4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-lr9p6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:54:02Z is after 2025-08-24T17:21:41Z" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.827117 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckzrr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ff21e09-9978-472a-a7e5-3ff3b5d1d9e4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc0de2fe198ee71bf786a69fa44fa2bd1c0d29e16391fdafd96bb6b062f6f6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xx7nr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:05Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckzrr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:54:02Z is after 2025-08-24T17:21:41Z" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.837274 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pdn5j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e710c835-f600-448f-a110-4ff4cef9d5f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:16Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rj8cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:53:16Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pdn5j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:54:02Z is after 2025-08-24T17:21:41Z" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.850154 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:54:02Z is after 2025-08-24T17:21:41Z" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.861084 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:54:02Z is after 2025-08-24T17:21:41Z" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.872798 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0609bb483c2769bba8b86b1692fd22e002dc73a3655a931c4bc4f7f378b9649f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:53:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:54:02Z is after 2025-08-24T17:21:41Z" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.897098 4971 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f110061b-13b1-4eae-b031-1f7899d7728d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:53:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-27T06:52:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ae6e3b39e688e045045a2a5b02dd0c3c7e7567657e0a9dff4d16f4f818e9e984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0568171255f8d7e993c025aece8b10eb0303efde25bc85b46f3b4b2c014ec66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ffb860fe1ce8cfa20be499da1932b4c5286cfdcc70418e7cdf5785cf746254c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dda9554712ec04cb4c9616ee62cb0e5e2cf106
ba9c63ab411fb07fb892a1897\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09241254c68f6a129398092fe87bda15b581be840a5cf4034551b5f881042b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-27T06:52:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b2198dc52243188b0d97cb95b063910afafce690502e9e25abbbe0bea2caf8ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c643d58f1e1ebaff70fea98c88d4bca47c5adc8d36cfed17e143088089449de7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:44Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5b78b607c2838d514052fd656c88e193c36a20af3edd1b8926834964db30cc18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-27T06:52:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-27T06:52:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-27T06:52:42Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-27T06:54:02Z is after 2025-08-24T17:21:41Z" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.903409 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.903477 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.903499 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.903557 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:02 crc kubenswrapper[4971]: I1127 06:54:02.903584 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:02Z","lastTransitionTime":"2025-11-27T06:54:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.006843 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.006899 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.006916 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.006938 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.006955 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:03Z","lastTransitionTime":"2025-11-27T06:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.109001 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.109061 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.109080 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.109105 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.109122 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:03Z","lastTransitionTime":"2025-11-27T06:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.212177 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.212238 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.212255 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.212278 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.212295 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:03Z","lastTransitionTime":"2025-11-27T06:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.314813 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.314901 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.314930 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.314965 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.314991 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:03Z","lastTransitionTime":"2025-11-27T06:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.418334 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.418388 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.418399 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.418419 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.418431 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:03Z","lastTransitionTime":"2025-11-27T06:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.521183 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.521227 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.521238 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.521255 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.521266 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:03Z","lastTransitionTime":"2025-11-27T06:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.624030 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.624071 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.624083 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.624100 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.624113 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:03Z","lastTransitionTime":"2025-11-27T06:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.727359 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.727396 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.727407 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.727424 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.727435 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:03Z","lastTransitionTime":"2025-11-27T06:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.829739 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.829797 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.829808 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.829823 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.829834 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:03Z","lastTransitionTime":"2025-11-27T06:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.932954 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.933025 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.933043 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.933068 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:03 crc kubenswrapper[4971]: I1127 06:54:03.933085 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:03Z","lastTransitionTime":"2025-11-27T06:54:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.035289 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.035334 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.035345 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.035363 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.035374 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:04Z","lastTransitionTime":"2025-11-27T06:54:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.138100 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.138131 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.138142 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.138159 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.138179 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:04Z","lastTransitionTime":"2025-11-27T06:54:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.241684 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.241728 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.241738 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.241755 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.241764 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:04Z","lastTransitionTime":"2025-11-27T06:54:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.344943 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.344995 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.345013 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.345039 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.345059 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:04Z","lastTransitionTime":"2025-11-27T06:54:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.447948 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.447992 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.448007 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.448029 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.448045 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:04Z","lastTransitionTime":"2025-11-27T06:54:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.549384 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.549439 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.549559 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.549400 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:54:04 crc kubenswrapper[4971]: E1127 06:54:04.549659 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:54:04 crc kubenswrapper[4971]: E1127 06:54:04.549841 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:54:04 crc kubenswrapper[4971]: E1127 06:54:04.549934 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:54:04 crc kubenswrapper[4971]: E1127 06:54:04.549985 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.551606 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.551656 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.551667 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.551684 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.551722 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:04Z","lastTransitionTime":"2025-11-27T06:54:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.654555 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.654606 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.654621 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.654644 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.654661 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:04Z","lastTransitionTime":"2025-11-27T06:54:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.758032 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.758085 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.758104 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.758129 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.758151 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:04Z","lastTransitionTime":"2025-11-27T06:54:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.861556 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.861601 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.861612 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.861776 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.861793 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:04Z","lastTransitionTime":"2025-11-27T06:54:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.964165 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.964211 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.964220 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.964235 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:04 crc kubenswrapper[4971]: I1127 06:54:04.964245 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:04Z","lastTransitionTime":"2025-11-27T06:54:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.067071 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.067113 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.067124 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.067140 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.067152 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:05Z","lastTransitionTime":"2025-11-27T06:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.170238 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.170287 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.170301 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.170321 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.170335 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:05Z","lastTransitionTime":"2025-11-27T06:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.272954 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.273009 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.273031 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.273063 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.273087 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:05Z","lastTransitionTime":"2025-11-27T06:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.375513 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.375570 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.375581 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.375597 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.375607 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:05Z","lastTransitionTime":"2025-11-27T06:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.478266 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.478312 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.478330 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.478350 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.478365 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:05Z","lastTransitionTime":"2025-11-27T06:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.581480 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.581589 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.581623 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.581653 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.581672 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:05Z","lastTransitionTime":"2025-11-27T06:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.684596 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.684670 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.684691 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.684718 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.684736 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:05Z","lastTransitionTime":"2025-11-27T06:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.788360 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.788412 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.788421 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.788436 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.788446 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:05Z","lastTransitionTime":"2025-11-27T06:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.891295 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.891370 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.891393 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.891423 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.891450 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:05Z","lastTransitionTime":"2025-11-27T06:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.993624 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.993665 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.993675 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.993691 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:05 crc kubenswrapper[4971]: I1127 06:54:05.993702 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:05Z","lastTransitionTime":"2025-11-27T06:54:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.096158 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.096212 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.096226 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.096245 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.096260 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:06Z","lastTransitionTime":"2025-11-27T06:54:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.198934 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.198979 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.198991 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.199009 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.199022 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:06Z","lastTransitionTime":"2025-11-27T06:54:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.302278 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.302325 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.302343 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.302364 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.302422 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:06Z","lastTransitionTime":"2025-11-27T06:54:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.309961 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:54:06 crc kubenswrapper[4971]: E1127 06:54:06.310209 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.310177623 +0000 UTC m=+148.502221581 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.404656 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.404732 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.404743 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.404760 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.404772 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:06Z","lastTransitionTime":"2025-11-27T06:54:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.411362 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.411438 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.411486 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.411527 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:54:06 crc kubenswrapper[4971]: E1127 06:54:06.411622 4971 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Nov 27 06:54:06 crc kubenswrapper[4971]: E1127 06:54:06.411729 4971 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 27 06:54:06 crc kubenswrapper[4971]: E1127 06:54:06.411776 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.411748425 +0000 UTC m=+148.603792383 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 27 06:54:06 crc kubenswrapper[4971]: E1127 06:54:06.411874 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.411858838 +0000 UTC m=+148.603902786 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 27 06:54:06 crc kubenswrapper[4971]: E1127 06:54:06.411778 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 27 06:54:06 crc kubenswrapper[4971]: E1127 06:54:06.411917 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 27 06:54:06 crc kubenswrapper[4971]: E1127 06:54:06.411937 4971 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:54:06 crc kubenswrapper[4971]: E1127 06:54:06.411804 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 27 06:54:06 crc kubenswrapper[4971]: E1127 06:54:06.411999 4971 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 27 06:54:06 crc kubenswrapper[4971]: E1127 06:54:06.412022 4971 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:54:06 crc kubenswrapper[4971]: E1127 06:54:06.412030 4971 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.412014773 +0000 UTC m=+148.604058731 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:54:06 crc kubenswrapper[4971]: E1127 06:54:06.412118 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.412098455 +0000 UTC m=+148.604142413 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.507212 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.507255 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.507266 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.507285 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.507295 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:06Z","lastTransitionTime":"2025-11-27T06:54:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.550255 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.550260 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.550328 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:54:06 crc kubenswrapper[4971]: E1127 06:54:06.550401 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.550556 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:54:06 crc kubenswrapper[4971]: E1127 06:54:06.550619 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:54:06 crc kubenswrapper[4971]: E1127 06:54:06.550732 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:54:06 crc kubenswrapper[4971]: E1127 06:54:06.550835 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.609245 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.609291 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.609307 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.609326 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.609341 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:06Z","lastTransitionTime":"2025-11-27T06:54:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.712863 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.712977 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.712995 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.713024 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.713066 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:06Z","lastTransitionTime":"2025-11-27T06:54:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.818247 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.818727 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.818741 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.818758 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.818772 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:06Z","lastTransitionTime":"2025-11-27T06:54:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.922580 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.922748 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.922776 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.922806 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:06 crc kubenswrapper[4971]: I1127 06:54:06.922830 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:06Z","lastTransitionTime":"2025-11-27T06:54:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.025921 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.025969 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.025981 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.025999 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.026011 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:07Z","lastTransitionTime":"2025-11-27T06:54:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.128568 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.128606 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.128637 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.128652 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.128662 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:07Z","lastTransitionTime":"2025-11-27T06:54:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.231759 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.231813 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.231822 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.231838 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.231850 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:07Z","lastTransitionTime":"2025-11-27T06:54:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.338730 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.338832 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.338859 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.338892 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.338924 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:07Z","lastTransitionTime":"2025-11-27T06:54:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.443320 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.443377 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.443392 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.443412 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.443426 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:07Z","lastTransitionTime":"2025-11-27T06:54:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.547265 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.547353 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.547377 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.547424 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.547448 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:07Z","lastTransitionTime":"2025-11-27T06:54:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.652296 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.652347 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.652356 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.652371 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.652381 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:07Z","lastTransitionTime":"2025-11-27T06:54:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.755966 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.756032 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.756051 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.756080 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.756100 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:07Z","lastTransitionTime":"2025-11-27T06:54:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.859081 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.859136 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.859156 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.859182 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.859241 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:07Z","lastTransitionTime":"2025-11-27T06:54:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.962182 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.962224 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.962234 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.962249 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:54:07 crc kubenswrapper[4971]: I1127 06:54:07.962260 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:07Z","lastTransitionTime":"2025-11-27T06:54:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.065039 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.065079 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.065098 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.065117 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.065128 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:08Z","lastTransitionTime":"2025-11-27T06:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.167245 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.168164 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.168234 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.168256 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.168270 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:08Z","lastTransitionTime":"2025-11-27T06:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.270300 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.270357 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.270367 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.270382 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.270390 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:08Z","lastTransitionTime":"2025-11-27T06:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.372634 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.372700 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.372719 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.372743 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.372760 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:08Z","lastTransitionTime":"2025-11-27T06:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.475883 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.475946 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.475968 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.475994 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.476014 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:08Z","lastTransitionTime":"2025-11-27T06:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.550379 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.550513 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:54:08 crc kubenswrapper[4971]: E1127 06:54:08.550728 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.550768 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.550840 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:54:08 crc kubenswrapper[4971]: E1127 06:54:08.551049 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 27 06:54:08 crc kubenswrapper[4971]: E1127 06:54:08.551216 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 27 06:54:08 crc kubenswrapper[4971]: E1127 06:54:08.551306 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.579782 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.579826 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.579837 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.579856 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.579870 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:08Z","lastTransitionTime":"2025-11-27T06:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.682599 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.682655 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.682668 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.682687 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.682700 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:08Z","lastTransitionTime":"2025-11-27T06:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.785613 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.785651 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.785666 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.785684 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.785695 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:08Z","lastTransitionTime":"2025-11-27T06:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.888376 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.888434 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.888448 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.888469 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.888481 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:08Z","lastTransitionTime":"2025-11-27T06:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.992229 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.992277 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.992290 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.992312 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:54:08 crc kubenswrapper[4971]: I1127 06:54:08.992326 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:08Z","lastTransitionTime":"2025-11-27T06:54:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.095313 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.095379 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.095401 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.095427 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.095445 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:09Z","lastTransitionTime":"2025-11-27T06:54:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.198906 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.199019 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.199038 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.199066 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.199084 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:09Z","lastTransitionTime":"2025-11-27T06:54:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.302246 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.302327 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.302347 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.302378 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.302397 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:09Z","lastTransitionTime":"2025-11-27T06:54:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.352668 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.352759 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.352781 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.353157 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.353396 4971 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-27T06:54:09Z","lastTransitionTime":"2025-11-27T06:54:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.410108 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd"]
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.410604 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.412361 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.412641 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.413141 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.413269 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.446991 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=63.446967294 podStartE2EDuration="1m3.446967294s" podCreationTimestamp="2025-11-27 06:53:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:54:09.445147931 +0000 UTC m=+87.637191919" watchObservedRunningTime="2025-11-27 06:54:09.446967294 +0000 UTC m=+87.639011212"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.451658 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3db31353-0fe7-4f7f-8b68-187fdec1bec3-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-z9qfd\" (UID: \"3db31353-0fe7-4f7f-8b68-187fdec1bec3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.451735 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3db31353-0fe7-4f7f-8b68-187fdec1bec3-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-z9qfd\" (UID: \"3db31353-0fe7-4f7f-8b68-187fdec1bec3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.451777 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3db31353-0fe7-4f7f-8b68-187fdec1bec3-service-ca\") pod \"cluster-version-operator-5c965bbfc6-z9qfd\" (UID: \"3db31353-0fe7-4f7f-8b68-187fdec1bec3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.451795 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/3db31353-0fe7-4f7f-8b68-187fdec1bec3-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-z9qfd\" (UID: \"3db31353-0fe7-4f7f-8b68-187fdec1bec3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.451818 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/3db31353-0fe7-4f7f-8b68-187fdec1bec3-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-z9qfd\" (UID: \"3db31353-0fe7-4f7f-8b68-187fdec1bec3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.542169 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=19.542155083 podStartE2EDuration="19.542155083s" podCreationTimestamp="2025-11-27 06:53:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:54:09.541553776 +0000 UTC m=+87.733597694" watchObservedRunningTime="2025-11-27 06:54:09.542155083 +0000 UTC m=+87.734199001"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.542300 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-nxjns" podStartSLOduration=66.542295537 podStartE2EDuration="1m6.542295537s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:54:09.524687032 +0000 UTC m=+87.716730990" watchObservedRunningTime="2025-11-27 06:54:09.542295537 +0000 UTC m=+87.734339455"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.550500 4971 scope.go:117] "RemoveContainer" containerID="91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85"
Nov 27 06:54:09 crc kubenswrapper[4971]: E1127 06:54:09.550697 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.552577 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3db31353-0fe7-4f7f-8b68-187fdec1bec3-service-ca\") pod \"cluster-version-operator-5c965bbfc6-z9qfd\" (UID: \"3db31353-0fe7-4f7f-8b68-187fdec1bec3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.552607 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/3db31353-0fe7-4f7f-8b68-187fdec1bec3-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-z9qfd\" (UID: \"3db31353-0fe7-4f7f-8b68-187fdec1bec3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.552627 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/3db31353-0fe7-4f7f-8b68-187fdec1bec3-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-z9qfd\" (UID: \"3db31353-0fe7-4f7f-8b68-187fdec1bec3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.552655 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3db31353-0fe7-4f7f-8b68-187fdec1bec3-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-z9qfd\" (UID: \"3db31353-0fe7-4f7f-8b68-187fdec1bec3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.552697 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3db31353-0fe7-4f7f-8b68-187fdec1bec3-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-z9qfd\" (UID: \"3db31353-0fe7-4f7f-8b68-187fdec1bec3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.552957 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/3db31353-0fe7-4f7f-8b68-187fdec1bec3-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-z9qfd\" (UID: \"3db31353-0fe7-4f7f-8b68-187fdec1bec3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.552998 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/3db31353-0fe7-4f7f-8b68-187fdec1bec3-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-z9qfd\" (UID: \"3db31353-0fe7-4f7f-8b68-187fdec1bec3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.553831 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3db31353-0fe7-4f7f-8b68-187fdec1bec3-service-ca\") pod \"cluster-version-operator-5c965bbfc6-z9qfd\" (UID: \"3db31353-0fe7-4f7f-8b68-187fdec1bec3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.556500 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=36.556480544 podStartE2EDuration="36.556480544s" podCreationTimestamp="2025-11-27 06:53:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:54:09.556228846 +0000 UTC m=+87.748272774" watchObservedRunningTime="2025-11-27 06:54:09.556480544 +0000 UTC m=+87.748524462"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.559754 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3db31353-0fe7-4f7f-8b68-187fdec1bec3-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-z9qfd\" (UID: \"3db31353-0fe7-4f7f-8b68-187fdec1bec3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.568180 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3db31353-0fe7-4f7f-8b68-187fdec1bec3-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-z9qfd\" (UID: \"3db31353-0fe7-4f7f-8b68-187fdec1bec3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.613799 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podStartSLOduration=67.613770926 podStartE2EDuration="1m7.613770926s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:54:09.611086989 +0000 UTC m=+87.803130937" watchObservedRunningTime="2025-11-27 06:54:09.613770926 +0000 UTC m=+87.805814874"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.630184 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=67.630157386 podStartE2EDuration="1m7.630157386s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:54:09.629038004 +0000 UTC m=+87.821081922" watchObservedRunningTime="2025-11-27 06:54:09.630157386 +0000 UTC m=+87.822201314"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.664970 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=66.664948794 podStartE2EDuration="1m6.664948794s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:54:09.647250446 +0000 UTC m=+87.839294384" watchObservedRunningTime="2025-11-27 06:54:09.664948794 +0000 UTC m=+87.856992712"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.683825 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-lr9p6" podStartSLOduration=67.683808975 podStartE2EDuration="1m7.683808975s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:54:09.682893008 +0000 UTC m=+87.874936936" watchObservedRunningTime="2025-11-27 06:54:09.683808975 +0000 UTC m=+87.875852893"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.707050 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-ckzrr" podStartSLOduration=67.70703118 podStartE2EDuration="1m7.70703118s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:54:09.695340765 +0000 UTC m=+87.887384683" watchObservedRunningTime="2025-11-27 06:54:09.70703118 +0000 UTC m=+87.899075098"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.726522 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.750248 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-srcg7" podStartSLOduration=67.750231119 podStartE2EDuration="1m7.750231119s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:54:09.749505978 +0000 UTC m=+87.941549896" watchObservedRunningTime="2025-11-27 06:54:09.750231119 +0000 UTC m=+87.942275037"
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.998987 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd" event={"ID":"3db31353-0fe7-4f7f-8b68-187fdec1bec3","Type":"ContainerStarted","Data":"c88b4aa4cb196bc0a331ecf87b8b8226444094a12ab206c0a637f05ab5557770"}
Nov 27 06:54:09 crc kubenswrapper[4971]: I1127 06:54:09.999383 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd" event={"ID":"3db31353-0fe7-4f7f-8b68-187fdec1bec3","Type":"ContainerStarted","Data":"989a9befcc9e32d3c10e0da49636eb7a8522b8568dfa665de09036ab7eba4096"}
Nov 27 06:54:10 crc kubenswrapper[4971]: I1127 06:54:10.013903 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-z9qfd" podStartSLOduration=68.013887449 podStartE2EDuration="1m8.013887449s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:54:10.013504578 +0000 UTC m=+88.205548536" watchObservedRunningTime="2025-11-27 06:54:10.013887449 +0000 UTC m=+88.205931367"
Nov 27 06:54:10 crc kubenswrapper[4971]: I1127 06:54:10.014564 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-w2fnl" podStartSLOduration=68.014557368 podStartE2EDuration="1m8.014557368s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:54:09.778135209 +0000 UTC m=+87.970179167" watchObservedRunningTime="2025-11-27 06:54:10.014557368 +0000 UTC m=+88.206601296"
Nov 27 06:54:10 crc kubenswrapper[4971]: I1127 06:54:10.549572 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:54:10 crc kubenswrapper[4971]: I1127 06:54:10.549675 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:54:10 crc kubenswrapper[4971]: I1127 06:54:10.549821 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:54:10 crc kubenswrapper[4971]: E1127 06:54:10.549963 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 27 06:54:10 crc kubenswrapper[4971]: I1127 06:54:10.549991 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:54:10 crc kubenswrapper[4971]: E1127 06:54:10.550120 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 27 06:54:10 crc kubenswrapper[4971]: E1127 06:54:10.550256 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 27 06:54:10 crc kubenswrapper[4971]: E1127 06:54:10.550317 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6"
Nov 27 06:54:12 crc kubenswrapper[4971]: I1127 06:54:12.552555 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:54:12 crc kubenswrapper[4971]: E1127 06:54:12.552661 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 27 06:54:12 crc kubenswrapper[4971]: I1127 06:54:12.553249 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:54:12 crc kubenswrapper[4971]: E1127 06:54:12.553969 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 27 06:54:12 crc kubenswrapper[4971]: I1127 06:54:12.554114 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:54:12 crc kubenswrapper[4971]: I1127 06:54:12.554149 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:54:12 crc kubenswrapper[4971]: E1127 06:54:12.554198 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 27 06:54:12 crc kubenswrapper[4971]: E1127 06:54:12.554361 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6"
Nov 27 06:54:14 crc kubenswrapper[4971]: I1127 06:54:14.549601 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:54:14 crc kubenswrapper[4971]: I1127 06:54:14.549662 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:54:14 crc kubenswrapper[4971]: E1127 06:54:14.549735 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 27 06:54:14 crc kubenswrapper[4971]: I1127 06:54:14.549656 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:54:14 crc kubenswrapper[4971]: E1127 06:54:14.549807 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6"
Nov 27 06:54:14 crc kubenswrapper[4971]: E1127 06:54:14.549881 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 27 06:54:14 crc kubenswrapper[4971]: I1127 06:54:14.549906 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:54:14 crc kubenswrapper[4971]: E1127 06:54:14.549982 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 27 06:54:16 crc kubenswrapper[4971]: I1127 06:54:16.549259 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:54:16 crc kubenswrapper[4971]: I1127 06:54:16.549360 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:54:16 crc kubenswrapper[4971]: I1127 06:54:16.549480 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:54:16 crc kubenswrapper[4971]: E1127 06:54:16.549574 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6"
Nov 27 06:54:16 crc kubenswrapper[4971]: I1127 06:54:16.549696 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:54:16 crc kubenswrapper[4971]: E1127 06:54:16.549841 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 27 06:54:16 crc kubenswrapper[4971]: E1127 06:54:16.550054 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 27 06:54:16 crc kubenswrapper[4971]: E1127 06:54:16.550292 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 27 06:54:18 crc kubenswrapper[4971]: I1127 06:54:18.549255 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:54:18 crc kubenswrapper[4971]: I1127 06:54:18.549332 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:54:18 crc kubenswrapper[4971]: I1127 06:54:18.549360 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:54:18 crc kubenswrapper[4971]: E1127 06:54:18.549438 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 27 06:54:18 crc kubenswrapper[4971]: I1127 06:54:18.549487 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:54:18 crc kubenswrapper[4971]: E1127 06:54:18.549591 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6"
Nov 27 06:54:18 crc kubenswrapper[4971]: E1127 06:54:18.549696 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 27 06:54:18 crc kubenswrapper[4971]: E1127 06:54:18.549757 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 27 06:54:20 crc kubenswrapper[4971]: I1127 06:54:20.549499 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:54:20 crc kubenswrapper[4971]: E1127 06:54:20.549655 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 27 06:54:20 crc kubenswrapper[4971]: I1127 06:54:20.549899 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:54:20 crc kubenswrapper[4971]: E1127 06:54:20.549993 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 27 06:54:20 crc kubenswrapper[4971]: I1127 06:54:20.550123 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:54:20 crc kubenswrapper[4971]: E1127 06:54:20.550176 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 27 06:54:20 crc kubenswrapper[4971]: I1127 06:54:20.550393 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:54:20 crc kubenswrapper[4971]: E1127 06:54:20.550460 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6"
Nov 27 06:54:20 crc kubenswrapper[4971]: I1127 06:54:20.778464 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs\") pod \"network-metrics-daemon-pdn5j\" (UID: \"e710c835-f600-448f-a110-4ff4cef9d5f6\") " pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:54:20 crc kubenswrapper[4971]: E1127 06:54:20.778734 4971 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 27 06:54:20 crc kubenswrapper[4971]: E1127 06:54:20.778877 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs podName:e710c835-f600-448f-a110-4ff4cef9d5f6 nodeName:}" failed. No retries permitted until 2025-11-27 06:55:24.77884229 +0000 UTC m=+162.970886408 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs") pod "network-metrics-daemon-pdn5j" (UID: "e710c835-f600-448f-a110-4ff4cef9d5f6") : object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 27 06:54:22 crc kubenswrapper[4971]: I1127 06:54:22.549867 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:54:22 crc kubenswrapper[4971]: I1127 06:54:22.549915 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:54:22 crc kubenswrapper[4971]: I1127 06:54:22.551176 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:54:22 crc kubenswrapper[4971]: E1127 06:54:22.551291 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 27 06:54:22 crc kubenswrapper[4971]: I1127 06:54:22.551372 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:54:22 crc kubenswrapper[4971]: E1127 06:54:22.551633 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 27 06:54:22 crc kubenswrapper[4971]: E1127 06:54:22.551859 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 27 06:54:22 crc kubenswrapper[4971]: E1127 06:54:22.552010 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6"
Nov 27 06:54:23 crc kubenswrapper[4971]: I1127 06:54:23.550605 4971 scope.go:117] "RemoveContainer" containerID="91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85"
Nov 27 06:54:23 crc kubenswrapper[4971]: E1127 06:54:23.550792 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"
Nov 27 06:54:24 crc kubenswrapper[4971]: I1127 06:54:24.550185 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:54:24 crc kubenswrapper[4971]: I1127 06:54:24.550350 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:54:24 crc kubenswrapper[4971]: E1127 06:54:24.550500 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 27 06:54:24 crc kubenswrapper[4971]: I1127 06:54:24.550653 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:54:24 crc kubenswrapper[4971]: I1127 06:54:24.550695 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:54:24 crc kubenswrapper[4971]: E1127 06:54:24.550745 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6"
Nov 27 06:54:24 crc kubenswrapper[4971]: E1127 06:54:24.550807 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 27 06:54:24 crc kubenswrapper[4971]: E1127 06:54:24.551350 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 27 06:54:26 crc kubenswrapper[4971]: I1127 06:54:26.550113 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:54:26 crc kubenswrapper[4971]: I1127 06:54:26.550200 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:54:26 crc kubenswrapper[4971]: E1127 06:54:26.550312 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6"
Nov 27 06:54:26 crc kubenswrapper[4971]: I1127 06:54:26.550338 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:54:26 crc kubenswrapper[4971]: I1127 06:54:26.550370 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:54:26 crc kubenswrapper[4971]: E1127 06:54:26.550568 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 27 06:54:26 crc kubenswrapper[4971]: E1127 06:54:26.550726 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 27 06:54:26 crc kubenswrapper[4971]: E1127 06:54:26.550859 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 27 06:54:28 crc kubenswrapper[4971]: I1127 06:54:28.549411 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:54:28 crc kubenswrapper[4971]: I1127 06:54:28.549472 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:54:28 crc kubenswrapper[4971]: I1127 06:54:28.549406 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:54:28 crc kubenswrapper[4971]: I1127 06:54:28.549490 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:54:28 crc kubenswrapper[4971]: E1127 06:54:28.549623 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6"
Nov 27 06:54:28 crc kubenswrapper[4971]: E1127 06:54:28.549738 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 27 06:54:28 crc kubenswrapper[4971]: E1127 06:54:28.549930 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 27 06:54:28 crc kubenswrapper[4971]: E1127 06:54:28.549973 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 27 06:54:30 crc kubenswrapper[4971]: I1127 06:54:30.550264 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:54:30 crc kubenswrapper[4971]: I1127 06:54:30.550396 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:54:30 crc kubenswrapper[4971]: I1127 06:54:30.551356 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:54:30 crc kubenswrapper[4971]: I1127 06:54:30.551587 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:54:30 crc kubenswrapper[4971]: E1127 06:54:30.551845 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6"
Nov 27 06:54:30 crc kubenswrapper[4971]: E1127 06:54:30.551649 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 27 06:54:30 crc kubenswrapper[4971]: E1127 06:54:30.551596 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 27 06:54:30 crc kubenswrapper[4971]: E1127 06:54:30.551957 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 27 06:54:32 crc kubenswrapper[4971]: I1127 06:54:32.549856 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:54:32 crc kubenswrapper[4971]: I1127 06:54:32.550076 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:54:32 crc kubenswrapper[4971]: I1127 06:54:32.550125 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:54:32 crc kubenswrapper[4971]: I1127 06:54:32.550167 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:54:32 crc kubenswrapper[4971]: E1127 06:54:32.552045 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6"
Nov 27 06:54:32 crc kubenswrapper[4971]: E1127 06:54:32.552112 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 27 06:54:32 crc kubenswrapper[4971]: E1127 06:54:32.552200 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 27 06:54:32 crc kubenswrapper[4971]: E1127 06:54:32.552394 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 27 06:54:34 crc kubenswrapper[4971]: I1127 06:54:34.550244 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 27 06:54:34 crc kubenswrapper[4971]: I1127 06:54:34.550248 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:54:34 crc kubenswrapper[4971]: I1127 06:54:34.550316 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 27 06:54:34 crc kubenswrapper[4971]: I1127 06:54:34.550718 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:54:34 crc kubenswrapper[4971]: E1127 06:54:34.550802 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 27 06:54:34 crc kubenswrapper[4971]: E1127 06:54:34.550975 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6"
Nov 27 06:54:34 crc kubenswrapper[4971]: I1127 06:54:34.551021 4971 scope.go:117] "RemoveContainer" containerID="91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85"
Nov 27 06:54:34 crc kubenswrapper[4971]: E1127 06:54:34.551035 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:54:34 crc kubenswrapper[4971]: E1127 06:54:34.551091 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:54:34 crc kubenswrapper[4971]: E1127 06:54:34.551178 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-56nwb_openshift-ovn-kubernetes(47c0abbf-6e9f-4bca-b3ca-bd896be15f2c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" Nov 27 06:54:36 crc kubenswrapper[4971]: I1127 06:54:36.549858 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:54:36 crc kubenswrapper[4971]: I1127 06:54:36.549960 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:54:36 crc kubenswrapper[4971]: E1127 06:54:36.549995 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:54:36 crc kubenswrapper[4971]: E1127 06:54:36.550177 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:54:36 crc kubenswrapper[4971]: I1127 06:54:36.550308 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:54:36 crc kubenswrapper[4971]: E1127 06:54:36.550366 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:54:36 crc kubenswrapper[4971]: I1127 06:54:36.550671 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:54:36 crc kubenswrapper[4971]: E1127 06:54:36.550737 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:54:38 crc kubenswrapper[4971]: I1127 06:54:38.550244 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:54:38 crc kubenswrapper[4971]: I1127 06:54:38.550295 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:54:38 crc kubenswrapper[4971]: E1127 06:54:38.550407 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:54:38 crc kubenswrapper[4971]: I1127 06:54:38.550458 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:54:38 crc kubenswrapper[4971]: I1127 06:54:38.550568 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:54:38 crc kubenswrapper[4971]: E1127 06:54:38.550650 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:54:38 crc kubenswrapper[4971]: E1127 06:54:38.550592 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:54:38 crc kubenswrapper[4971]: E1127 06:54:38.550832 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:54:39 crc kubenswrapper[4971]: I1127 06:54:39.125288 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lr9p6_a2136014-aa8f-48e5-bccf-64cdd3cbc5f9/kube-multus/1.log" Nov 27 06:54:39 crc kubenswrapper[4971]: I1127 06:54:39.126571 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lr9p6_a2136014-aa8f-48e5-bccf-64cdd3cbc5f9/kube-multus/0.log" Nov 27 06:54:39 crc kubenswrapper[4971]: I1127 06:54:39.126642 4971 generic.go:334] "Generic (PLEG): container finished" podID="a2136014-aa8f-48e5-bccf-64cdd3cbc5f9" containerID="a2536efee5c523594e02d64a0d1113af4945a84c32d06c78f840ef292143578e" exitCode=1 Nov 27 06:54:39 crc kubenswrapper[4971]: I1127 06:54:39.126694 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lr9p6" event={"ID":"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9","Type":"ContainerDied","Data":"a2536efee5c523594e02d64a0d1113af4945a84c32d06c78f840ef292143578e"} Nov 27 06:54:39 crc kubenswrapper[4971]: I1127 06:54:39.126748 4971 scope.go:117] "RemoveContainer" containerID="5609ab7b327e7ec603c40b9b27f4ee52b4b49e647d0c6e9db5d360bf686c493c" Nov 27 06:54:39 crc kubenswrapper[4971]: I1127 06:54:39.127582 4971 scope.go:117] "RemoveContainer" containerID="a2536efee5c523594e02d64a0d1113af4945a84c32d06c78f840ef292143578e" Nov 27 06:54:39 crc kubenswrapper[4971]: E1127 06:54:39.127890 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-lr9p6_openshift-multus(a2136014-aa8f-48e5-bccf-64cdd3cbc5f9)\"" pod="openshift-multus/multus-lr9p6" podUID="a2136014-aa8f-48e5-bccf-64cdd3cbc5f9" Nov 27 06:54:40 crc kubenswrapper[4971]: I1127 06:54:40.134241 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lr9p6_a2136014-aa8f-48e5-bccf-64cdd3cbc5f9/kube-multus/1.log" Nov 27 06:54:40 crc kubenswrapper[4971]: I1127 06:54:40.549607 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:54:40 crc kubenswrapper[4971]: I1127 06:54:40.549650 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:54:40 crc kubenswrapper[4971]: I1127 06:54:40.549706 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:54:40 crc kubenswrapper[4971]: E1127 06:54:40.549745 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:54:40 crc kubenswrapper[4971]: I1127 06:54:40.549783 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:54:40 crc kubenswrapper[4971]: E1127 06:54:40.550110 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:54:40 crc kubenswrapper[4971]: E1127 06:54:40.550173 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:54:40 crc kubenswrapper[4971]: E1127 06:54:40.550137 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:54:42 crc kubenswrapper[4971]: E1127 06:54:42.499091 4971 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Nov 27 06:54:42 crc kubenswrapper[4971]: I1127 06:54:42.550002 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:54:42 crc kubenswrapper[4971]: I1127 06:54:42.550064 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:54:42 crc kubenswrapper[4971]: I1127 06:54:42.550004 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:54:42 crc kubenswrapper[4971]: E1127 06:54:42.550906 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:54:42 crc kubenswrapper[4971]: I1127 06:54:42.550937 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:54:42 crc kubenswrapper[4971]: E1127 06:54:42.551005 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:54:42 crc kubenswrapper[4971]: E1127 06:54:42.551121 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:54:42 crc kubenswrapper[4971]: E1127 06:54:42.551258 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:54:42 crc kubenswrapper[4971]: E1127 06:54:42.656654 4971 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 27 06:54:44 crc kubenswrapper[4971]: I1127 06:54:44.549953 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:54:44 crc kubenswrapper[4971]: I1127 06:54:44.549998 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:54:44 crc kubenswrapper[4971]: I1127 06:54:44.550019 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:54:44 crc kubenswrapper[4971]: I1127 06:54:44.550164 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:54:44 crc kubenswrapper[4971]: E1127 06:54:44.550343 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:54:44 crc kubenswrapper[4971]: E1127 06:54:44.550430 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:54:44 crc kubenswrapper[4971]: E1127 06:54:44.550552 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:54:44 crc kubenswrapper[4971]: E1127 06:54:44.551356 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:54:46 crc kubenswrapper[4971]: I1127 06:54:46.549310 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:54:46 crc kubenswrapper[4971]: I1127 06:54:46.549411 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:54:46 crc kubenswrapper[4971]: I1127 06:54:46.549417 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:54:46 crc kubenswrapper[4971]: I1127 06:54:46.549348 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:54:46 crc kubenswrapper[4971]: E1127 06:54:46.549592 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:54:46 crc kubenswrapper[4971]: E1127 06:54:46.549880 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:54:46 crc kubenswrapper[4971]: E1127 06:54:46.550077 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:54:46 crc kubenswrapper[4971]: E1127 06:54:46.550266 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:54:47 crc kubenswrapper[4971]: E1127 06:54:47.658391 4971 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 27 06:54:48 crc kubenswrapper[4971]: I1127 06:54:48.549782 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:54:48 crc kubenswrapper[4971]: I1127 06:54:48.549917 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:54:48 crc kubenswrapper[4971]: I1127 06:54:48.549990 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:54:48 crc kubenswrapper[4971]: I1127 06:54:48.549992 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:54:48 crc kubenswrapper[4971]: E1127 06:54:48.549938 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:54:48 crc kubenswrapper[4971]: E1127 06:54:48.550151 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:54:48 crc kubenswrapper[4971]: E1127 06:54:48.550241 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:54:48 crc kubenswrapper[4971]: E1127 06:54:48.550331 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:54:50 crc kubenswrapper[4971]: I1127 06:54:50.550361 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:54:50 crc kubenswrapper[4971]: I1127 06:54:50.550466 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:54:50 crc kubenswrapper[4971]: E1127 06:54:50.550515 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:54:50 crc kubenswrapper[4971]: I1127 06:54:50.550594 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:54:50 crc kubenswrapper[4971]: I1127 06:54:50.550594 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:54:50 crc kubenswrapper[4971]: E1127 06:54:50.550680 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:54:50 crc kubenswrapper[4971]: E1127 06:54:50.550887 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:54:50 crc kubenswrapper[4971]: I1127 06:54:50.551189 4971 scope.go:117] "RemoveContainer" containerID="a2536efee5c523594e02d64a0d1113af4945a84c32d06c78f840ef292143578e" Nov 27 06:54:50 crc kubenswrapper[4971]: E1127 06:54:50.551303 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:54:50 crc kubenswrapper[4971]: I1127 06:54:50.552374 4971 scope.go:117] "RemoveContainer" containerID="91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85" Nov 27 06:54:51 crc kubenswrapper[4971]: I1127 06:54:51.174440 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lr9p6_a2136014-aa8f-48e5-bccf-64cdd3cbc5f9/kube-multus/1.log" Nov 27 06:54:51 crc kubenswrapper[4971]: I1127 06:54:51.174801 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lr9p6" event={"ID":"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9","Type":"ContainerStarted","Data":"54adff5ac26e8635c3ca81209e1d7f16afe8025ee101874e4e650bd5caa3120d"} Nov 27 06:54:51 crc kubenswrapper[4971]: I1127 06:54:51.177435 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-56nwb_47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/ovnkube-controller/3.log" Nov 27 06:54:51 crc kubenswrapper[4971]: I1127 06:54:51.180656 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerStarted","Data":"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853"} Nov 27 06:54:51 crc kubenswrapper[4971]: I1127 06:54:51.181042 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 06:54:51 crc kubenswrapper[4971]: I1127 06:54:51.555839 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" podStartSLOduration=109.555822012 podStartE2EDuration="1m49.555822012s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:54:51.225864601 +0000 UTC m=+129.417908529" watchObservedRunningTime="2025-11-27 06:54:51.555822012 +0000 UTC m=+129.747865930" Nov 27 06:54:51 crc kubenswrapper[4971]: I1127 06:54:51.556037 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-pdn5j"] Nov 27 06:54:51 crc kubenswrapper[4971]: I1127 06:54:51.556108 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:54:51 crc kubenswrapper[4971]: E1127 06:54:51.556189 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:54:52 crc kubenswrapper[4971]: I1127 06:54:52.549861 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:54:52 crc kubenswrapper[4971]: I1127 06:54:52.549960 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:54:52 crc kubenswrapper[4971]: E1127 06:54:52.550799 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:54:52 crc kubenswrapper[4971]: I1127 06:54:52.550822 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:54:52 crc kubenswrapper[4971]: E1127 06:54:52.550974 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:54:52 crc kubenswrapper[4971]: E1127 06:54:52.551159 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:54:52 crc kubenswrapper[4971]: E1127 06:54:52.659116 4971 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 27 06:54:53 crc kubenswrapper[4971]: I1127 06:54:53.549353 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:54:53 crc kubenswrapper[4971]: E1127 06:54:53.550247 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:54:54 crc kubenswrapper[4971]: I1127 06:54:54.549581 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:54:54 crc kubenswrapper[4971]: I1127 06:54:54.549581 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:54:54 crc kubenswrapper[4971]: I1127 06:54:54.549719 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:54:54 crc kubenswrapper[4971]: E1127 06:54:54.549814 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:54:54 crc kubenswrapper[4971]: E1127 06:54:54.550140 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:54:54 crc kubenswrapper[4971]: E1127 06:54:54.550231 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:54:55 crc kubenswrapper[4971]: I1127 06:54:55.550032 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:54:55 crc kubenswrapper[4971]: E1127 06:54:55.550177 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:54:56 crc kubenswrapper[4971]: I1127 06:54:56.549896 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:54:56 crc kubenswrapper[4971]: I1127 06:54:56.549932 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:54:56 crc kubenswrapper[4971]: I1127 06:54:56.549946 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:54:56 crc kubenswrapper[4971]: E1127 06:54:56.550052 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 27 06:54:56 crc kubenswrapper[4971]: E1127 06:54:56.550130 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 27 06:54:56 crc kubenswrapper[4971]: E1127 06:54:56.550211 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 27 06:54:57 crc kubenswrapper[4971]: I1127 06:54:57.549417 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:54:57 crc kubenswrapper[4971]: E1127 06:54:57.549579 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pdn5j" podUID="e710c835-f600-448f-a110-4ff4cef9d5f6" Nov 27 06:54:58 crc kubenswrapper[4971]: I1127 06:54:58.549792 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:54:58 crc kubenswrapper[4971]: I1127 06:54:58.549808 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:54:58 crc kubenswrapper[4971]: I1127 06:54:58.550055 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:54:58 crc kubenswrapper[4971]: I1127 06:54:58.551797 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Nov 27 06:54:58 crc kubenswrapper[4971]: I1127 06:54:58.551824 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Nov 27 06:54:58 crc kubenswrapper[4971]: I1127 06:54:58.551918 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Nov 27 06:54:58 crc kubenswrapper[4971]: I1127 06:54:58.554070 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Nov 27 06:54:59 crc kubenswrapper[4971]: I1127 06:54:59.549795 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j" Nov 27 06:54:59 crc kubenswrapper[4971]: I1127 06:54:59.551478 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Nov 27 06:54:59 crc kubenswrapper[4971]: I1127 06:54:59.551664 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.687190 4971 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.735619 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7hzzs"] Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.735957 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.739341 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-jxflz"] Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.740023 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-jxflz" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.741073 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.741339 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.741770 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.742004 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.742754 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9d7hw"] Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.743839 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"] Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.744769 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.745592 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-9d7hw" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.746811 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb"] Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.747475 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.783773 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.784997 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.798855 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.798927 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xqhhz"] Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.799750 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.799975 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.800101 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.800464 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.800826 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.800982 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.801266 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.801466 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.801761 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.801822 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.802234 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.802407 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.802501 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.802600 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.802657 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 
06:55:00.802770 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.802846 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.807054 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.807307 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.807662 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.816160 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.818473 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.820909 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-b9pwk"]
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.822946 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-b9pwk"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.823994 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-j6c6b"]
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824265 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6v87g\" (UniqueName: \"kubernetes.io/projected/fda2f244-1f2f-4a07-915c-586bd4136c02-kube-api-access-6v87g\") pod \"machine-api-operator-5694c8668f-9d7hw\" (UID: \"fda2f244-1f2f-4a07-915c-586bd4136c02\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9d7hw"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824292 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-audit\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824315 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xw4rt\" (UniqueName: \"kubernetes.io/projected/0316d962-5de2-4709-95e7-1679943f35fd-kube-api-access-xw4rt\") pod \"route-controller-manager-6576b87f9c-29jwb\" (UID: \"0316d962-5de2-4709-95e7-1679943f35fd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824334 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fda2f244-1f2f-4a07-915c-586bd4136c02-images\") pod \"machine-api-operator-5694c8668f-9d7hw\" (UID: \"fda2f244-1f2f-4a07-915c-586bd4136c02\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9d7hw"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824351 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4130c823-650b-4f98-a3e8-e735f3615b03-serving-cert\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824372 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-audit-dir\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824392 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5844f6e-6fc6-460c-9116-217112e3e28d-config\") pod \"authentication-operator-69f744f599-xqhhz\" (UID: \"f5844f6e-6fc6-460c-9116-217112e3e28d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824424 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0316d962-5de2-4709-95e7-1679943f35fd-config\") pod \"route-controller-manager-6576b87f9c-29jwb\" (UID: \"0316d962-5de2-4709-95e7-1679943f35fd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824441 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4130c823-650b-4f98-a3e8-e735f3615b03-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824459 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5844f6e-6fc6-460c-9116-217112e3e28d-service-ca-bundle\") pod \"authentication-operator-69f744f599-xqhhz\" (UID: \"f5844f6e-6fc6-460c-9116-217112e3e28d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824497 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4130c823-650b-4f98-a3e8-e735f3615b03-audit-dir\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824513 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-encryption-config\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824549 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vqtn\" (UniqueName: \"kubernetes.io/projected/4130c823-650b-4f98-a3e8-e735f3615b03-kube-api-access-4vqtn\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824567 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/fda2f244-1f2f-4a07-915c-586bd4136c02-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9d7hw\" (UID: \"fda2f244-1f2f-4a07-915c-586bd4136c02\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9d7hw"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824597 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4130c823-650b-4f98-a3e8-e735f3615b03-audit-policies\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824614 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4130c823-650b-4f98-a3e8-e735f3615b03-encryption-config\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824634 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5844f6e-6fc6-460c-9116-217112e3e28d-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xqhhz\" (UID: \"f5844f6e-6fc6-460c-9116-217112e3e28d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824661 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78734b35-61db-46e9-b16b-1f03258f9fcb-config\") pod \"controller-manager-879f6c89f-7hzzs\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824678 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78734b35-61db-46e9-b16b-1f03258f9fcb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7hzzs\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824698 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ts5r\" (UniqueName: \"kubernetes.io/projected/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-kube-api-access-5ts5r\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824716 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2djsr\" (UniqueName: \"kubernetes.io/projected/78734b35-61db-46e9-b16b-1f03258f9fcb-kube-api-access-2djsr\") pod \"controller-manager-879f6c89f-7hzzs\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824734 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4130c823-650b-4f98-a3e8-e735f3615b03-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824754 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-etcd-serving-ca\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824771 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4130c823-650b-4f98-a3e8-e735f3615b03-etcd-client\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824790 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnjks\" (UniqueName: \"kubernetes.io/projected/f5844f6e-6fc6-460c-9116-217112e3e28d-kube-api-access-vnjks\") pod \"authentication-operator-69f744f599-xqhhz\" (UID: \"f5844f6e-6fc6-460c-9116-217112e3e28d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824806 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-etcd-client\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824829 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-node-pullsecrets\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824845 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-trusted-ca-bundle\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824852 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j6c6b"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824861 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78734b35-61db-46e9-b16b-1f03258f9fcb-serving-cert\") pod \"controller-manager-879f6c89f-7hzzs\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824896 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fda2f244-1f2f-4a07-915c-586bd4136c02-config\") pod \"machine-api-operator-5694c8668f-9d7hw\" (UID: \"fda2f244-1f2f-4a07-915c-586bd4136c02\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9d7hw"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824918 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-config\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824940 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5844f6e-6fc6-460c-9116-217112e3e28d-serving-cert\") pod \"authentication-operator-69f744f599-xqhhz\" (UID: \"f5844f6e-6fc6-460c-9116-217112e3e28d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824963 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-image-import-ca\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.824983 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78734b35-61db-46e9-b16b-1f03258f9fcb-client-ca\") pod \"controller-manager-879f6c89f-7hzzs\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.825000 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0316d962-5de2-4709-95e7-1679943f35fd-client-ca\") pod \"route-controller-manager-6576b87f9c-29jwb\" (UID: \"0316d962-5de2-4709-95e7-1679943f35fd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.825017 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-serving-cert\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.825034 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0316d962-5de2-4709-95e7-1679943f35fd-serving-cert\") pod \"route-controller-manager-6576b87f9c-29jwb\" (UID: \"0316d962-5de2-4709-95e7-1679943f35fd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.825072 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-blqcq"]
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.825525 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-blqcq"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.826763 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-nh6pl"]
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.827352 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-nh6pl"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.864207 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.864229 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.864338 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.866544 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.867281 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.867415 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.867568 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.867872 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.868270 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.868518 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.868640 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn"]
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.868804 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.869006 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8thn4"]
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.869029 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.869168 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.869489 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.869758 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8thn4"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.870580 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.876093 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-75prl"]
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.877931 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-75prl"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.881184 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mgswn"]
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.881814 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mgswn"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.887475 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8hv78"]
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.888045 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8hv78"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.892651 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hjgth"]
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.893422 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hjgth"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.893917 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.894511 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.894614 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.894851 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.894947 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.895066 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.895189 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.895308 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.895522 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.896053 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.896487 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-497pk"]
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.896596 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.896687 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.896796 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.896995 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-497pk"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.897059 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.897348 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.897475 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.894570 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.897737 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.898413 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.898877 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.901837 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-s6mff"]
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.905751 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rn8q9"]
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.905902 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-s6mff"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.906461 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc"]
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.906644 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.908833 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-p5wn9"]
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.918984 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.919827 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.925822 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w"]
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.929006 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.947086 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.947122 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7hzzs"]
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.947412 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.947572 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.947696 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.947916 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.948054 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.948077 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.948186 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.948236 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.948323 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.948453 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.948568 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.948762 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.948895 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.948983 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949057 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949254 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949308 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-serving-cert\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949327 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0316d962-5de2-4709-95e7-1679943f35fd-serving-cert\") pod \"route-controller-manager-6576b87f9c-29jwb\" (UID: \"0316d962-5de2-4709-95e7-1679943f35fd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949347 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6v87g\" (UniqueName: \"kubernetes.io/projected/fda2f244-1f2f-4a07-915c-586bd4136c02-kube-api-access-6v87g\") pod \"machine-api-operator-5694c8668f-9d7hw\" (UID: \"fda2f244-1f2f-4a07-915c-586bd4136c02\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9d7hw"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949364 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-audit\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949382 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xw4rt\" (UniqueName: \"kubernetes.io/projected/0316d962-5de2-4709-95e7-1679943f35fd-kube-api-access-xw4rt\") pod \"route-controller-manager-6576b87f9c-29jwb\" (UID: \"0316d962-5de2-4709-95e7-1679943f35fd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949391 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949397 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fda2f244-1f2f-4a07-915c-586bd4136c02-images\") pod \"machine-api-operator-5694c8668f-9d7hw\" (UID: \"fda2f244-1f2f-4a07-915c-586bd4136c02\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9d7hw"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949416 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4130c823-650b-4f98-a3e8-e735f3615b03-serving-cert\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949437 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-console-config\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949455 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-audit-dir\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949469 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5844f6e-6fc6-460c-9116-217112e3e28d-config\") pod \"authentication-operator-69f744f599-xqhhz\" (UID: \"f5844f6e-6fc6-460c-9116-217112e3e28d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949486 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3e262487-0943-410e-bbcb-11fc42a8ac60-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-jhjgn\" (UID: \"3e262487-0943-410e-bbcb-11fc42a8ac60\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949503 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0316d962-5de2-4709-95e7-1679943f35fd-config\") pod \"route-controller-manager-6576b87f9c-29jwb\" (UID: \"0316d962-5de2-4709-95e7-1679943f35fd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949518 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4130c823-650b-4f98-a3e8-e735f3615b03-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949548 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5844f6e-6fc6-460c-9116-217112e3e28d-service-ca-bundle\") pod \"authentication-operator-69f744f599-xqhhz\" (UID: \"f5844f6e-6fc6-460c-9116-217112e3e28d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949579 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/2944ff34-c55c-40fa-aacf-f679dd704ad8-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-hjgth\" (UID: \"2944ff34-c55c-40fa-aacf-f679dd704ad8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hjgth"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949597 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4130c823-650b-4f98-a3e8-e735f3615b03-audit-dir\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949614 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-encryption-config\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949623 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949629 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vqtn\" (UniqueName: \"kubernetes.io/projected/4130c823-650b-4f98-a3e8-e735f3615b03-kube-api-access-4vqtn\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949648 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/fda2f244-1f2f-4a07-915c-586bd4136c02-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9d7hw\" (UID: \"fda2f244-1f2f-4a07-915c-586bd4136c02\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9d7hw"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949676 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4130c823-650b-4f98-a3e8-e735f3615b03-audit-policies\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949698 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4130c823-650b-4f98-a3e8-e735f3615b03-encryption-config\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949717 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5844f6e-6fc6-460c-9116-217112e3e28d-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xqhhz\" (UID: \"f5844f6e-6fc6-460c-9116-217112e3e28d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949740 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7shnv\" (UniqueName: \"kubernetes.io/projected/3e262487-0943-410e-bbcb-11fc42a8ac60-kube-api-access-7shnv\") pod \"cluster-image-registry-operator-dc59b4c8b-jhjgn\" (UID: \"3e262487-0943-410e-bbcb-11fc42a8ac60\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949766 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78734b35-61db-46e9-b16b-1f03258f9fcb-config\") pod \"controller-manager-879f6c89f-7hzzs\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949782 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78734b35-61db-46e9-b16b-1f03258f9fcb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7hzzs\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949795 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949799 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsjhd\" (UniqueName: \"kubernetes.io/projected/6dc237a7-62f6-4ee9-99f1-034d6cceb901-kube-api-access-tsjhd\") pod \"openshift-config-operator-7777fb866f-8hv78\" (UID: \"6dc237a7-62f6-4ee9-99f1-034d6cceb901\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8hv78"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949817 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/6dc237a7-62f6-4ee9-99f1-034d6cceb901-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8hv78\" (UID: \"6dc237a7-62f6-4ee9-99f1-034d6cceb901\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8hv78"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949834 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ts5r\" (UniqueName: \"kubernetes.io/projected/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-kube-api-access-5ts5r\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949851 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2djsr\" (UniqueName: \"kubernetes.io/projected/78734b35-61db-46e9-b16b-1f03258f9fcb-kube-api-access-2djsr\") pod \"controller-manager-879f6c89f-7hzzs\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949867 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-oauth-serving-cert\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949891 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4130c823-650b-4f98-a3e8-e735f3615b03-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949915 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6dc237a7-62f6-4ee9-99f1-034d6cceb901-serving-cert\") pod \"openshift-config-operator-7777fb866f-8hv78\" (UID: \"6dc237a7-62f6-4ee9-99f1-034d6cceb901\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8hv78"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949922 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949941 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4130c823-650b-4f98-a3e8-e735f3615b03-etcd-client\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949958 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-etcd-serving-ca\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949975 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnjks\" (UniqueName: \"kubernetes.io/projected/f5844f6e-6fc6-460c-9116-217112e3e28d-kube-api-access-vnjks\") pod \"authentication-operator-69f744f599-xqhhz\" (UID: \"f5844f6e-6fc6-460c-9116-217112e3e28d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949989 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4130c823-650b-4f98-a3e8-e735f3615b03-audit-dir\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.949994 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-etcd-client\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950068 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b2038ca5-beb4-434e-81a7-16a67ad9382d-console-serving-cert\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950101 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b2038ca5-beb4-434e-81a7-16a67ad9382d-console-oauth-config\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950108 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950140 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-node-pullsecrets\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950166 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-trusted-ca-bundle\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950190 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78734b35-61db-46e9-b16b-1f03258f9fcb-serving-cert\") pod \"controller-manager-879f6c89f-7hzzs\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950209 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/3e262487-0943-410e-bbcb-11fc42a8ac60-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-jhjgn\" (UID: \"3e262487-0943-410e-bbcb-11fc42a8ac60\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950227 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3e262487-0943-410e-bbcb-11fc42a8ac60-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-jhjgn\" (UID: \"3e262487-0943-410e-bbcb-11fc42a8ac60\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950266 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjcwc\" (UniqueName: \"kubernetes.io/projected/2944ff34-c55c-40fa-aacf-f679dd704ad8-kube-api-access-vjcwc\") pod \"cluster-samples-operator-665b6dd947-hjgth\" (UID: \"2944ff34-c55c-40fa-aacf-f679dd704ad8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hjgth"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950280 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950288 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fda2f244-1f2f-4a07-915c-586bd4136c02-config\") pod \"machine-api-operator-5694c8668f-9d7hw\" (UID: \"fda2f244-1f2f-4a07-915c-586bd4136c02\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9d7hw"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950308 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-config\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950327 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5844f6e-6fc6-460c-9116-217112e3e28d-serving-cert\") pod \"authentication-operator-69f744f599-xqhhz\" (UID: \"f5844f6e-6fc6-460c-9116-217112e3e28d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950346 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-service-ca\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950364 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75btm\" (UniqueName: \"kubernetes.io/projected/b2038ca5-beb4-434e-81a7-16a67ad9382d-kube-api-access-75btm\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950398 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-image-import-ca\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950419 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78734b35-61db-46e9-b16b-1f03258f9fcb-client-ca\") pod \"controller-manager-879f6c89f-7hzzs\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950445 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0316d962-5de2-4709-95e7-1679943f35fd-client-ca\") pod \"route-controller-manager-6576b87f9c-29jwb\" (UID: \"0316d962-5de2-4709-95e7-1679943f35fd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950455 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950462 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-trusted-ca-bundle\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950634 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950810 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.950937 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.951137 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.951522 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-audit-dir\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.951890 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.952233 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-audit\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.952906 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-node-pullsecrets\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.953226 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5844f6e-6fc6-460c-9116-217112e3e28d-config\") pod \"authentication-operator-69f744f599-xqhhz\" (UID: \"f5844f6e-6fc6-460c-9116-217112e3e28d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.953760 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5844f6e-6fc6-460c-9116-217112e3e28d-service-ca-bundle\") pod \"authentication-operator-69f744f599-xqhhz\" (UID: \"f5844f6e-6fc6-460c-9116-217112e3e28d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.953895 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4130c823-650b-4f98-a3e8-e735f3615b03-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.953985 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fda2f244-1f2f-4a07-915c-586bd4136c02-images\") pod \"machine-api-operator-5694c8668f-9d7hw\" (UID: \"fda2f244-1f2f-4a07-915c-586bd4136c02\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9d7hw"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.954133 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.954173 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4130c823-650b-4f98-a3e8-e735f3615b03-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.954491 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.955892 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0316d962-5de2-4709-95e7-1679943f35fd-config\") pod \"route-controller-manager-6576b87f9c-29jwb\" (UID: \"0316d962-5de2-4709-95e7-1679943f35fd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.956715 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-etcd-serving-ca\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.957621 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4130c823-650b-4f98-a3e8-e735f3615b03-audit-policies\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.957752 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.957926 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.958288 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0316d962-5de2-4709-95e7-1679943f35fd-client-ca\") pod \"route-controller-manager-6576b87f9c-29jwb\" (UID: \"0316d962-5de2-4709-95e7-1679943f35fd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.958523 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78734b35-61db-46e9-b16b-1f03258f9fcb-client-ca\") pod \"controller-manager-879f6c89f-7hzzs\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.959383 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78734b35-61db-46e9-b16b-1f03258f9fcb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7hzzs\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.959582 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.957931 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4130c823-650b-4f98-a3e8-e735f3615b03-encryption-config\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.960193 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78734b35-61db-46e9-b16b-1f03258f9fcb-config\") pod \"controller-manager-879f6c89f-7hzzs\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.960470 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-etcd-client\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.960942 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-image-import-ca\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.961436 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-encryption-config\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.961748 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4130c823-650b-4f98-a3e8-e735f3615b03-serving-cert\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.961449 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0316d962-5de2-4709-95e7-1679943f35fd-serving-cert\") pod \"route-controller-manager-6576b87f9c-29jwb\" (UID: \"0316d962-5de2-4709-95e7-1679943f35fd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.961849 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.961885 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.962339 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-p5wn9"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.962397 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.962434 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.962619 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.962688 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5844f6e-6fc6-460c-9116-217112e3e28d-serving-cert\") pod \"authentication-operator-69f744f599-xqhhz\" (UID: \"f5844f6e-6fc6-460c-9116-217112e3e28d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.962725 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.962970 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4130c823-650b-4f98-a3e8-e735f3615b03-etcd-client\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.963516 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.964195 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-config\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.964381 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.964547 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.964843 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.966015 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78734b35-61db-46e9-b16b-1f03258f9fcb-serving-cert\") pod \"controller-manager-879f6c89f-7hzzs\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs"
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.966075 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4"]
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.966798 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-h6p78"]
Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.967054 4971 reflector.go:368] Caches populated for *v1.ConfigMap from
object-"openshift-image-registry"/"trusted-ca" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.967347 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.969791 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.969791 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.970737 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fda2f244-1f2f-4a07-915c-586bd4136c02-config\") pod \"machine-api-operator-5694c8668f-9d7hw\" (UID: \"fda2f244-1f2f-4a07-915c-586bd4136c02\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9d7hw" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.971552 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/fda2f244-1f2f-4a07-915c-586bd4136c02-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9d7hw\" (UID: \"fda2f244-1f2f-4a07-915c-586bd4136c02\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9d7hw" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.973735 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hh55n"] Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.974279 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48"] Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.974970 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.975096 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-kg7xs"] Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.975170 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5844f6e-6fc6-460c-9116-217112e3e28d-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xqhhz\" (UID: \"f5844f6e-6fc6-460c-9116-217112e3e28d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.975224 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hh55n" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.975918 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-kg7xs" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.976479 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-trusted-ca-bundle\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.976778 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz"] Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.977221 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.977608 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ht55m"] Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.977942 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.978191 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ht55m" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.979398 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-m7fh9"] Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.982079 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.985181 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-7k28d"] Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.985569 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2drfq"] Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.985839 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9d7hw"] Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.985915 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2drfq" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.986109 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-7k28d" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.986484 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-xhx92"] Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.986973 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-xhx92" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.988559 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-serving-cert\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.988603 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n56lw"] Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.988964 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n56lw" Nov 27 06:55:00 crc kubenswrapper[4971]: I1127 06:55:00.989922 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9sjf5"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:00.997887 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9sjf5" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.008862 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xqhhz"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.013622 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.021767 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-kzfsj"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.023014 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-kzfsj" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.030958 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-f6k2p"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.031823 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-f6k2p" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.033641 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.034716 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.034801 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.035897 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.037092 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8thn4"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.038287 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-52sbj"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.038935 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-52sbj" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.039393 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-nh6pl"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.040315 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-blqcq"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.041246 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.042238 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-p6gxf"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.043999 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-p5wn9"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.044100 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.044421 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-7k28d"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.046750 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.047570 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rn8q9"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.048564 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hjgth"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.049677 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-h6p78"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.050657 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hh55n"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.051099 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-trusted-ca-bundle\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.051136 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-console-config\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.051156 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3e262487-0943-410e-bbcb-11fc42a8ac60-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-jhjgn\" (UID: \"3e262487-0943-410e-bbcb-11fc42a8ac60\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.051854 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-console-config\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.052121 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mgswn"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.052175 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/2944ff34-c55c-40fa-aacf-f679dd704ad8-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-hjgth\" (UID: \"2944ff34-c55c-40fa-aacf-f679dd704ad8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hjgth" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.052228 4971 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-7shnv\" (UniqueName: \"kubernetes.io/projected/3e262487-0943-410e-bbcb-11fc42a8ac60-kube-api-access-7shnv\") pod \"cluster-image-registry-operator-dc59b4c8b-jhjgn\" (UID: \"3e262487-0943-410e-bbcb-11fc42a8ac60\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.052265 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsjhd\" (UniqueName: \"kubernetes.io/projected/6dc237a7-62f6-4ee9-99f1-034d6cceb901-kube-api-access-tsjhd\") pod \"openshift-config-operator-7777fb866f-8hv78\" (UID: \"6dc237a7-62f6-4ee9-99f1-034d6cceb901\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8hv78" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.052283 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/6dc237a7-62f6-4ee9-99f1-034d6cceb901-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8hv78\" (UID: \"6dc237a7-62f6-4ee9-99f1-034d6cceb901\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8hv78" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.052302 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-oauth-serving-cert\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.052322 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6dc237a7-62f6-4ee9-99f1-034d6cceb901-serving-cert\") pod \"openshift-config-operator-7777fb866f-8hv78\" (UID: \"6dc237a7-62f6-4ee9-99f1-034d6cceb901\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8hv78" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.052345 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b2038ca5-beb4-434e-81a7-16a67ad9382d-console-serving-cert\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.052361 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b2038ca5-beb4-434e-81a7-16a67ad9382d-console-oauth-config\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.052377 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/3e262487-0943-410e-bbcb-11fc42a8ac60-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-jhjgn\" (UID: \"3e262487-0943-410e-bbcb-11fc42a8ac60\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.052396 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/3e262487-0943-410e-bbcb-11fc42a8ac60-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-jhjgn\" (UID: \"3e262487-0943-410e-bbcb-11fc42a8ac60\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.052421 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjcwc\" (UniqueName: \"kubernetes.io/projected/2944ff34-c55c-40fa-aacf-f679dd704ad8-kube-api-access-vjcwc\") pod \"cluster-samples-operator-665b6dd947-hjgth\" (UID: \"2944ff34-c55c-40fa-aacf-f679dd704ad8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hjgth" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.052437 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-service-ca\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.052451 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75btm\" (UniqueName: \"kubernetes.io/projected/b2038ca5-beb4-434e-81a7-16a67ad9382d-kube-api-access-75btm\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.052653 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3e262487-0943-410e-bbcb-11fc42a8ac60-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-jhjgn\" (UID: \"3e262487-0943-410e-bbcb-11fc42a8ac60\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.052727 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.053848 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-trusted-ca-bundle\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.054272 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.055084 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-oauth-serving-cert\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.055638 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-jxflz"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.055714 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-service-ca\") pod \"console-f9d7485db-blqcq\" (UID: 
\"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.057586 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6v87g\" (UniqueName: \"kubernetes.io/projected/fda2f244-1f2f-4a07-915c-586bd4136c02-kube-api-access-6v87g\") pod \"machine-api-operator-5694c8668f-9d7hw\" (UID: \"fda2f244-1f2f-4a07-915c-586bd4136c02\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9d7hw" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.057930 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6dc237a7-62f6-4ee9-99f1-034d6cceb901-serving-cert\") pod \"openshift-config-operator-7777fb866f-8hv78\" (UID: \"6dc237a7-62f6-4ee9-99f1-034d6cceb901\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8hv78" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.058104 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/2944ff34-c55c-40fa-aacf-f679dd704ad8-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-hjgth\" (UID: \"2944ff34-c55c-40fa-aacf-f679dd704ad8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hjgth" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.058463 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-b9pwk"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.061794 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b2038ca5-beb4-434e-81a7-16a67ad9382d-console-serving-cert\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.061827 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-p6gxf"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.061844 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-497pk"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.064247 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.064287 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8hv78"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.065642 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-75prl"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.069413 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2drfq"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.069448 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-xhx92"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.069459 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.054333 4971 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/6dc237a7-62f6-4ee9-99f1-034d6cceb901-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8hv78\" (UID: \"6dc237a7-62f6-4ee9-99f1-034d6cceb901\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8hv78" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.071216 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-kg7xs"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.072214 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.073205 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-m7fh9"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.074084 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b2038ca5-beb4-434e-81a7-16a67ad9382d-console-oauth-config\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.073573 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/3e262487-0943-410e-bbcb-11fc42a8ac60-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-jhjgn\" (UID: \"3e262487-0943-410e-bbcb-11fc42a8ac60\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.074382 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n56lw"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.075647 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ht55m"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.076942 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-f6k2p"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.078322 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9sjf5"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.079733 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-kzfsj"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.080803 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-9snm7"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.081369 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xw4rt\" (UniqueName: \"kubernetes.io/projected/0316d962-5de2-4709-95e7-1679943f35fd-kube-api-access-xw4rt\") pod \"route-controller-manager-6576b87f9c-29jwb\" (UID: \"0316d962-5de2-4709-95e7-1679943f35fd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.081634 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9snm7" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.081746 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-k2ldt"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.082453 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-k2ldt" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.082767 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-k2ldt"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.083745 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9snm7"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.097839 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ts5r\" (UniqueName: \"kubernetes.io/projected/b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0-kube-api-access-5ts5r\") pod \"apiserver-76f77b778f-jxflz\" (UID: \"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0\") " pod="openshift-apiserver/apiserver-76f77b778f-jxflz" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.118683 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2djsr\" (UniqueName: \"kubernetes.io/projected/78734b35-61db-46e9-b16b-1f03258f9fcb-kube-api-access-2djsr\") pod \"controller-manager-879f6c89f-7hzzs\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.130337 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-9d7hw" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.133881 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vqtn\" (UniqueName: \"kubernetes.io/projected/4130c823-650b-4f98-a3e8-e735f3615b03-kube-api-access-4vqtn\") pod \"apiserver-7bbb656c7d-mcsj6\" (UID: \"4130c823-650b-4f98-a3e8-e735f3615b03\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.137705 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.156590 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnjks\" (UniqueName: \"kubernetes.io/projected/f5844f6e-6fc6-460c-9116-217112e3e28d-kube-api-access-vnjks\") pod \"authentication-operator-69f744f599-xqhhz\" (UID: \"f5844f6e-6fc6-460c-9116-217112e3e28d\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.166254 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.198724 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.219115 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.239772 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.258251 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.278974 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.298140 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.319006 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.330213 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.339138 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.358673 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.372354 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.376185 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9d7hw"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.378304 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.382449 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-jxflz" Nov 27 06:55:01 crc kubenswrapper[4971]: W1127 06:55:01.399621 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfda2f244_1f2f_4a07_915c_586bd4136c02.slice/crio-56c3d31123c08a4a18fd0b6230a542cad4517f7ec4c8ee5a86a53bfce4195b99 WatchSource:0}: Error finding container 56c3d31123c08a4a18fd0b6230a542cad4517f7ec4c8ee5a86a53bfce4195b99: Status 404 returned error can't find the container with id 56c3d31123c08a4a18fd0b6230a542cad4517f7ec4c8ee5a86a53bfce4195b99 Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.399957 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.402840 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.424068 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.440017 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.452109 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.459970 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.478729 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.498425 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.519999 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.538688 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.559257 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.578289 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.605922 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.618598 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.638334 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 
06:55:01.667781 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.679197 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.697982 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.725724 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.738307 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.758444 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.783777 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.802404 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.810585 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7hzzs"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.818480 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.840939 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.857817 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.879996 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.899138 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.918221 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.938616 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.960803 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.977963 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Nov 27 06:55:01 crc 
kubenswrapper[4971]: I1127 06:55:01.981157 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xqhhz"] Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.996280 4971 request.go:700] Waited for 1.013946426s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/secrets?fieldSelector=metadata.name%3Dmarketplace-operator-dockercfg-5nsgg&limit=500&resourceVersion=0 Nov 27 06:55:01 crc kubenswrapper[4971]: I1127 06:55:01.998160 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.018956 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.043657 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.059418 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.079353 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.100168 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.108754 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-jxflz"] Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.118596 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.122413 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6"] Nov 27 06:55:02 crc kubenswrapper[4971]: W1127 06:55:02.137317 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4130c823_650b_4f98_a3e8_e735f3615b03.slice/crio-90014949e0eb9091898f743955356c50f2f62f0a307c6e9d1860b152c116b4cc WatchSource:0}: Error finding container 90014949e0eb9091898f743955356c50f2f62f0a307c6e9d1860b152c116b4cc: Status 404 returned error can't find the container with id 90014949e0eb9091898f743955356c50f2f62f0a307c6e9d1860b152c116b4cc Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.138323 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.159043 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.178863 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.199070 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Nov 27 
06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.218012 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs" event={"ID":"78734b35-61db-46e9-b16b-1f03258f9fcb","Type":"ContainerStarted","Data":"68db77eb991c63835e0067a546d988680d5df96131649cc1e6d414028039b607"} Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.218053 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs" event={"ID":"78734b35-61db-46e9-b16b-1f03258f9fcb","Type":"ContainerStarted","Data":"ac2201f0ae22a8468871139832e43fe9e5416597c69482ddb2578663657e4995"} Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.218356 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.218577 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.221791 4971 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-7hzzs container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.221874 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs" podUID="78734b35-61db-46e9-b16b-1f03258f9fcb" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.222651 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb" event={"ID":"0316d962-5de2-4709-95e7-1679943f35fd","Type":"ContainerStarted","Data":"8322e3720ea1cd1f04684587515e07e81db27143190cc4163685bd8e1aa24644"} Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.222712 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb" event={"ID":"0316d962-5de2-4709-95e7-1679943f35fd","Type":"ContainerStarted","Data":"f57ec2aeb5e22f8ef8cdb8aeeb0e3bfb1781fe2a83e9d97f175cd7224037bb5a"} Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.223411 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.225366 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz" event={"ID":"f5844f6e-6fc6-460c-9116-217112e3e28d","Type":"ContainerStarted","Data":"193b5fef5c1323158c994f029bb33c59b1e480128b749c2d8fa86889bba51407"} Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.225969 4971 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-29jwb container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.226027 4971 prober.go:107] "Probe 
failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb" podUID="0316d962-5de2-4709-95e7-1679943f35fd" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.229389 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6" event={"ID":"4130c823-650b-4f98-a3e8-e735f3615b03","Type":"ContainerStarted","Data":"90014949e0eb9091898f743955356c50f2f62f0a307c6e9d1860b152c116b4cc"} Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.231214 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-jxflz" event={"ID":"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0","Type":"ContainerStarted","Data":"57aca3c4e88a84669229e4033d8ac6651157aae9aa835a814333c9faa43b0002"} Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.236830 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9d7hw" event={"ID":"fda2f244-1f2f-4a07-915c-586bd4136c02","Type":"ContainerStarted","Data":"63b46b0f3ca48edd8346918502e5226827334d42cceb97c94d47dca725310f47"} Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.236885 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9d7hw" event={"ID":"fda2f244-1f2f-4a07-915c-586bd4136c02","Type":"ContainerStarted","Data":"affe512e72380714f0b2b78c4af76de79b05a2d63ea09c0a5d4e2baefb34ae65"} Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.236901 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9d7hw" event={"ID":"fda2f244-1f2f-4a07-915c-586bd4136c02","Type":"ContainerStarted","Data":"56c3d31123c08a4a18fd0b6230a542cad4517f7ec4c8ee5a86a53bfce4195b99"} Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.237932 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.259667 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.279251 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.299038 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.319490 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.338057 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.359089 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.378286 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Nov 27 06:55:02 crc 
kubenswrapper[4971]: I1127 06:55:02.398146 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.418563 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.438958 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.458648 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.479503 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.498312 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.518821 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.565359 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.579736 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.580588 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfdp8\" (UniqueName: \"kubernetes.io/projected/227860ca-baf3-4b07-b0b3-9a29a9eedab6-kube-api-access-xfdp8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.580668 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3b071787-4135-43ce-a44e-68a0be71919e-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jdbbc\" (UID: \"3b071787-4135-43ce-a44e-68a0be71919e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.580869 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1623d112-895b-42a2-8a76-cf082423e8ac-config\") pod \"console-operator-58897d9998-b9pwk\" (UID: \"1623d112-895b-42a2-8a76-cf082423e8ac\") " pod="openshift-console-operator/console-operator-58897d9998-b9pwk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.580908 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cp6v\" (UniqueName: \"kubernetes.io/projected/8651c6d0-c735-4e26-a1aa-cf6090cd469f-kube-api-access-6cp6v\") pod \"dns-operator-744455d44c-nh6pl\" (UID: \"8651c6d0-c735-4e26-a1aa-cf6090cd469f\") " pod="openshift-dns-operator/dns-operator-744455d44c-nh6pl" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.580976 4971 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfm29\" (UniqueName: \"kubernetes.io/projected/7ca57976-e7d8-485a-8277-a3da59178bf6-kube-api-access-pfm29\") pod \"packageserver-d55dfcdfc-sbk9w\" (UID: \"7ca57976-e7d8-485a-8277-a3da59178bf6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.581136 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/227860ca-baf3-4b07-b0b3-9a29a9eedab6-installation-pull-secrets\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.581177 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/7ca57976-e7d8-485a-8277-a3da59178bf6-tmpfs\") pod \"packageserver-d55dfcdfc-sbk9w\" (UID: \"7ca57976-e7d8-485a-8277-a3da59178bf6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.581229 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1623d112-895b-42a2-8a76-cf082423e8ac-trusted-ca\") pod \"console-operator-58897d9998-b9pwk\" (UID: \"1623d112-895b-42a2-8a76-cf082423e8ac\") " pod="openshift-console-operator/console-operator-58897d9998-b9pwk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.581263 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8jgk\" (UniqueName: \"kubernetes.io/projected/506301cc-392b-4aad-b7b8-29dbdde4b6fa-kube-api-access-d8jgk\") pod \"openshift-controller-manager-operator-756b6f6bc6-8thn4\" (UID: \"506301cc-392b-4aad-b7b8-29dbdde4b6fa\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8thn4" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.581319 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cw2g4\" (UniqueName: \"kubernetes.io/projected/3f201996-72e7-43c9-b004-47366d4b05e4-kube-api-access-cw2g4\") pod \"etcd-operator-b45778765-497pk\" (UID: \"3f201996-72e7-43c9-b004-47366d4b05e4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.581651 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f201996-72e7-43c9-b004-47366d4b05e4-serving-cert\") pod \"etcd-operator-b45778765-497pk\" (UID: \"3f201996-72e7-43c9-b004-47366d4b05e4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.581714 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/5c9c5353-0cec-4776-9c51-553e90e76a2a-default-certificate\") pod \"router-default-5444994796-s6mff\" (UID: \"5c9c5353-0cec-4776-9c51-553e90e76a2a\") " pod="openshift-ingress/router-default-5444994796-s6mff" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.581858 4971 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51fe0600-5a21-48b7-9c07-91a11a547256-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mgswn\" (UID: \"51fe0600-5a21-48b7-9c07-91a11a547256\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mgswn" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.581918 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7ca57976-e7d8-485a-8277-a3da59178bf6-apiservice-cert\") pod \"packageserver-d55dfcdfc-sbk9w\" (UID: \"7ca57976-e7d8-485a-8277-a3da59178bf6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.581972 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7ca57976-e7d8-485a-8277-a3da59178bf6-webhook-cert\") pod \"packageserver-d55dfcdfc-sbk9w\" (UID: \"7ca57976-e7d8-485a-8277-a3da59178bf6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.582000 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/227860ca-baf3-4b07-b0b3-9a29a9eedab6-ca-trust-extracted\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.582026 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f201996-72e7-43c9-b004-47366d4b05e4-config\") pod \"etcd-operator-b45778765-497pk\" (UID: \"3f201996-72e7-43c9-b004-47366d4b05e4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.582056 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4thq\" (UniqueName: \"kubernetes.io/projected/3973ab95-1d0e-49d6-aadb-79fde6dd74b8-kube-api-access-n4thq\") pod \"package-server-manager-789f6589d5-p5wn9\" (UID: \"3973ab95-1d0e-49d6-aadb-79fde6dd74b8\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-p5wn9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.582080 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f50e533-b652-42c8-9f91-124b7be386fc-config\") pod \"machine-approver-56656f9798-j6c6b\" (UID: \"0f50e533-b652-42c8-9f91-124b7be386fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j6c6b" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.582150 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/3f201996-72e7-43c9-b004-47366d4b05e4-etcd-service-ca\") pod \"etcd-operator-b45778765-497pk\" (UID: \"3f201996-72e7-43c9-b004-47366d4b05e4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.582193 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5c9c5353-0cec-4776-9c51-553e90e76a2a-service-ca-bundle\") pod \"router-default-5444994796-s6mff\" (UID: \"5c9c5353-0cec-4776-9c51-553e90e76a2a\") " pod="openshift-ingress/router-default-5444994796-s6mff" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.582237 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/227860ca-baf3-4b07-b0b3-9a29a9eedab6-registry-certificates\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.582260 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/506301cc-392b-4aad-b7b8-29dbdde4b6fa-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8thn4\" (UID: \"506301cc-392b-4aad-b7b8-29dbdde4b6fa\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8thn4" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.582303 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1623d112-895b-42a2-8a76-cf082423e8ac-serving-cert\") pod \"console-operator-58897d9998-b9pwk\" (UID: \"1623d112-895b-42a2-8a76-cf082423e8ac\") " pod="openshift-console-operator/console-operator-58897d9998-b9pwk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.582330 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpfgg\" (UniqueName: \"kubernetes.io/projected/51fe0600-5a21-48b7-9c07-91a11a547256-kube-api-access-mpfgg\") pod \"openshift-apiserver-operator-796bbdcf4f-mgswn\" (UID: \"51fe0600-5a21-48b7-9c07-91a11a547256\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mgswn" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.582444 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0f50e533-b652-42c8-9f91-124b7be386fc-auth-proxy-config\") pod \"machine-approver-56656f9798-j6c6b\" (UID: \"0f50e533-b652-42c8-9f91-124b7be386fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j6c6b" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.582511 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwxfz\" (UniqueName: \"kubernetes.io/projected/5c9c5353-0cec-4776-9c51-553e90e76a2a-kube-api-access-qwxfz\") pod \"router-default-5444994796-s6mff\" (UID: \"5c9c5353-0cec-4776-9c51-553e90e76a2a\") " pod="openshift-ingress/router-default-5444994796-s6mff" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.583283 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/227860ca-baf3-4b07-b0b3-9a29a9eedab6-registry-tls\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.583351 4971 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/227860ca-baf3-4b07-b0b3-9a29a9eedab6-bound-sa-token\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.583393 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3b071787-4135-43ce-a44e-68a0be71919e-trusted-ca\") pod \"ingress-operator-5b745b69d9-jdbbc\" (UID: \"3b071787-4135-43ce-a44e-68a0be71919e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.583511 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3f201996-72e7-43c9-b004-47366d4b05e4-etcd-client\") pod \"etcd-operator-b45778765-497pk\" (UID: \"3f201996-72e7-43c9-b004-47366d4b05e4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.583882 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/0f50e533-b652-42c8-9f91-124b7be386fc-machine-approver-tls\") pod \"machine-approver-56656f9798-j6c6b\" (UID: \"0f50e533-b652-42c8-9f91-124b7be386fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j6c6b" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.583933 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5c9c5353-0cec-4776-9c51-553e90e76a2a-metrics-certs\") pod \"router-default-5444994796-s6mff\" (UID: \"5c9c5353-0cec-4776-9c51-553e90e76a2a\") " pod="openshift-ingress/router-default-5444994796-s6mff" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.583957 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3b071787-4135-43ce-a44e-68a0be71919e-metrics-tls\") pod \"ingress-operator-5b745b69d9-jdbbc\" (UID: \"3b071787-4135-43ce-a44e-68a0be71919e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.584003 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/5c9c5353-0cec-4776-9c51-553e90e76a2a-stats-auth\") pod \"router-default-5444994796-s6mff\" (UID: \"5c9c5353-0cec-4776-9c51-553e90e76a2a\") " pod="openshift-ingress/router-default-5444994796-s6mff" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.584040 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/227860ca-baf3-4b07-b0b3-9a29a9eedab6-trusted-ca\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.584131 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52fzj\" (UniqueName: 
\"kubernetes.io/projected/1623d112-895b-42a2-8a76-cf082423e8ac-kube-api-access-52fzj\") pod \"console-operator-58897d9998-b9pwk\" (UID: \"1623d112-895b-42a2-8a76-cf082423e8ac\") " pod="openshift-console-operator/console-operator-58897d9998-b9pwk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.584196 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49q2p\" (UniqueName: \"kubernetes.io/projected/a602a33c-f8ea-45e4-8295-3c2230005ef4-kube-api-access-49q2p\") pod \"downloads-7954f5f757-75prl\" (UID: \"a602a33c-f8ea-45e4-8295-3c2230005ef4\") " pod="openshift-console/downloads-7954f5f757-75prl" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.584249 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8651c6d0-c735-4e26-a1aa-cf6090cd469f-metrics-tls\") pod \"dns-operator-744455d44c-nh6pl\" (UID: \"8651c6d0-c735-4e26-a1aa-cf6090cd469f\") " pod="openshift-dns-operator/dns-operator-744455d44c-nh6pl" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.584364 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bj9jj\" (UniqueName: \"kubernetes.io/projected/3b071787-4135-43ce-a44e-68a0be71919e-kube-api-access-bj9jj\") pod \"ingress-operator-5b745b69d9-jdbbc\" (UID: \"3b071787-4135-43ce-a44e-68a0be71919e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.584485 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/3f201996-72e7-43c9-b004-47366d4b05e4-etcd-ca\") pod \"etcd-operator-b45778765-497pk\" (UID: \"3f201996-72e7-43c9-b004-47366d4b05e4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.584687 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3973ab95-1d0e-49d6-aadb-79fde6dd74b8-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-p5wn9\" (UID: \"3973ab95-1d0e-49d6-aadb-79fde6dd74b8\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-p5wn9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.584750 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51fe0600-5a21-48b7-9c07-91a11a547256-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mgswn\" (UID: \"51fe0600-5a21-48b7-9c07-91a11a547256\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mgswn" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.585086 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.585173 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/506301cc-392b-4aad-b7b8-29dbdde4b6fa-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8thn4\" (UID: \"506301cc-392b-4aad-b7b8-29dbdde4b6fa\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8thn4" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.585216 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhr9v\" (UniqueName: \"kubernetes.io/projected/0f50e533-b652-42c8-9f91-124b7be386fc-kube-api-access-nhr9v\") pod \"machine-approver-56656f9798-j6c6b\" (UID: \"0f50e533-b652-42c8-9f91-124b7be386fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j6c6b" Nov 27 06:55:02 crc kubenswrapper[4971]: E1127 06:55:02.585722 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:03.085694235 +0000 UTC m=+141.277738193 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.600217 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.619214 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.638548 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.659021 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.679663 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.685898 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:02 crc kubenswrapper[4971]: E1127 06:55:02.686111 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:03.186081814 +0000 UTC m=+141.378125732 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686185 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/0f50e533-b652-42c8-9f91-124b7be386fc-machine-approver-tls\") pod \"machine-approver-56656f9798-j6c6b\" (UID: \"0f50e533-b652-42c8-9f91-124b7be386fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j6c6b" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686216 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5c9c5353-0cec-4776-9c51-553e90e76a2a-metrics-certs\") pod \"router-default-5444994796-s6mff\" (UID: \"5c9c5353-0cec-4776-9c51-553e90e76a2a\") " pod="openshift-ingress/router-default-5444994796-s6mff" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686238 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3b071787-4135-43ce-a44e-68a0be71919e-metrics-tls\") pod \"ingress-operator-5b745b69d9-jdbbc\" (UID: \"3b071787-4135-43ce-a44e-68a0be71919e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686262 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686288 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/227860ca-baf3-4b07-b0b3-9a29a9eedab6-trusted-ca\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686313 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52fzj\" (UniqueName: \"kubernetes.io/projected/1623d112-895b-42a2-8a76-cf082423e8ac-kube-api-access-52fzj\") pod \"console-operator-58897d9998-b9pwk\" (UID: \"1623d112-895b-42a2-8a76-cf082423e8ac\") " pod="openshift-console-operator/console-operator-58897d9998-b9pwk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686332 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/5c9c5353-0cec-4776-9c51-553e90e76a2a-stats-auth\") pod \"router-default-5444994796-s6mff\" (UID: \"5c9c5353-0cec-4776-9c51-553e90e76a2a\") " pod="openshift-ingress/router-default-5444994796-s6mff" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686353 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686376 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6df59c92-6515-4bf1-aad4-57daf0d8fc76-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-m7fh9\" (UID: \"6df59c92-6515-4bf1-aad4-57daf0d8fc76\") " pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686398 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bj9jj\" (UniqueName: \"kubernetes.io/projected/3b071787-4135-43ce-a44e-68a0be71919e-kube-api-access-bj9jj\") pod \"ingress-operator-5b745b69d9-jdbbc\" (UID: \"3b071787-4135-43ce-a44e-68a0be71919e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686417 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8651c6d0-c735-4e26-a1aa-cf6090cd469f-metrics-tls\") pod \"dns-operator-744455d44c-nh6pl\" (UID: \"8651c6d0-c735-4e26-a1aa-cf6090cd469f\") " pod="openshift-dns-operator/dns-operator-744455d44c-nh6pl" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686439 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r67f4\" (UniqueName: \"kubernetes.io/projected/6e183c66-f7ea-4191-9f0d-fb2eb12ee864-kube-api-access-r67f4\") pod \"multus-admission-controller-857f4d67dd-xhx92\" (UID: \"6e183c66-f7ea-4191-9f0d-fb2eb12ee864\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xhx92" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686462 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686499 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7cc88187-87e4-4fa5-a431-d3b7b4779f56-profile-collector-cert\") pod \"catalog-operator-68c6474976-ghv48\" (UID: \"7cc88187-87e4-4fa5-a431-d3b7b4779f56\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686526 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686567 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/b2e2892d-ee1f-4349-b6b5-d0bc908d1397-mountpoint-dir\") pod \"csi-hostpathplugin-p6gxf\" (UID: \"b2e2892d-ee1f-4349-b6b5-d0bc908d1397\") " pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686585 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k564m\" (UniqueName: \"kubernetes.io/projected/7cc88187-87e4-4fa5-a431-d3b7b4779f56-kube-api-access-k564m\") pod \"catalog-operator-68c6474976-ghv48\" (UID: \"7cc88187-87e4-4fa5-a431-d3b7b4779f56\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686604 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/75f685e8-220b-4588-8f26-66d305e3dfea-images\") pod \"machine-config-operator-74547568cd-j9zw4\" (UID: \"75f685e8-220b-4588-8f26-66d305e3dfea\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686627 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfdp8\" (UniqueName: \"kubernetes.io/projected/227860ca-baf3-4b07-b0b3-9a29a9eedab6-kube-api-access-xfdp8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686644 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sclj4\" (UniqueName: \"kubernetes.io/projected/89bb08d2-3252-4d85-91bc-3b595be4c32a-kube-api-access-sclj4\") pod \"collect-profiles-29403765-26klv\" (UID: \"89bb08d2-3252-4d85-91bc-3b595be4c32a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686664 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686682 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686700 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/b2e2892d-ee1f-4349-b6b5-d0bc908d1397-plugins-dir\") pod \"csi-hostpathplugin-p6gxf\" (UID: \"b2e2892d-ee1f-4349-b6b5-d0bc908d1397\") " pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686718 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45462412-2e4e-480d-8874-2949c5dd691e-config\") pod \"kube-controller-manager-operator-78b949d7b-f6k2p\" (UID: \"45462412-2e4e-480d-8874-2949c5dd691e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-f6k2p" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686741 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1623d112-895b-42a2-8a76-cf082423e8ac-config\") pod \"console-operator-58897d9998-b9pwk\" (UID: \"1623d112-895b-42a2-8a76-cf082423e8ac\") " pod="openshift-console-operator/console-operator-58897d9998-b9pwk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686762 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686779 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/b2e2892d-ee1f-4349-b6b5-d0bc908d1397-socket-dir\") pod \"csi-hostpathplugin-p6gxf\" (UID: \"b2e2892d-ee1f-4349-b6b5-d0bc908d1397\") " pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686799 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89bb08d2-3252-4d85-91bc-3b595be4c32a-secret-volume\") pod \"collect-profiles-29403765-26klv\" (UID: \"89bb08d2-3252-4d85-91bc-3b595be4c32a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686818 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/b2e2892d-ee1f-4349-b6b5-d0bc908d1397-csi-data-dir\") pod \"csi-hostpathplugin-p6gxf\" (UID: \"b2e2892d-ee1f-4349-b6b5-d0bc908d1397\") " pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686836 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3a0d9f4d-880f-4189-b479-fa0a1f2fb513-srv-cert\") pod \"olm-operator-6b444d44fb-r4xbz\" (UID: \"3a0d9f4d-880f-4189-b479-fa0a1f2fb513\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686859 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/227860ca-baf3-4b07-b0b3-9a29a9eedab6-installation-pull-secrets\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686877 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: 
\"kubernetes.io/empty-dir/7ca57976-e7d8-485a-8277-a3da59178bf6-tmpfs\") pod \"packageserver-d55dfcdfc-sbk9w\" (UID: \"7ca57976-e7d8-485a-8277-a3da59178bf6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686895 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1623d112-895b-42a2-8a76-cf082423e8ac-trusted-ca\") pod \"console-operator-58897d9998-b9pwk\" (UID: \"1623d112-895b-42a2-8a76-cf082423e8ac\") " pod="openshift-console-operator/console-operator-58897d9998-b9pwk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686914 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7cc88187-87e4-4fa5-a431-d3b7b4779f56-srv-cert\") pod \"catalog-operator-68c6474976-ghv48\" (UID: \"7cc88187-87e4-4fa5-a431-d3b7b4779f56\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.686933 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6e183c66-f7ea-4191-9f0d-fb2eb12ee864-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-xhx92\" (UID: \"6e183c66-f7ea-4191-9f0d-fb2eb12ee864\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xhx92" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.687590 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9q5c\" (UniqueName: \"kubernetes.io/projected/6df59c92-6515-4bf1-aad4-57daf0d8fc76-kube-api-access-t9q5c\") pod \"marketplace-operator-79b997595-m7fh9\" (UID: \"6df59c92-6515-4bf1-aad4-57daf0d8fc76\") " pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.687614 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/35a21adf-32b6-4a19-b592-0f941e07a3c5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-n56lw\" (UID: \"35a21adf-32b6-4a19-b592-0f941e07a3c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n56lw" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.687639 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cw2g4\" (UniqueName: \"kubernetes.io/projected/3f201996-72e7-43c9-b004-47366d4b05e4-kube-api-access-cw2g4\") pod \"etcd-operator-b45778765-497pk\" (UID: \"3f201996-72e7-43c9-b004-47366d4b05e4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.687659 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6df59c92-6515-4bf1-aad4-57daf0d8fc76-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-m7fh9\" (UID: \"6df59c92-6515-4bf1-aad4-57daf0d8fc76\") " pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.687679 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-audit-policies\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.687698 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jckhf\" (UniqueName: \"kubernetes.io/projected/75f685e8-220b-4588-8f26-66d305e3dfea-kube-api-access-jckhf\") pod \"machine-config-operator-74547568cd-j9zw4\" (UID: \"75f685e8-220b-4588-8f26-66d305e3dfea\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.687721 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51fe0600-5a21-48b7-9c07-91a11a547256-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mgswn\" (UID: \"51fe0600-5a21-48b7-9c07-91a11a547256\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mgswn" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.688021 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7ca57976-e7d8-485a-8277-a3da59178bf6-apiservice-cert\") pod \"packageserver-d55dfcdfc-sbk9w\" (UID: \"7ca57976-e7d8-485a-8277-a3da59178bf6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.688097 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7ca57976-e7d8-485a-8277-a3da59178bf6-webhook-cert\") pod \"packageserver-d55dfcdfc-sbk9w\" (UID: \"7ca57976-e7d8-485a-8277-a3da59178bf6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.688152 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ppfn\" (UniqueName: \"kubernetes.io/projected/cd2e0a36-2e94-4ed6-9b7d-1755d593d805-kube-api-access-6ppfn\") pod \"kube-storage-version-migrator-operator-b67b599dd-2drfq\" (UID: \"cd2e0a36-2e94-4ed6-9b7d-1755d593d805\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2drfq" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.688205 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/227860ca-baf3-4b07-b0b3-9a29a9eedab6-ca-trust-extracted\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.688304 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f201996-72e7-43c9-b004-47366d4b05e4-config\") pod \"etcd-operator-b45778765-497pk\" (UID: \"3f201996-72e7-43c9-b004-47366d4b05e4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.688347 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9dddee2-cf40-4aa7-9e05-0776bfb4d534-serving-cert\") pod 
\"kube-apiserver-operator-766d6c64bb-hh55n\" (UID: \"c9dddee2-cf40-4aa7-9e05-0776bfb4d534\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hh55n" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.689646 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f201996-72e7-43c9-b004-47366d4b05e4-config\") pod \"etcd-operator-b45778765-497pk\" (UID: \"3f201996-72e7-43c9-b004-47366d4b05e4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.689730 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51fe0600-5a21-48b7-9c07-91a11a547256-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mgswn\" (UID: \"51fe0600-5a21-48b7-9c07-91a11a547256\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mgswn" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.689897 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd2e0a36-2e94-4ed6-9b7d-1755d593d805-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-2drfq\" (UID: \"cd2e0a36-2e94-4ed6-9b7d-1755d593d805\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2drfq" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.689988 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6aeeb09d-7519-42c7-be1d-2355b9a704e2-signing-cabundle\") pod \"service-ca-9c57cc56f-7k28d\" (UID: \"6aeeb09d-7519-42c7-be1d-2355b9a704e2\") " pod="openshift-service-ca/service-ca-9c57cc56f-7k28d" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.690086 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/3f201996-72e7-43c9-b004-47366d4b05e4-etcd-service-ca\") pod \"etcd-operator-b45778765-497pk\" (UID: \"3f201996-72e7-43c9-b004-47366d4b05e4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.690195 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9q9qk\" (UniqueName: \"kubernetes.io/projected/f268e2fa-798d-4438-9225-24c4a07ea999-kube-api-access-9q9qk\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.690244 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35a21adf-32b6-4a19-b592-0f941e07a3c5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-n56lw\" (UID: \"35a21adf-32b6-4a19-b592-0f941e07a3c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n56lw" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.690281 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/227860ca-baf3-4b07-b0b3-9a29a9eedab6-ca-trust-extracted\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.690297 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.690340 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpg86\" (UniqueName: \"kubernetes.io/projected/b28e17fe-93d1-4cdb-a9a1-33c37bbe1877-kube-api-access-kpg86\") pod \"machine-config-server-52sbj\" (UID: \"b28e17fe-93d1-4cdb-a9a1-33c37bbe1877\") " pod="openshift-machine-config-operator/machine-config-server-52sbj" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.690421 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/227860ca-baf3-4b07-b0b3-9a29a9eedab6-registry-certificates\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.690468 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1623d112-895b-42a2-8a76-cf082423e8ac-serving-cert\") pod \"console-operator-58897d9998-b9pwk\" (UID: \"1623d112-895b-42a2-8a76-cf082423e8ac\") " pod="openshift-console-operator/console-operator-58897d9998-b9pwk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.690686 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f268e2fa-798d-4438-9225-24c4a07ea999-audit-dir\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.690743 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1623d112-895b-42a2-8a76-cf082423e8ac-config\") pod \"console-operator-58897d9998-b9pwk\" (UID: \"1623d112-895b-42a2-8a76-cf082423e8ac\") " pod="openshift-console-operator/console-operator-58897d9998-b9pwk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.690746 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd2e0a36-2e94-4ed6-9b7d-1755d593d805-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-2drfq\" (UID: \"cd2e0a36-2e94-4ed6-9b7d-1755d593d805\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2drfq" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.690825 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6aeeb09d-7519-42c7-be1d-2355b9a704e2-signing-key\") pod \"service-ca-9c57cc56f-7k28d\" (UID: \"6aeeb09d-7519-42c7-be1d-2355b9a704e2\") " pod="openshift-service-ca/service-ca-9c57cc56f-7k28d" Nov 27 06:55:02 crc kubenswrapper[4971]: 
I1127 06:55:02.690991 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/3f201996-72e7-43c9-b004-47366d4b05e4-etcd-service-ca\") pod \"etcd-operator-b45778765-497pk\" (UID: \"3f201996-72e7-43c9-b004-47366d4b05e4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-497pk"
Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.691066 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0f50e533-b652-42c8-9f91-124b7be386fc-auth-proxy-config\") pod \"machine-approver-56656f9798-j6c6b\" (UID: \"0f50e533-b652-42c8-9f91-124b7be386fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j6c6b"
Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.691097 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwxfz\" (UniqueName: \"kubernetes.io/projected/5c9c5353-0cec-4776-9c51-553e90e76a2a-kube-api-access-qwxfz\") pod \"router-default-5444994796-s6mff\" (UID: \"5c9c5353-0cec-4776-9c51-553e90e76a2a\") " pod="openshift-ingress/router-default-5444994796-s6mff"
Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.691114 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/7ca57976-e7d8-485a-8277-a3da59178bf6-tmpfs\") pod \"packageserver-d55dfcdfc-sbk9w\" (UID: \"7ca57976-e7d8-485a-8277-a3da59178bf6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w"
Nov 27 06:55:02 crc kubenswrapper[4971]: E1127 06:55:02.691393 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:03.191372588 +0000 UTC m=+141.383416516 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.692283 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/227860ca-baf3-4b07-b0b3-9a29a9eedab6-trusted-ca\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9"
Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.693135 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1623d112-895b-42a2-8a76-cf082423e8ac-trusted-ca\") pod \"console-operator-58897d9998-b9pwk\" (UID: \"1623d112-895b-42a2-8a76-cf082423e8ac\") " pod="openshift-console-operator/console-operator-58897d9998-b9pwk"
Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.693663 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8t4f\" (UniqueName: \"kubernetes.io/projected/05b6cd99-aeb2-4b20-8fb3-c50e72a03611-kube-api-access-m8t4f\") pod \"dns-default-k2ldt\" (UID: \"05b6cd99-aeb2-4b20-8fb3-c50e72a03611\") " pod="openshift-dns/dns-default-k2ldt"
Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.693795 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0f50e533-b652-42c8-9f91-124b7be386fc-auth-proxy-config\") pod \"machine-approver-56656f9798-j6c6b\" (UID: \"0f50e533-b652-42c8-9f91-124b7be386fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j6c6b"
Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.693849 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/227860ca-baf3-4b07-b0b3-9a29a9eedab6-registry-certificates\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9"
Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.693953 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/73452466-9780-461c-abd6-282cda0d9a7e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9sjf5\" (UID: \"73452466-9780-461c-abd6-282cda0d9a7e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9sjf5"
Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.694223 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/45462412-2e4e-480d-8874-2949c5dd691e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-f6k2p\" (UID: \"45462412-2e4e-480d-8874-2949c5dd691e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-f6k2p"
Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.694407 4971 reconciler_common.go:218]
"operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3f201996-72e7-43c9-b004-47366d4b05e4-etcd-client\") pod \"etcd-operator-b45778765-497pk\" (UID: \"3f201996-72e7-43c9-b004-47366d4b05e4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.694583 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45462412-2e4e-480d-8874-2949c5dd691e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-f6k2p\" (UID: \"45462412-2e4e-480d-8874-2949c5dd691e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-f6k2p" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.694774 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/85f4323a-d5cf-45bb-b3df-57434dfe4d53-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-ht55m\" (UID: \"85f4323a-d5cf-45bb-b3df-57434dfe4d53\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ht55m" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.694931 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49q2p\" (UniqueName: \"kubernetes.io/projected/a602a33c-f8ea-45e4-8295-3c2230005ef4-kube-api-access-49q2p\") pod \"downloads-7954f5f757-75prl\" (UID: \"a602a33c-f8ea-45e4-8295-3c2230005ef4\") " pod="openshift-console/downloads-7954f5f757-75prl" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.695071 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/3f201996-72e7-43c9-b004-47366d4b05e4-etcd-ca\") pod \"etcd-operator-b45778765-497pk\" (UID: \"3f201996-72e7-43c9-b004-47366d4b05e4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.695209 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3973ab95-1d0e-49d6-aadb-79fde6dd74b8-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-p5wn9\" (UID: \"3973ab95-1d0e-49d6-aadb-79fde6dd74b8\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-p5wn9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.695350 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7twg\" (UniqueName: \"kubernetes.io/projected/3a0d9f4d-880f-4189-b479-fa0a1f2fb513-kube-api-access-x7twg\") pod \"olm-operator-6b444d44fb-r4xbz\" (UID: \"3a0d9f4d-880f-4189-b479-fa0a1f2fb513\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.695502 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51fe0600-5a21-48b7-9c07-91a11a547256-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mgswn\" (UID: \"51fe0600-5a21-48b7-9c07-91a11a547256\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mgswn" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.695729 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.696035 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/506301cc-392b-4aad-b7b8-29dbdde4b6fa-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8thn4\" (UID: \"506301cc-392b-4aad-b7b8-29dbdde4b6fa\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8thn4" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.696200 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhr9v\" (UniqueName: \"kubernetes.io/projected/0f50e533-b652-42c8-9f91-124b7be386fc-kube-api-access-nhr9v\") pod \"machine-approver-56656f9798-j6c6b\" (UID: \"0f50e533-b652-42c8-9f91-124b7be386fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j6c6b" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.696367 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c9dddee2-cf40-4aa7-9e05-0776bfb4d534-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-hh55n\" (UID: \"c9dddee2-cf40-4aa7-9e05-0776bfb4d534\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hh55n" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.696523 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/75f685e8-220b-4588-8f26-66d305e3dfea-proxy-tls\") pod \"machine-config-operator-74547568cd-j9zw4\" (UID: \"75f685e8-220b-4588-8f26-66d305e3dfea\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.696705 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/baace783-c727-4031-bdae-749783258756-config\") pod \"service-ca-operator-777779d784-kzfsj\" (UID: \"baace783-c727-4031-bdae-749783258756\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-kzfsj" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.696849 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtvqb\" (UniqueName: \"kubernetes.io/projected/00fa6f7a-50bb-42e0-8643-a0fae088f528-kube-api-access-jtvqb\") pod \"migrator-59844c95c7-kg7xs\" (UID: \"00fa6f7a-50bb-42e0-8643-a0fae088f528\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-kg7xs" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.697024 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3b071787-4135-43ce-a44e-68a0be71919e-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jdbbc\" (UID: \"3b071787-4135-43ce-a44e-68a0be71919e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.697184 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.697355 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9dddee2-cf40-4aa7-9e05-0776bfb4d534-config\") pod \"kube-apiserver-operator-766d6c64bb-hh55n\" (UID: \"c9dddee2-cf40-4aa7-9e05-0776bfb4d534\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hh55n" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.697260 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/3f201996-72e7-43c9-b004-47366d4b05e4-etcd-ca\") pod \"etcd-operator-b45778765-497pk\" (UID: \"3f201996-72e7-43c9-b004-47366d4b05e4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.694430 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5c9c5353-0cec-4776-9c51-553e90e76a2a-metrics-certs\") pod \"router-default-5444994796-s6mff\" (UID: \"5c9c5353-0cec-4776-9c51-553e90e76a2a\") " pod="openshift-ingress/router-default-5444994796-s6mff" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.697415 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7ca57976-e7d8-485a-8277-a3da59178bf6-apiservice-cert\") pod \"packageserver-d55dfcdfc-sbk9w\" (UID: \"7ca57976-e7d8-485a-8277-a3da59178bf6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.696052 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7ca57976-e7d8-485a-8277-a3da59178bf6-webhook-cert\") pod \"packageserver-d55dfcdfc-sbk9w\" (UID: \"7ca57976-e7d8-485a-8277-a3da59178bf6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.695863 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/5c9c5353-0cec-4776-9c51-553e90e76a2a-stats-auth\") pod \"router-default-5444994796-s6mff\" (UID: \"5c9c5353-0cec-4776-9c51-553e90e76a2a\") " pod="openshift-ingress/router-default-5444994796-s6mff" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.697268 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/0f50e533-b652-42c8-9f91-124b7be386fc-machine-approver-tls\") pod \"machine-approver-56656f9798-j6c6b\" (UID: \"0f50e533-b652-42c8-9f91-124b7be386fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j6c6b" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.698039 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cp6v\" (UniqueName: \"kubernetes.io/projected/8651c6d0-c735-4e26-a1aa-cf6090cd469f-kube-api-access-6cp6v\") pod \"dns-operator-744455d44c-nh6pl\" (UID: 
\"8651c6d0-c735-4e26-a1aa-cf6090cd469f\") " pod="openshift-dns-operator/dns-operator-744455d44c-nh6pl" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.699717 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/75f685e8-220b-4588-8f26-66d305e3dfea-auth-proxy-config\") pod \"machine-config-operator-74547568cd-j9zw4\" (UID: \"75f685e8-220b-4588-8f26-66d305e3dfea\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.699934 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfm29\" (UniqueName: \"kubernetes.io/projected/7ca57976-e7d8-485a-8277-a3da59178bf6-kube-api-access-pfm29\") pod \"packageserver-d55dfcdfc-sbk9w\" (UID: \"7ca57976-e7d8-485a-8277-a3da59178bf6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.700082 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8jgk\" (UniqueName: \"kubernetes.io/projected/506301cc-392b-4aad-b7b8-29dbdde4b6fa-kube-api-access-d8jgk\") pod \"openshift-controller-manager-operator-756b6f6bc6-8thn4\" (UID: \"506301cc-392b-4aad-b7b8-29dbdde4b6fa\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8thn4" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.699257 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3f201996-72e7-43c9-b004-47366d4b05e4-etcd-client\") pod \"etcd-operator-b45778765-497pk\" (UID: \"3f201996-72e7-43c9-b004-47366d4b05e4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.698091 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/506301cc-392b-4aad-b7b8-29dbdde4b6fa-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8thn4\" (UID: \"506301cc-392b-4aad-b7b8-29dbdde4b6fa\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8thn4" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.698407 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8651c6d0-c735-4e26-a1aa-cf6090cd469f-metrics-tls\") pod \"dns-operator-744455d44c-nh6pl\" (UID: \"8651c6d0-c735-4e26-a1aa-cf6090cd469f\") " pod="openshift-dns-operator/dns-operator-744455d44c-nh6pl" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.699096 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/227860ca-baf3-4b07-b0b3-9a29a9eedab6-installation-pull-secrets\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.700675 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f201996-72e7-43c9-b004-47366d4b05e4-serving-cert\") pod \"etcd-operator-b45778765-497pk\" (UID: \"3f201996-72e7-43c9-b004-47366d4b05e4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" Nov 27 06:55:02 crc 
kubenswrapper[4971]: I1127 06:55:02.700837 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/5c9c5353-0cec-4776-9c51-553e90e76a2a-default-certificate\") pod \"router-default-5444994796-s6mff\" (UID: \"5c9c5353-0cec-4776-9c51-553e90e76a2a\") " pod="openshift-ingress/router-default-5444994796-s6mff" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.700946 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.700808 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51fe0600-5a21-48b7-9c07-91a11a547256-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mgswn\" (UID: \"51fe0600-5a21-48b7-9c07-91a11a547256\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mgswn" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.701171 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/05b6cd99-aeb2-4b20-8fb3-c50e72a03611-metrics-tls\") pod \"dns-default-k2ldt\" (UID: \"05b6cd99-aeb2-4b20-8fb3-c50e72a03611\") " pod="openshift-dns/dns-default-k2ldt" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.701322 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.701484 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwwfn\" (UniqueName: \"kubernetes.io/projected/26f7ec15-f388-4e06-b038-cc04d0f21c08-kube-api-access-jwwfn\") pod \"ingress-canary-9snm7\" (UID: \"26f7ec15-f388-4e06-b038-cc04d0f21c08\") " pod="openshift-ingress-canary/ingress-canary-9snm7" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.701807 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/baace783-c727-4031-bdae-749783258756-serving-cert\") pod \"service-ca-operator-777779d784-kzfsj\" (UID: \"baace783-c727-4031-bdae-749783258756\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-kzfsj" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.701999 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqc7t\" (UniqueName: \"kubernetes.io/projected/b2e2892d-ee1f-4349-b6b5-d0bc908d1397-kube-api-access-pqc7t\") pod \"csi-hostpathplugin-p6gxf\" (UID: \"b2e2892d-ee1f-4349-b6b5-d0bc908d1397\") " pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.702170 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/73452466-9780-461c-abd6-282cda0d9a7e-proxy-tls\") pod \"machine-config-controller-84d6567774-9sjf5\" (UID: \"73452466-9780-461c-abd6-282cda0d9a7e\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9sjf5" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.702339 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35a21adf-32b6-4a19-b592-0f941e07a3c5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-n56lw\" (UID: \"35a21adf-32b6-4a19-b592-0f941e07a3c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n56lw" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.702491 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/b2e2892d-ee1f-4349-b6b5-d0bc908d1397-registration-dir\") pod \"csi-hostpathplugin-p6gxf\" (UID: \"b2e2892d-ee1f-4349-b6b5-d0bc908d1397\") " pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.702670 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/b28e17fe-93d1-4cdb-a9a1-33c37bbe1877-certs\") pod \"machine-config-server-52sbj\" (UID: \"b28e17fe-93d1-4cdb-a9a1-33c37bbe1877\") " pod="openshift-machine-config-operator/machine-config-server-52sbj" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.702842 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9mzr\" (UniqueName: \"kubernetes.io/projected/6aeeb09d-7519-42c7-be1d-2355b9a704e2-kube-api-access-r9mzr\") pod \"service-ca-9c57cc56f-7k28d\" (UID: \"6aeeb09d-7519-42c7-be1d-2355b9a704e2\") " pod="openshift-service-ca/service-ca-9c57cc56f-7k28d" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.703001 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4thq\" (UniqueName: \"kubernetes.io/projected/3973ab95-1d0e-49d6-aadb-79fde6dd74b8-kube-api-access-n4thq\") pod \"package-server-manager-789f6589d5-p5wn9\" (UID: \"3973ab95-1d0e-49d6-aadb-79fde6dd74b8\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-p5wn9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.703163 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f50e533-b652-42c8-9f91-124b7be386fc-config\") pod \"machine-approver-56656f9798-j6c6b\" (UID: \"0f50e533-b652-42c8-9f91-124b7be386fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j6c6b" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.703308 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89bb08d2-3252-4d85-91bc-3b595be4c32a-config-volume\") pod \"collect-profiles-29403765-26klv\" (UID: \"89bb08d2-3252-4d85-91bc-3b595be4c32a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.703632 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.703795 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3a0d9f4d-880f-4189-b479-fa0a1f2fb513-profile-collector-cert\") pod \"olm-operator-6b444d44fb-r4xbz\" (UID: \"3a0d9f4d-880f-4189-b479-fa0a1f2fb513\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.703945 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5c9c5353-0cec-4776-9c51-553e90e76a2a-service-ca-bundle\") pod \"router-default-5444994796-s6mff\" (UID: \"5c9c5353-0cec-4776-9c51-553e90e76a2a\") " pod="openshift-ingress/router-default-5444994796-s6mff" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.704125 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/b28e17fe-93d1-4cdb-a9a1-33c37bbe1877-node-bootstrap-token\") pod \"machine-config-server-52sbj\" (UID: \"b28e17fe-93d1-4cdb-a9a1-33c37bbe1877\") " pod="openshift-machine-config-operator/machine-config-server-52sbj" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.704330 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/26f7ec15-f388-4e06-b038-cc04d0f21c08-cert\") pod \"ingress-canary-9snm7\" (UID: \"26f7ec15-f388-4e06-b038-cc04d0f21c08\") " pod="openshift-ingress-canary/ingress-canary-9snm7" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.704589 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/506301cc-392b-4aad-b7b8-29dbdde4b6fa-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8thn4\" (UID: \"506301cc-392b-4aad-b7b8-29dbdde4b6fa\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8thn4" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.704832 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpfgg\" (UniqueName: \"kubernetes.io/projected/51fe0600-5a21-48b7-9c07-91a11a547256-kube-api-access-mpfgg\") pod \"openshift-apiserver-operator-796bbdcf4f-mgswn\" (UID: \"51fe0600-5a21-48b7-9c07-91a11a547256\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mgswn" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.704957 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssk9g\" (UniqueName: \"kubernetes.io/projected/85f4323a-d5cf-45bb-b3df-57434dfe4d53-kube-api-access-ssk9g\") pod \"control-plane-machine-set-operator-78cbb6b69f-ht55m\" (UID: \"85f4323a-d5cf-45bb-b3df-57434dfe4d53\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ht55m" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.705081 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-882xf\" (UniqueName: \"kubernetes.io/projected/73452466-9780-461c-abd6-282cda0d9a7e-kube-api-access-882xf\") pod \"machine-config-controller-84d6567774-9sjf5\" (UID: \"73452466-9780-461c-abd6-282cda0d9a7e\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9sjf5" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.705223 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/227860ca-baf3-4b07-b0b3-9a29a9eedab6-registry-tls\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.702943 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1623d112-895b-42a2-8a76-cf082423e8ac-serving-cert\") pod \"console-operator-58897d9998-b9pwk\" (UID: \"1623d112-895b-42a2-8a76-cf082423e8ac\") " pod="openshift-console-operator/console-operator-58897d9998-b9pwk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.703053 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3973ab95-1d0e-49d6-aadb-79fde6dd74b8-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-p5wn9\" (UID: \"3973ab95-1d0e-49d6-aadb-79fde6dd74b8\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-p5wn9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.703644 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f50e533-b652-42c8-9f91-124b7be386fc-config\") pod \"machine-approver-56656f9798-j6c6b\" (UID: \"0f50e533-b652-42c8-9f91-124b7be386fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j6c6b" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.705402 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f201996-72e7-43c9-b004-47366d4b05e4-serving-cert\") pod \"etcd-operator-b45778765-497pk\" (UID: \"3f201996-72e7-43c9-b004-47366d4b05e4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.704781 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5c9c5353-0cec-4776-9c51-553e90e76a2a-service-ca-bundle\") pod \"router-default-5444994796-s6mff\" (UID: \"5c9c5353-0cec-4776-9c51-553e90e76a2a\") " pod="openshift-ingress/router-default-5444994796-s6mff" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.705358 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/227860ca-baf3-4b07-b0b3-9a29a9eedab6-bound-sa-token\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.705130 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/5c9c5353-0cec-4776-9c51-553e90e76a2a-default-certificate\") pod \"router-default-5444994796-s6mff\" (UID: \"5c9c5353-0cec-4776-9c51-553e90e76a2a\") " pod="openshift-ingress/router-default-5444994796-s6mff" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.705600 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/3b071787-4135-43ce-a44e-68a0be71919e-trusted-ca\") pod \"ingress-operator-5b745b69d9-jdbbc\" (UID: \"3b071787-4135-43ce-a44e-68a0be71919e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.705634 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/05b6cd99-aeb2-4b20-8fb3-c50e72a03611-config-volume\") pod \"dns-default-k2ldt\" (UID: \"05b6cd99-aeb2-4b20-8fb3-c50e72a03611\") " pod="openshift-dns/dns-default-k2ldt" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.705662 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcmzm\" (UniqueName: \"kubernetes.io/projected/baace783-c727-4031-bdae-749783258756-kube-api-access-fcmzm\") pod \"service-ca-operator-777779d784-kzfsj\" (UID: \"baace783-c727-4031-bdae-749783258756\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-kzfsj" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.706891 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3b071787-4135-43ce-a44e-68a0be71919e-metrics-tls\") pod \"ingress-operator-5b745b69d9-jdbbc\" (UID: \"3b071787-4135-43ce-a44e-68a0be71919e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.707760 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3b071787-4135-43ce-a44e-68a0be71919e-trusted-ca\") pod \"ingress-operator-5b745b69d9-jdbbc\" (UID: \"3b071787-4135-43ce-a44e-68a0be71919e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.708683 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/227860ca-baf3-4b07-b0b3-9a29a9eedab6-registry-tls\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.708803 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/506301cc-392b-4aad-b7b8-29dbdde4b6fa-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8thn4\" (UID: \"506301cc-392b-4aad-b7b8-29dbdde4b6fa\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8thn4" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.718846 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.738867 4971 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.758866 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.778583 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.807013 4971 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.807246 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35a21adf-32b6-4a19-b592-0f941e07a3c5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-n56lw\" (UID: \"35a21adf-32b6-4a19-b592-0f941e07a3c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n56lw" Nov 27 06:55:02 crc kubenswrapper[4971]: E1127 06:55:02.807294 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:03.307240925 +0000 UTC m=+141.499284883 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.807389 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/b2e2892d-ee1f-4349-b6b5-d0bc908d1397-registration-dir\") pod \"csi-hostpathplugin-p6gxf\" (UID: \"b2e2892d-ee1f-4349-b6b5-d0bc908d1397\") " pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.807514 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/b28e17fe-93d1-4cdb-a9a1-33c37bbe1877-certs\") pod \"machine-config-server-52sbj\" (UID: \"b28e17fe-93d1-4cdb-a9a1-33c37bbe1877\") " pod="openshift-machine-config-operator/machine-config-server-52sbj" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.807599 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9mzr\" (UniqueName: \"kubernetes.io/projected/6aeeb09d-7519-42c7-be1d-2355b9a704e2-kube-api-access-r9mzr\") pod \"service-ca-9c57cc56f-7k28d\" (UID: \"6aeeb09d-7519-42c7-be1d-2355b9a704e2\") " pod="openshift-service-ca/service-ca-9c57cc56f-7k28d" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.807671 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89bb08d2-3252-4d85-91bc-3b595be4c32a-config-volume\") pod \"collect-profiles-29403765-26klv\" (UID: \"89bb08d2-3252-4d85-91bc-3b595be4c32a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.807719 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-idp-0-file-data\") pod 
\"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.807769 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3a0d9f4d-880f-4189-b479-fa0a1f2fb513-profile-collector-cert\") pod \"olm-operator-6b444d44fb-r4xbz\" (UID: \"3a0d9f4d-880f-4189-b479-fa0a1f2fb513\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.807784 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/b2e2892d-ee1f-4349-b6b5-d0bc908d1397-registration-dir\") pod \"csi-hostpathplugin-p6gxf\" (UID: \"b2e2892d-ee1f-4349-b6b5-d0bc908d1397\") " pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.807850 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/b28e17fe-93d1-4cdb-a9a1-33c37bbe1877-node-bootstrap-token\") pod \"machine-config-server-52sbj\" (UID: \"b28e17fe-93d1-4cdb-a9a1-33c37bbe1877\") " pod="openshift-machine-config-operator/machine-config-server-52sbj" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.807919 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/26f7ec15-f388-4e06-b038-cc04d0f21c08-cert\") pod \"ingress-canary-9snm7\" (UID: \"26f7ec15-f388-4e06-b038-cc04d0f21c08\") " pod="openshift-ingress-canary/ingress-canary-9snm7" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.808035 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-882xf\" (UniqueName: \"kubernetes.io/projected/73452466-9780-461c-abd6-282cda0d9a7e-kube-api-access-882xf\") pod \"machine-config-controller-84d6567774-9sjf5\" (UID: \"73452466-9780-461c-abd6-282cda0d9a7e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9sjf5" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.808095 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssk9g\" (UniqueName: \"kubernetes.io/projected/85f4323a-d5cf-45bb-b3df-57434dfe4d53-kube-api-access-ssk9g\") pod \"control-plane-machine-set-operator-78cbb6b69f-ht55m\" (UID: \"85f4323a-d5cf-45bb-b3df-57434dfe4d53\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ht55m" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.808156 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/05b6cd99-aeb2-4b20-8fb3-c50e72a03611-config-volume\") pod \"dns-default-k2ldt\" (UID: \"05b6cd99-aeb2-4b20-8fb3-c50e72a03611\") " pod="openshift-dns/dns-default-k2ldt" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.808195 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcmzm\" (UniqueName: \"kubernetes.io/projected/baace783-c727-4031-bdae-749783258756-kube-api-access-fcmzm\") pod \"service-ca-operator-777779d784-kzfsj\" (UID: \"baace783-c727-4031-bdae-749783258756\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-kzfsj" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.808756 4971 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.808847 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.808864 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35a21adf-32b6-4a19-b592-0f941e07a3c5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-n56lw\" (UID: \"35a21adf-32b6-4a19-b592-0f941e07a3c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n56lw" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.808888 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6df59c92-6515-4bf1-aad4-57daf0d8fc76-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-m7fh9\" (UID: \"6df59c92-6515-4bf1-aad4-57daf0d8fc76\") " pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.808916 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89bb08d2-3252-4d85-91bc-3b595be4c32a-config-volume\") pod \"collect-profiles-29403765-26klv\" (UID: \"89bb08d2-3252-4d85-91bc-3b595be4c32a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.808933 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r67f4\" (UniqueName: \"kubernetes.io/projected/6e183c66-f7ea-4191-9f0d-fb2eb12ee864-kube-api-access-r67f4\") pod \"multus-admission-controller-857f4d67dd-xhx92\" (UID: \"6e183c66-f7ea-4191-9f0d-fb2eb12ee864\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xhx92" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809012 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809041 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7cc88187-87e4-4fa5-a431-d3b7b4779f56-profile-collector-cert\") pod \"catalog-operator-68c6474976-ghv48\" (UID: \"7cc88187-87e4-4fa5-a431-d3b7b4779f56\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809074 4971 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809096 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k564m\" (UniqueName: \"kubernetes.io/projected/7cc88187-87e4-4fa5-a431-d3b7b4779f56-kube-api-access-k564m\") pod \"catalog-operator-68c6474976-ghv48\" (UID: \"7cc88187-87e4-4fa5-a431-d3b7b4779f56\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809114 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/75f685e8-220b-4588-8f26-66d305e3dfea-images\") pod \"machine-config-operator-74547568cd-j9zw4\" (UID: \"75f685e8-220b-4588-8f26-66d305e3dfea\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809136 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/b2e2892d-ee1f-4349-b6b5-d0bc908d1397-mountpoint-dir\") pod \"csi-hostpathplugin-p6gxf\" (UID: \"b2e2892d-ee1f-4349-b6b5-d0bc908d1397\") " pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809152 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sclj4\" (UniqueName: \"kubernetes.io/projected/89bb08d2-3252-4d85-91bc-3b595be4c32a-kube-api-access-sclj4\") pod \"collect-profiles-29403765-26klv\" (UID: \"89bb08d2-3252-4d85-91bc-3b595be4c32a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809182 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809200 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809219 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/b2e2892d-ee1f-4349-b6b5-d0bc908d1397-plugins-dir\") pod \"csi-hostpathplugin-p6gxf\" (UID: \"b2e2892d-ee1f-4349-b6b5-d0bc908d1397\") " pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809248 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45462412-2e4e-480d-8874-2949c5dd691e-config\") 
pod \"kube-controller-manager-operator-78b949d7b-f6k2p\" (UID: \"45462412-2e4e-480d-8874-2949c5dd691e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-f6k2p" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809276 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809292 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/b2e2892d-ee1f-4349-b6b5-d0bc908d1397-socket-dir\") pod \"csi-hostpathplugin-p6gxf\" (UID: \"b2e2892d-ee1f-4349-b6b5-d0bc908d1397\") " pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809309 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89bb08d2-3252-4d85-91bc-3b595be4c32a-secret-volume\") pod \"collect-profiles-29403765-26klv\" (UID: \"89bb08d2-3252-4d85-91bc-3b595be4c32a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809324 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/b2e2892d-ee1f-4349-b6b5-d0bc908d1397-csi-data-dir\") pod \"csi-hostpathplugin-p6gxf\" (UID: \"b2e2892d-ee1f-4349-b6b5-d0bc908d1397\") " pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809341 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3a0d9f4d-880f-4189-b479-fa0a1f2fb513-srv-cert\") pod \"olm-operator-6b444d44fb-r4xbz\" (UID: \"3a0d9f4d-880f-4189-b479-fa0a1f2fb513\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809398 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7cc88187-87e4-4fa5-a431-d3b7b4779f56-srv-cert\") pod \"catalog-operator-68c6474976-ghv48\" (UID: \"7cc88187-87e4-4fa5-a431-d3b7b4779f56\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809423 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6e183c66-f7ea-4191-9f0d-fb2eb12ee864-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-xhx92\" (UID: \"6e183c66-f7ea-4191-9f0d-fb2eb12ee864\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xhx92" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809445 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9q5c\" (UniqueName: \"kubernetes.io/projected/6df59c92-6515-4bf1-aad4-57daf0d8fc76-kube-api-access-t9q5c\") pod \"marketplace-operator-79b997595-m7fh9\" (UID: \"6df59c92-6515-4bf1-aad4-57daf0d8fc76\") " pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 
06:55:02.809463 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/35a21adf-32b6-4a19-b592-0f941e07a3c5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-n56lw\" (UID: \"35a21adf-32b6-4a19-b592-0f941e07a3c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n56lw" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809492 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6df59c92-6515-4bf1-aad4-57daf0d8fc76-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-m7fh9\" (UID: \"6df59c92-6515-4bf1-aad4-57daf0d8fc76\") " pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809516 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jckhf\" (UniqueName: \"kubernetes.io/projected/75f685e8-220b-4588-8f26-66d305e3dfea-kube-api-access-jckhf\") pod \"machine-config-operator-74547568cd-j9zw4\" (UID: \"75f685e8-220b-4588-8f26-66d305e3dfea\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809570 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-audit-policies\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809598 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ppfn\" (UniqueName: \"kubernetes.io/projected/cd2e0a36-2e94-4ed6-9b7d-1755d593d805-kube-api-access-6ppfn\") pod \"kube-storage-version-migrator-operator-b67b599dd-2drfq\" (UID: \"cd2e0a36-2e94-4ed6-9b7d-1755d593d805\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2drfq" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809626 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9dddee2-cf40-4aa7-9e05-0776bfb4d534-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-hh55n\" (UID: \"c9dddee2-cf40-4aa7-9e05-0776bfb4d534\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hh55n" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809652 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6aeeb09d-7519-42c7-be1d-2355b9a704e2-signing-cabundle\") pod \"service-ca-9c57cc56f-7k28d\" (UID: \"6aeeb09d-7519-42c7-be1d-2355b9a704e2\") " pod="openshift-service-ca/service-ca-9c57cc56f-7k28d" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809671 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd2e0a36-2e94-4ed6-9b7d-1755d593d805-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-2drfq\" (UID: \"cd2e0a36-2e94-4ed6-9b7d-1755d593d805\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2drfq" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 
06:55:02.809689 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35a21adf-32b6-4a19-b592-0f941e07a3c5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-n56lw\" (UID: \"35a21adf-32b6-4a19-b592-0f941e07a3c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n56lw" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809720 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9q9qk\" (UniqueName: \"kubernetes.io/projected/f268e2fa-798d-4438-9225-24c4a07ea999-kube-api-access-9q9qk\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809746 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809770 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpg86\" (UniqueName: \"kubernetes.io/projected/b28e17fe-93d1-4cdb-a9a1-33c37bbe1877-kube-api-access-kpg86\") pod \"machine-config-server-52sbj\" (UID: \"b28e17fe-93d1-4cdb-a9a1-33c37bbe1877\") " pod="openshift-machine-config-operator/machine-config-server-52sbj" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809796 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f268e2fa-798d-4438-9225-24c4a07ea999-audit-dir\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809816 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd2e0a36-2e94-4ed6-9b7d-1755d593d805-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-2drfq\" (UID: \"cd2e0a36-2e94-4ed6-9b7d-1755d593d805\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2drfq" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809839 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6aeeb09d-7519-42c7-be1d-2355b9a704e2-signing-key\") pod \"service-ca-9c57cc56f-7k28d\" (UID: \"6aeeb09d-7519-42c7-be1d-2355b9a704e2\") " pod="openshift-service-ca/service-ca-9c57cc56f-7k28d" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809866 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8t4f\" (UniqueName: \"kubernetes.io/projected/05b6cd99-aeb2-4b20-8fb3-c50e72a03611-kube-api-access-m8t4f\") pod \"dns-default-k2ldt\" (UID: \"05b6cd99-aeb2-4b20-8fb3-c50e72a03611\") " pod="openshift-dns/dns-default-k2ldt" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809903 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/73452466-9780-461c-abd6-282cda0d9a7e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9sjf5\" (UID: \"73452466-9780-461c-abd6-282cda0d9a7e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9sjf5" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809928 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/45462412-2e4e-480d-8874-2949c5dd691e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-f6k2p\" (UID: \"45462412-2e4e-480d-8874-2949c5dd691e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-f6k2p" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809952 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45462412-2e4e-480d-8874-2949c5dd691e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-f6k2p\" (UID: \"45462412-2e4e-480d-8874-2949c5dd691e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-f6k2p" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.809986 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/85f4323a-d5cf-45bb-b3df-57434dfe4d53-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-ht55m\" (UID: \"85f4323a-d5cf-45bb-b3df-57434dfe4d53\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ht55m" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.810015 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7twg\" (UniqueName: \"kubernetes.io/projected/3a0d9f4d-880f-4189-b479-fa0a1f2fb513-kube-api-access-x7twg\") pod \"olm-operator-6b444d44fb-r4xbz\" (UID: \"3a0d9f4d-880f-4189-b479-fa0a1f2fb513\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.810037 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.810071 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/baace783-c727-4031-bdae-749783258756-config\") pod \"service-ca-operator-777779d784-kzfsj\" (UID: \"baace783-c727-4031-bdae-749783258756\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-kzfsj" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.810096 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtvqb\" (UniqueName: \"kubernetes.io/projected/00fa6f7a-50bb-42e0-8643-a0fae088f528-kube-api-access-jtvqb\") pod \"migrator-59844c95c7-kg7xs\" (UID: \"00fa6f7a-50bb-42e0-8643-a0fae088f528\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-kg7xs" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.810114 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c9dddee2-cf40-4aa7-9e05-0776bfb4d534-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-hh55n\" (UID: \"c9dddee2-cf40-4aa7-9e05-0776bfb4d534\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hh55n" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.810144 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/75f685e8-220b-4588-8f26-66d305e3dfea-proxy-tls\") pod \"machine-config-operator-74547568cd-j9zw4\" (UID: \"75f685e8-220b-4588-8f26-66d305e3dfea\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.810171 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.810195 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9dddee2-cf40-4aa7-9e05-0776bfb4d534-config\") pod \"kube-apiserver-operator-766d6c64bb-hh55n\" (UID: \"c9dddee2-cf40-4aa7-9e05-0776bfb4d534\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hh55n" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.810226 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/75f685e8-220b-4588-8f26-66d305e3dfea-auth-proxy-config\") pod \"machine-config-operator-74547568cd-j9zw4\" (UID: \"75f685e8-220b-4588-8f26-66d305e3dfea\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.810284 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/05b6cd99-aeb2-4b20-8fb3-c50e72a03611-metrics-tls\") pod \"dns-default-k2ldt\" (UID: \"05b6cd99-aeb2-4b20-8fb3-c50e72a03611\") " pod="openshift-dns/dns-default-k2ldt" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.810305 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwwfn\" (UniqueName: \"kubernetes.io/projected/26f7ec15-f388-4e06-b038-cc04d0f21c08-kube-api-access-jwwfn\") pod \"ingress-canary-9snm7\" (UID: \"26f7ec15-f388-4e06-b038-cc04d0f21c08\") " pod="openshift-ingress-canary/ingress-canary-9snm7" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.810323 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/baace783-c727-4031-bdae-749783258756-serving-cert\") pod \"service-ca-operator-777779d784-kzfsj\" (UID: \"baace783-c727-4031-bdae-749783258756\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-kzfsj" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.810344 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-router-certs\") pod 
\"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.810362 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqc7t\" (UniqueName: \"kubernetes.io/projected/b2e2892d-ee1f-4349-b6b5-d0bc908d1397-kube-api-access-pqc7t\") pod \"csi-hostpathplugin-p6gxf\" (UID: \"b2e2892d-ee1f-4349-b6b5-d0bc908d1397\") " pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.810382 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/73452466-9780-461c-abd6-282cda0d9a7e-proxy-tls\") pod \"machine-config-controller-84d6567774-9sjf5\" (UID: \"73452466-9780-461c-abd6-282cda0d9a7e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9sjf5" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.810751 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/75f685e8-220b-4588-8f26-66d305e3dfea-images\") pod \"machine-config-operator-74547568cd-j9zw4\" (UID: \"75f685e8-220b-4588-8f26-66d305e3dfea\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.810775 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45462412-2e4e-480d-8874-2949c5dd691e-config\") pod \"kube-controller-manager-operator-78b949d7b-f6k2p\" (UID: \"45462412-2e4e-480d-8874-2949c5dd691e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-f6k2p" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.810873 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/b2e2892d-ee1f-4349-b6b5-d0bc908d1397-mountpoint-dir\") pod \"csi-hostpathplugin-p6gxf\" (UID: \"b2e2892d-ee1f-4349-b6b5-d0bc908d1397\") " pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.811113 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.812033 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/b28e17fe-93d1-4cdb-a9a1-33c37bbe1877-certs\") pod \"machine-config-server-52sbj\" (UID: \"b28e17fe-93d1-4cdb-a9a1-33c37bbe1877\") " pod="openshift-machine-config-operator/machine-config-server-52sbj" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.812242 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.812267 4971 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.812347 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3a0d9f4d-880f-4189-b479-fa0a1f2fb513-profile-collector-cert\") pod \"olm-operator-6b444d44fb-r4xbz\" (UID: \"3a0d9f4d-880f-4189-b479-fa0a1f2fb513\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.812433 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/b2e2892d-ee1f-4349-b6b5-d0bc908d1397-plugins-dir\") pod \"csi-hostpathplugin-p6gxf\" (UID: \"b2e2892d-ee1f-4349-b6b5-d0bc908d1397\") " pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.812856 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75btm\" (UniqueName: \"kubernetes.io/projected/b2038ca5-beb4-434e-81a7-16a67ad9382d-kube-api-access-75btm\") pod \"console-f9d7485db-blqcq\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " pod="openshift-console/console-f9d7485db-blqcq" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.813040 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-audit-policies\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.813230 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6aeeb09d-7519-42c7-be1d-2355b9a704e2-signing-cabundle\") pod \"service-ca-9c57cc56f-7k28d\" (UID: \"6aeeb09d-7519-42c7-be1d-2355b9a704e2\") " pod="openshift-service-ca/service-ca-9c57cc56f-7k28d" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.813374 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/73452466-9780-461c-abd6-282cda0d9a7e-proxy-tls\") pod \"machine-config-controller-84d6567774-9sjf5\" (UID: \"73452466-9780-461c-abd6-282cda0d9a7e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9sjf5" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.813428 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.813911 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/b28e17fe-93d1-4cdb-a9a1-33c37bbe1877-node-bootstrap-token\") pod \"machine-config-server-52sbj\" (UID: \"b28e17fe-93d1-4cdb-a9a1-33c37bbe1877\") " 
pod="openshift-machine-config-operator/machine-config-server-52sbj" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.815005 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.815319 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6df59c92-6515-4bf1-aad4-57daf0d8fc76-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-m7fh9\" (UID: \"6df59c92-6515-4bf1-aad4-57daf0d8fc76\") " pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.815388 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6df59c92-6515-4bf1-aad4-57daf0d8fc76-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-m7fh9\" (UID: \"6df59c92-6515-4bf1-aad4-57daf0d8fc76\") " pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.815768 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/b2e2892d-ee1f-4349-b6b5-d0bc908d1397-socket-dir\") pod \"csi-hostpathplugin-p6gxf\" (UID: \"b2e2892d-ee1f-4349-b6b5-d0bc908d1397\") " pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.815843 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/b2e2892d-ee1f-4349-b6b5-d0bc908d1397-csi-data-dir\") pod \"csi-hostpathplugin-p6gxf\" (UID: \"b2e2892d-ee1f-4349-b6b5-d0bc908d1397\") " pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.815877 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9dddee2-cf40-4aa7-9e05-0776bfb4d534-config\") pod \"kube-apiserver-operator-766d6c64bb-hh55n\" (UID: \"c9dddee2-cf40-4aa7-9e05-0776bfb4d534\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hh55n" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.815965 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7cc88187-87e4-4fa5-a431-d3b7b4779f56-srv-cert\") pod \"catalog-operator-68c6474976-ghv48\" (UID: \"7cc88187-87e4-4fa5-a431-d3b7b4779f56\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.816128 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd2e0a36-2e94-4ed6-9b7d-1755d593d805-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-2drfq\" (UID: \"cd2e0a36-2e94-4ed6-9b7d-1755d593d805\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2drfq" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.816328 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.816417 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6e183c66-f7ea-4191-9f0d-fb2eb12ee864-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-xhx92\" (UID: \"6e183c66-f7ea-4191-9f0d-fb2eb12ee864\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xhx92" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.817057 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/75f685e8-220b-4588-8f26-66d305e3dfea-auth-proxy-config\") pod \"machine-config-operator-74547568cd-j9zw4\" (UID: \"75f685e8-220b-4588-8f26-66d305e3dfea\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.817142 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/baace783-c727-4031-bdae-749783258756-config\") pod \"service-ca-operator-777779d784-kzfsj\" (UID: \"baace783-c727-4031-bdae-749783258756\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-kzfsj" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.817572 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: E1127 06:55:02.817780 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:03.317762861 +0000 UTC m=+141.509806789 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.818374 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd2e0a36-2e94-4ed6-9b7d-1755d593d805-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-2drfq\" (UID: \"cd2e0a36-2e94-4ed6-9b7d-1755d593d805\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2drfq" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.818476 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3a0d9f4d-880f-4189-b479-fa0a1f2fb513-srv-cert\") pod \"olm-operator-6b444d44fb-r4xbz\" (UID: \"3a0d9f4d-880f-4189-b479-fa0a1f2fb513\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.818896 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.819023 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.819090 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f268e2fa-798d-4438-9225-24c4a07ea999-audit-dir\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.819188 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/73452466-9780-461c-abd6-282cda0d9a7e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9sjf5\" (UID: \"73452466-9780-461c-abd6-282cda0d9a7e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9sjf5" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.819351 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89bb08d2-3252-4d85-91bc-3b595be4c32a-secret-volume\") pod \"collect-profiles-29403765-26klv\" (UID: \"89bb08d2-3252-4d85-91bc-3b595be4c32a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.821159 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/baace783-c727-4031-bdae-749783258756-serving-cert\") pod \"service-ca-operator-777779d784-kzfsj\" (UID: \"baace783-c727-4031-bdae-749783258756\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-kzfsj" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.821420 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9dddee2-cf40-4aa7-9e05-0776bfb4d534-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-hh55n\" (UID: \"c9dddee2-cf40-4aa7-9e05-0776bfb4d534\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hh55n" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.821524 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/75f685e8-220b-4588-8f26-66d305e3dfea-proxy-tls\") pod \"machine-config-operator-74547568cd-j9zw4\" (UID: \"75f685e8-220b-4588-8f26-66d305e3dfea\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.822302 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35a21adf-32b6-4a19-b592-0f941e07a3c5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-n56lw\" (UID: \"35a21adf-32b6-4a19-b592-0f941e07a3c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n56lw" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.822621 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45462412-2e4e-480d-8874-2949c5dd691e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-f6k2p\" (UID: \"45462412-2e4e-480d-8874-2949c5dd691e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-f6k2p" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.823128 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.823721 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7cc88187-87e4-4fa5-a431-d3b7b4779f56-profile-collector-cert\") pod \"catalog-operator-68c6474976-ghv48\" (UID: \"7cc88187-87e4-4fa5-a431-d3b7b4779f56\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.824437 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.825254 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" 
(UniqueName: \"kubernetes.io/secret/85f4323a-d5cf-45bb-b3df-57434dfe4d53-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-ht55m\" (UID: \"85f4323a-d5cf-45bb-b3df-57434dfe4d53\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ht55m" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.828187 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6aeeb09d-7519-42c7-be1d-2355b9a704e2-signing-key\") pod \"service-ca-9c57cc56f-7k28d\" (UID: \"6aeeb09d-7519-42c7-be1d-2355b9a704e2\") " pod="openshift-service-ca/service-ca-9c57cc56f-7k28d" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.832579 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3e262487-0943-410e-bbcb-11fc42a8ac60-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-jhjgn\" (UID: \"3e262487-0943-410e-bbcb-11fc42a8ac60\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.851838 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjcwc\" (UniqueName: \"kubernetes.io/projected/2944ff34-c55c-40fa-aacf-f679dd704ad8-kube-api-access-vjcwc\") pod \"cluster-samples-operator-665b6dd947-hjgth\" (UID: \"2944ff34-c55c-40fa-aacf-f679dd704ad8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hjgth" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.874732 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7shnv\" (UniqueName: \"kubernetes.io/projected/3e262487-0943-410e-bbcb-11fc42a8ac60-kube-api-access-7shnv\") pod \"cluster-image-registry-operator-dc59b4c8b-jhjgn\" (UID: \"3e262487-0943-410e-bbcb-11fc42a8ac60\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.892508 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsjhd\" (UniqueName: \"kubernetes.io/projected/6dc237a7-62f6-4ee9-99f1-034d6cceb901-kube-api-access-tsjhd\") pod \"openshift-config-operator-7777fb866f-8hv78\" (UID: \"6dc237a7-62f6-4ee9-99f1-034d6cceb901\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8hv78" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.899561 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.911923 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:02 crc kubenswrapper[4971]: E1127 06:55:02.912264 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:03.412220346 +0000 UTC m=+141.604264304 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.912917 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:02 crc kubenswrapper[4971]: E1127 06:55:02.913433 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:03.413419213 +0000 UTC m=+141.605463141 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.913937 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/26f7ec15-f388-4e06-b038-cc04d0f21c08-cert\") pod \"ingress-canary-9snm7\" (UID: \"26f7ec15-f388-4e06-b038-cc04d0f21c08\") " pod="openshift-ingress-canary/ingress-canary-9snm7" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.918514 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.939147 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.958811 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.977762 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-blqcq" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.980971 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.991462 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/05b6cd99-aeb2-4b20-8fb3-c50e72a03611-config-volume\") pod \"dns-default-k2ldt\" (UID: \"05b6cd99-aeb2-4b20-8fb3-c50e72a03611\") " pod="openshift-dns/dns-default-k2ldt" Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.997022 4971 request.go:700] Waited for 1.914342215s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns/secrets?fieldSelector=metadata.name%3Ddns-default-metrics-tls&limit=500&resourceVersion=0 Nov 27 06:55:02 crc kubenswrapper[4971]: I1127 06:55:02.999555 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.003981 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.013252 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/05b6cd99-aeb2-4b20-8fb3-c50e72a03611-metrics-tls\") pod \"dns-default-k2ldt\" (UID: \"05b6cd99-aeb2-4b20-8fb3-c50e72a03611\") " pod="openshift-dns/dns-default-k2ldt" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.013927 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:03 crc kubenswrapper[4971]: E1127 06:55:03.014025 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:03.514005317 +0000 UTC m=+141.706049245 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.014200 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:03 crc kubenswrapper[4971]: E1127 06:55:03.014569 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:03.514527734 +0000 UTC m=+141.706571672 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.019821 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.084246 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfdp8\" (UniqueName: \"kubernetes.io/projected/227860ca-baf3-4b07-b0b3-9a29a9eedab6-kube-api-access-xfdp8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.099922 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52fzj\" (UniqueName: \"kubernetes.io/projected/1623d112-895b-42a2-8a76-cf082423e8ac-kube-api-access-52fzj\") pod \"console-operator-58897d9998-b9pwk\" (UID: \"1623d112-895b-42a2-8a76-cf082423e8ac\") " pod="openshift-console-operator/console-operator-58897d9998-b9pwk" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.115065 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.115915 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8hv78" Nov 27 06:55:03 crc kubenswrapper[4971]: E1127 06:55:03.116004 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:03.615970285 +0000 UTC m=+141.808014203 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.126731 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bj9jj\" (UniqueName: \"kubernetes.io/projected/3b071787-4135-43ce-a44e-68a0be71919e-kube-api-access-bj9jj\") pod \"ingress-operator-5b745b69d9-jdbbc\" (UID: \"3b071787-4135-43ce-a44e-68a0be71919e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.141342 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hjgth" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.144326 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cw2g4\" (UniqueName: \"kubernetes.io/projected/3f201996-72e7-43c9-b004-47366d4b05e4-kube-api-access-cw2g4\") pod \"etcd-operator-b45778765-497pk\" (UID: \"3f201996-72e7-43c9-b004-47366d4b05e4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.169627 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwxfz\" (UniqueName: \"kubernetes.io/projected/5c9c5353-0cec-4776-9c51-553e90e76a2a-kube-api-access-qwxfz\") pod \"router-default-5444994796-s6mff\" (UID: \"5c9c5353-0cec-4776-9c51-553e90e76a2a\") " pod="openshift-ingress/router-default-5444994796-s6mff" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.176698 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49q2p\" (UniqueName: \"kubernetes.io/projected/a602a33c-f8ea-45e4-8295-3c2230005ef4-kube-api-access-49q2p\") pod \"downloads-7954f5f757-75prl\" (UID: \"a602a33c-f8ea-45e4-8295-3c2230005ef4\") " pod="openshift-console/downloads-7954f5f757-75prl" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.195020 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhr9v\" (UniqueName: \"kubernetes.io/projected/0f50e533-b652-42c8-9f91-124b7be386fc-kube-api-access-nhr9v\") pod \"machine-approver-56656f9798-j6c6b\" (UID: \"0f50e533-b652-42c8-9f91-124b7be386fc\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j6c6b" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.214771 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3b071787-4135-43ce-a44e-68a0be71919e-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jdbbc\" (UID: 
\"3b071787-4135-43ce-a44e-68a0be71919e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.218047 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:03 crc kubenswrapper[4971]: E1127 06:55:03.218943 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:03.718918352 +0000 UTC m=+141.910962270 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.241269 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cp6v\" (UniqueName: \"kubernetes.io/projected/8651c6d0-c735-4e26-a1aa-cf6090cd469f-kube-api-access-6cp6v\") pod \"dns-operator-744455d44c-nh6pl\" (UID: \"8651c6d0-c735-4e26-a1aa-cf6090cd469f\") " pod="openshift-dns-operator/dns-operator-744455d44c-nh6pl" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.256820 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfm29\" (UniqueName: \"kubernetes.io/projected/7ca57976-e7d8-485a-8277-a3da59178bf6-kube-api-access-pfm29\") pod \"packageserver-d55dfcdfc-sbk9w\" (UID: \"7ca57976-e7d8-485a-8277-a3da59178bf6\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.259952 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-b9pwk" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.266755 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz" event={"ID":"f5844f6e-6fc6-460c-9116-217112e3e28d","Type":"ContainerStarted","Data":"aeea9e668277864410bf273c1ae0cf7c1161f838733eb05d546793b75353ccda"} Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.268423 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j6c6b" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.270218 4971 generic.go:334] "Generic (PLEG): container finished" podID="4130c823-650b-4f98-a3e8-e735f3615b03" containerID="d0311648358bffe8b3b24b2163f752a4bb908ca3b7754b7e6088677847da6b68" exitCode=0 Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.271157 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6" event={"ID":"4130c823-650b-4f98-a3e8-e735f3615b03","Type":"ContainerDied","Data":"d0311648358bffe8b3b24b2163f752a4bb908ca3b7754b7e6088677847da6b68"} Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.273981 4971 generic.go:334] "Generic (PLEG): container finished" podID="b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0" containerID="22c605007a009f620ec3307e2bc5f02b2d7e6e557edad7cd42d408022c01e88f" exitCode=0 Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.274157 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-jxflz" event={"ID":"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0","Type":"ContainerDied","Data":"22c605007a009f620ec3307e2bc5f02b2d7e6e557edad7cd42d408022c01e88f"} Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.275262 4971 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-7hzzs container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.275297 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs" podUID="78734b35-61db-46e9-b16b-1f03258f9fcb" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.276188 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8jgk\" (UniqueName: \"kubernetes.io/projected/506301cc-392b-4aad-b7b8-29dbdde4b6fa-kube-api-access-d8jgk\") pod \"openshift-controller-manager-operator-756b6f6bc6-8thn4\" (UID: \"506301cc-392b-4aad-b7b8-29dbdde4b6fa\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8thn4" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.296921 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-nh6pl" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.300796 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4thq\" (UniqueName: \"kubernetes.io/projected/3973ab95-1d0e-49d6-aadb-79fde6dd74b8-kube-api-access-n4thq\") pod \"package-server-manager-789f6589d5-p5wn9\" (UID: \"3973ab95-1d0e-49d6-aadb-79fde6dd74b8\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-p5wn9" Nov 27 06:55:03 crc kubenswrapper[4971]: W1127 06:55:03.318217 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0f50e533_b652_42c8_9f91_124b7be386fc.slice/crio-fe74de787d641a69d73f02d9ce6365c1d39dd63fb5427870d5638b3328c5c4ab WatchSource:0}: Error finding container fe74de787d641a69d73f02d9ce6365c1d39dd63fb5427870d5638b3328c5c4ab: Status 404 returned error can't find the container with id fe74de787d641a69d73f02d9ce6365c1d39dd63fb5427870d5638b3328c5c4ab Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.319073 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:03 crc kubenswrapper[4971]: E1127 06:55:03.319286 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:03.819235109 +0000 UTC m=+142.011279027 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.321348 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:03 crc kubenswrapper[4971]: E1127 06:55:03.321828 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:03.821817339 +0000 UTC m=+142.013861257 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.324738 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpfgg\" (UniqueName: \"kubernetes.io/projected/51fe0600-5a21-48b7-9c07-91a11a547256-kube-api-access-mpfgg\") pod \"openshift-apiserver-operator-796bbdcf4f-mgswn\" (UID: \"51fe0600-5a21-48b7-9c07-91a11a547256\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mgswn" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.334502 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/227860ca-baf3-4b07-b0b3-9a29a9eedab6-bound-sa-token\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.348591 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8thn4" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.353273 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r67f4\" (UniqueName: \"kubernetes.io/projected/6e183c66-f7ea-4191-9f0d-fb2eb12ee864-kube-api-access-r67f4\") pod \"multus-admission-controller-857f4d67dd-xhx92\" (UID: \"6e183c66-f7ea-4191-9f0d-fb2eb12ee864\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xhx92" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.360212 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hjgth"] Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.379845 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-882xf\" (UniqueName: \"kubernetes.io/projected/73452466-9780-461c-abd6-282cda0d9a7e-kube-api-access-882xf\") pod \"machine-config-controller-84d6567774-9sjf5\" (UID: \"73452466-9780-461c-abd6-282cda0d9a7e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9sjf5" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.392337 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.392928 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-75prl" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.402461 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mgswn" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.415355 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssk9g\" (UniqueName: \"kubernetes.io/projected/85f4323a-d5cf-45bb-b3df-57434dfe4d53-kube-api-access-ssk9g\") pod \"control-plane-machine-set-operator-78cbb6b69f-ht55m\" (UID: \"85f4323a-d5cf-45bb-b3df-57434dfe4d53\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ht55m" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.422369 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:03 crc kubenswrapper[4971]: E1127 06:55:03.424187 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:03.924163808 +0000 UTC m=+142.116207726 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.444314 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.445922 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcmzm\" (UniqueName: \"kubernetes.io/projected/baace783-c727-4031-bdae-749783258756-kube-api-access-fcmzm\") pod \"service-ca-operator-777779d784-kzfsj\" (UID: \"baace783-c727-4031-bdae-749783258756\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-kzfsj" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.449374 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-s6mff" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.450081 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8hv78"] Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.452272 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sclj4\" (UniqueName: \"kubernetes.io/projected/89bb08d2-3252-4d85-91bc-3b595be4c32a-kube-api-access-sclj4\") pod \"collect-profiles-29403765-26klv\" (UID: \"89bb08d2-3252-4d85-91bc-3b595be4c32a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.465741 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn"] Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.472469 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7twg\" (UniqueName: \"kubernetes.io/projected/3a0d9f4d-880f-4189-b479-fa0a1f2fb513-kube-api-access-x7twg\") pod \"olm-operator-6b444d44fb-r4xbz\" (UID: \"3a0d9f4d-880f-4189-b479-fa0a1f2fb513\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.474919 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-blqcq"] Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.487699 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jckhf\" (UniqueName: \"kubernetes.io/projected/75f685e8-220b-4588-8f26-66d305e3dfea-kube-api-access-jckhf\") pod \"machine-config-operator-74547568cd-j9zw4\" (UID: \"75f685e8-220b-4588-8f26-66d305e3dfea\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.498986 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-p5wn9" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.507877 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.510277 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.520979 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9q5c\" (UniqueName: \"kubernetes.io/projected/6df59c92-6515-4bf1-aad4-57daf0d8fc76-kube-api-access-t9q5c\") pod \"marketplace-operator-79b997595-m7fh9\" (UID: \"6df59c92-6515-4bf1-aad4-57daf0d8fc76\") " pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.524436 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:03 crc kubenswrapper[4971]: E1127 06:55:03.524882 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:04.024869666 +0000 UTC m=+142.216913584 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.529269 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.545636 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9mzr\" (UniqueName: \"kubernetes.io/projected/6aeeb09d-7519-42c7-be1d-2355b9a704e2-kube-api-access-r9mzr\") pod \"service-ca-9c57cc56f-7k28d\" (UID: \"6aeeb09d-7519-42c7-be1d-2355b9a704e2\") " pod="openshift-service-ca/service-ca-9c57cc56f-7k28d" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.553740 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ppfn\" (UniqueName: \"kubernetes.io/projected/cd2e0a36-2e94-4ed6-9b7d-1755d593d805-kube-api-access-6ppfn\") pod \"kube-storage-version-migrator-operator-b67b599dd-2drfq\" (UID: \"cd2e0a36-2e94-4ed6-9b7d-1755d593d805\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2drfq" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.564087 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8t4f\" (UniqueName: \"kubernetes.io/projected/05b6cd99-aeb2-4b20-8fb3-c50e72a03611-kube-api-access-m8t4f\") pod \"dns-default-k2ldt\" (UID: \"05b6cd99-aeb2-4b20-8fb3-c50e72a03611\") " pod="openshift-dns/dns-default-k2ldt" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.569327 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.574446 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-b9pwk"] Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.575250 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/35a21adf-32b6-4a19-b592-0f941e07a3c5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-n56lw\" (UID: \"35a21adf-32b6-4a19-b592-0f941e07a3c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n56lw" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.576965 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ht55m" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.590176 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.593028 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwwfn\" (UniqueName: \"kubernetes.io/projected/26f7ec15-f388-4e06-b038-cc04d0f21c08-kube-api-access-jwwfn\") pod \"ingress-canary-9snm7\" (UID: \"26f7ec15-f388-4e06-b038-cc04d0f21c08\") " pod="openshift-ingress-canary/ingress-canary-9snm7" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.596776 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2drfq" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.605373 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-7k28d" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.613103 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-xhx92" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.618253 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpg86\" (UniqueName: \"kubernetes.io/projected/b28e17fe-93d1-4cdb-a9a1-33c37bbe1877-kube-api-access-kpg86\") pod \"machine-config-server-52sbj\" (UID: \"b28e17fe-93d1-4cdb-a9a1-33c37bbe1877\") " pod="openshift-machine-config-operator/machine-config-server-52sbj" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.625312 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:03 crc kubenswrapper[4971]: E1127 06:55:03.625783 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:04.12576276 +0000 UTC m=+142.317806678 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.632263 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n56lw" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.637095 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9q9qk\" (UniqueName: \"kubernetes.io/projected/f268e2fa-798d-4438-9225-24c4a07ea999-kube-api-access-9q9qk\") pod \"oauth-openshift-558db77b4-h6p78\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.650198 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9sjf5" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.662051 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k564m\" (UniqueName: \"kubernetes.io/projected/7cc88187-87e4-4fa5-a431-d3b7b4779f56-kube-api-access-k564m\") pod \"catalog-operator-68c6474976-ghv48\" (UID: \"7cc88187-87e4-4fa5-a431-d3b7b4779f56\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.675092 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-kzfsj" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.696866 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8thn4"] Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.698740 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.700350 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c9dddee2-cf40-4aa7-9e05-0776bfb4d534-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-hh55n\" (UID: \"c9dddee2-cf40-4aa7-9e05-0776bfb4d534\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hh55n" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.715316 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-52sbj" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.718099 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/45462412-2e4e-480d-8874-2949c5dd691e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-f6k2p\" (UID: \"45462412-2e4e-480d-8874-2949c5dd691e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-f6k2p" Nov 27 06:55:03 crc kubenswrapper[4971]: W1127 06:55:03.718334 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1623d112_895b_42a2_8a76_cf082423e8ac.slice/crio-8382b227542f720a5b887f9a32151b2f786a9bb2482277b3126616abaab85f4c WatchSource:0}: Error finding container 8382b227542f720a5b887f9a32151b2f786a9bb2482277b3126616abaab85f4c: Status 404 returned error can't find the container with id 8382b227542f720a5b887f9a32151b2f786a9bb2482277b3126616abaab85f4c Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.727122 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-nh6pl"] Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.727940 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:03 crc kubenswrapper[4971]: E1127 06:55:03.728578 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:04.228522022 +0000 UTC m=+142.420565940 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.731996 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqc7t\" (UniqueName: \"kubernetes.io/projected/b2e2892d-ee1f-4349-b6b5-d0bc908d1397-kube-api-access-pqc7t\") pod \"csi-hostpathplugin-p6gxf\" (UID: \"b2e2892d-ee1f-4349-b6b5-d0bc908d1397\") " pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.734784 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtvqb\" (UniqueName: \"kubernetes.io/projected/00fa6f7a-50bb-42e0-8643-a0fae088f528-kube-api-access-jtvqb\") pod \"migrator-59844c95c7-kg7xs\" (UID: \"00fa6f7a-50bb-42e0-8643-a0fae088f528\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-kg7xs" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.739770 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-k2ldt" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.750546 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9snm7" Nov 27 06:55:03 crc kubenswrapper[4971]: W1127 06:55:03.758340 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod506301cc_392b_4aad_b7b8_29dbdde4b6fa.slice/crio-747db0c41c1669653c8310158b3302781a5bed1cd7dc5fd445f09f2e26bed8aa WatchSource:0}: Error finding container 747db0c41c1669653c8310158b3302781a5bed1cd7dc5fd445f09f2e26bed8aa: Status 404 returned error can't find the container with id 747db0c41c1669653c8310158b3302781a5bed1cd7dc5fd445f09f2e26bed8aa Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.812549 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mgswn"] Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.818044 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.828583 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:03 crc kubenswrapper[4971]: E1127 06:55:03.829006 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:04.328990542 +0000 UTC m=+142.521034460 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.841289 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.862673 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-kg7xs" Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.863798 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hh55n" Nov 27 06:55:03 crc kubenswrapper[4971]: W1127 06:55:03.879606 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5c9c5353_0cec_4776_9c51_553e90e76a2a.slice/crio-82cbd72ff883d8418db279bdacb9ba0a5b58cdfc765824a3da1827e29a87641a WatchSource:0}: Error finding container 82cbd72ff883d8418db279bdacb9ba0a5b58cdfc765824a3da1827e29a87641a: Status 404 returned error can't find the container with id 82cbd72ff883d8418db279bdacb9ba0a5b58cdfc765824a3da1827e29a87641a Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.932356 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:03 crc kubenswrapper[4971]: E1127 06:55:03.932935 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:04.43291815 +0000 UTC m=+142.624962068 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.937066 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-75prl"] Nov 27 06:55:03 crc kubenswrapper[4971]: W1127 06:55:03.995379 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda602a33c_f8ea_45e4_8295_3c2230005ef4.slice/crio-14551af235e3f61ea88d228cab9a1f1c833319a4856bcaa7f3c0b85cb20d7aef WatchSource:0}: Error finding container 14551af235e3f61ea88d228cab9a1f1c833319a4856bcaa7f3c0b85cb20d7aef: Status 404 returned error can't find the container with id 14551af235e3f61ea88d228cab9a1f1c833319a4856bcaa7f3c0b85cb20d7aef Nov 27 06:55:03 crc kubenswrapper[4971]: I1127 06:55:03.995927 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-f6k2p" Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.029329 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.033798 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:04 crc kubenswrapper[4971]: E1127 06:55:04.033966 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:04.533938478 +0000 UTC m=+142.725982396 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.034067 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:04 crc kubenswrapper[4971]: E1127 06:55:04.034338 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:04.53431674 +0000 UTC m=+142.726360658 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.137065 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:04 crc kubenswrapper[4971]: E1127 06:55:04.137489 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:04.637470423 +0000 UTC m=+142.829514341 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.229131 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w"] Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.238896 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:04 crc kubenswrapper[4971]: E1127 06:55:04.239168 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:04.739156422 +0000 UTC m=+142.931200340 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.317107 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8thn4" event={"ID":"506301cc-392b-4aad-b7b8-29dbdde4b6fa","Type":"ContainerStarted","Data":"747db0c41c1669653c8310158b3302781a5bed1cd7dc5fd445f09f2e26bed8aa"} Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.337682 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-s6mff" event={"ID":"5c9c5353-0cec-4776-9c51-553e90e76a2a","Type":"ContainerStarted","Data":"82cbd72ff883d8418db279bdacb9ba0a5b58cdfc765824a3da1827e29a87641a"} Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.342478 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:04 crc kubenswrapper[4971]: E1127 06:55:04.343127 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:04.84310266 +0000 UTC m=+143.035146578 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.350993 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mgswn" event={"ID":"51fe0600-5a21-48b7-9c07-91a11a547256","Type":"ContainerStarted","Data":"705129e081a8c41cf6d5cbf37cb87ee5e0150117c323a0aef04a0f73731c0969"} Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.360783 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-nh6pl" event={"ID":"8651c6d0-c735-4e26-a1aa-cf6090cd469f","Type":"ContainerStarted","Data":"b31300f77502a23726381958b3d2df3e003c1ecd7b5eba465d9f52a5f7a3d37f"} Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.377727 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-blqcq" event={"ID":"b2038ca5-beb4-434e-81a7-16a67ad9382d","Type":"ContainerStarted","Data":"7da05e0af60eef4024acf504cb088644335393c753db592c3cbb247548a97f44"} Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.404337 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hjgth" event={"ID":"2944ff34-c55c-40fa-aacf-f679dd704ad8","Type":"ContainerStarted","Data":"53060518018e529e7b00b8f68c63cf34f0f8316ab8b5ecd5af6d4aaf035f0350"} Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.405836 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-b9pwk" event={"ID":"1623d112-895b-42a2-8a76-cf082423e8ac","Type":"ContainerStarted","Data":"8382b227542f720a5b887f9a32151b2f786a9bb2482277b3126616abaab85f4c"} Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.433231 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8hv78" event={"ID":"6dc237a7-62f6-4ee9-99f1-034d6cceb901","Type":"ContainerStarted","Data":"ccedcb5e323f668e629277591e1743916fb6a334f2af95abcfa9ae730d59d8e9"} Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.437249 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6" event={"ID":"4130c823-650b-4f98-a3e8-e735f3615b03","Type":"ContainerStarted","Data":"1e594e4c67b8717d68ffdd0778734c8e5634fdefdd9b6b6e83a38f4bfdad75fe"} Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.440514 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn" event={"ID":"3e262487-0943-410e-bbcb-11fc42a8ac60","Type":"ContainerStarted","Data":"85cd01417110ca3e9612a4a1bcf7b88bc67d69770a6c9cd37b015fc64d28bfa5"} Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.444938 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:04 crc kubenswrapper[4971]: E1127 06:55:04.445314 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:04.945303025 +0000 UTC m=+143.137346943 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.452472 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-75prl" event={"ID":"a602a33c-f8ea-45e4-8295-3c2230005ef4","Type":"ContainerStarted","Data":"14551af235e3f61ea88d228cab9a1f1c833319a4856bcaa7f3c0b85cb20d7aef"} Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.463451 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j6c6b" event={"ID":"0f50e533-b652-42c8-9f91-124b7be386fc","Type":"ContainerStarted","Data":"fe74de787d641a69d73f02d9ce6365c1d39dd63fb5427870d5638b3328c5c4ab"} Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.468488 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-jxflz" event={"ID":"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0","Type":"ContainerStarted","Data":"78326d304bbb332100cadc981dd2431928849a24b2666f97a663435dc3c8da6b"} Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.541060 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6" podStartSLOduration=121.541037929 podStartE2EDuration="2m1.541037929s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:04.541037489 +0000 UTC m=+142.733081407" watchObservedRunningTime="2025-11-27 06:55:04.541037929 +0000 UTC m=+142.733081847" Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.541991 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs" podStartSLOduration=122.541985019 podStartE2EDuration="2m2.541985019s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:04.494908021 +0000 UTC m=+142.686951949" watchObservedRunningTime="2025-11-27 06:55:04.541985019 +0000 UTC m=+142.734028937" Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.545732 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:04 crc kubenswrapper[4971]: E1127 06:55:04.547523 4971 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:05.047491769 +0000 UTC m=+143.239535677 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.658266 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb" podStartSLOduration=121.658246088 podStartE2EDuration="2m1.658246088s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:04.65733804 +0000 UTC m=+142.849381978" watchObservedRunningTime="2025-11-27 06:55:04.658246088 +0000 UTC m=+142.850290026" Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.663587 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:04 crc kubenswrapper[4971]: E1127 06:55:04.664136 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:05.16411157 +0000 UTC m=+143.356155488 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.764665 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:04 crc kubenswrapper[4971]: E1127 06:55:04.765436 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:05.265420747 +0000 UTC m=+143.457464665 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.866180 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:04 crc kubenswrapper[4971]: E1127 06:55:04.866511 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:05.366498857 +0000 UTC m=+143.558542775 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:04 crc kubenswrapper[4971]: I1127 06:55:04.967246 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:04 crc kubenswrapper[4971]: E1127 06:55:04.967829 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:05.467811324 +0000 UTC m=+143.659855242 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.068643 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:05 crc kubenswrapper[4971]: E1127 06:55:05.069164 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:05.569146641 +0000 UTC m=+143.761190629 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:05 crc kubenswrapper[4971]: E1127 06:55:05.170937 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:05.670912193 +0000 UTC m=+143.862956111 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.172483 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.173047 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:05 crc kubenswrapper[4971]: E1127 06:55:05.173452 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:05.673436471 +0000 UTC m=+143.865480399 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.274197 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:05 crc kubenswrapper[4971]: E1127 06:55:05.274603 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:05.774585683 +0000 UTC m=+143.966629601 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.302790 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-xqhhz" podStartSLOduration=123.302769385 podStartE2EDuration="2m3.302769385s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:05.301925139 +0000 UTC m=+143.493969057" watchObservedRunningTime="2025-11-27 06:55:05.302769385 +0000 UTC m=+143.494813303" Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.375927 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:05 crc kubenswrapper[4971]: E1127 06:55:05.376219 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:05.876207459 +0000 UTC m=+144.068251367 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.476916 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:05 crc kubenswrapper[4971]: E1127 06:55:05.477337 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:05.97730819 +0000 UTC m=+144.169352108 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.477560 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:05 crc kubenswrapper[4971]: E1127 06:55:05.477939 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:05.977925619 +0000 UTC m=+144.169969537 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.522136 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-b9pwk" event={"ID":"1623d112-895b-42a2-8a76-cf082423e8ac","Type":"ContainerStarted","Data":"990482d3c15da44e0cbfbbc2b524d22b2034d9f73d5fe970f7a83b47d8ccd494"} Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.523568 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-b9pwk" Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.527588 4971 patch_prober.go:28] interesting pod/console-operator-58897d9998-b9pwk container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.15:8443/readyz\": dial tcp 10.217.0.15:8443: connect: connection refused" start-of-body= Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.527640 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-b9pwk" podUID="1623d112-895b-42a2-8a76-cf082423e8ac" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.15:8443/readyz\": dial tcp 10.217.0.15:8443: connect: connection refused" Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.530173 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-blqcq" event={"ID":"b2038ca5-beb4-434e-81a7-16a67ad9382d","Type":"ContainerStarted","Data":"f85c70c2928efa4403f20f7bddd4bd8272de9de70c00d6bb4dd59b51f02b83f7"} Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.541788 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn" 
event={"ID":"3e262487-0943-410e-bbcb-11fc42a8ac60","Type":"ContainerStarted","Data":"4f49afe05c1e550bf91be2dcc6995f8029af62ced5cf6ac55793e11b58705b2a"} Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.551557 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-s6mff" event={"ID":"5c9c5353-0cec-4776-9c51-553e90e76a2a","Type":"ContainerStarted","Data":"9322091dadfc5aabcd3164237ad2b73a47b9b799af710e1d1e2eb2bbfe5e3c30"} Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.559096 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8thn4" event={"ID":"506301cc-392b-4aad-b7b8-29dbdde4b6fa","Type":"ContainerStarted","Data":"beea5f165b11620c095135f3cd94b4618448cded7465cc5619dc17a35a33d6c2"} Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.562455 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hjgth" event={"ID":"2944ff34-c55c-40fa-aacf-f679dd704ad8","Type":"ContainerStarted","Data":"2acf3e612ffa1f26894aca9ccb442ab07e91dc9523aacccd6c5ba8406dbd894c"} Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.569965 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-75prl" event={"ID":"a602a33c-f8ea-45e4-8295-3c2230005ef4","Type":"ContainerStarted","Data":"9dc0d2a8376c72f98822960d5c0d1aa80490fd84722512a95ecfdda772b04ac3"} Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.571823 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-75prl" Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.578859 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:05 crc kubenswrapper[4971]: E1127 06:55:05.579223 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:06.079206035 +0000 UTC m=+144.271249953 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.581250 4971 patch_prober.go:28] interesting pod/downloads-7954f5f757-75prl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.581287 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-75prl" podUID="a602a33c-f8ea-45e4-8295-3c2230005ef4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.587161 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-52sbj" event={"ID":"b28e17fe-93d1-4cdb-a9a1-33c37bbe1877","Type":"ContainerStarted","Data":"fa876390762afc38a94be8004164c6febbe2f7f895ad4d8800b9e9d1f8ff0b44"} Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.616415 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w" event={"ID":"7ca57976-e7d8-485a-8277-a3da59178bf6","Type":"ContainerStarted","Data":"4c917c64e54e315db1db26c998558a8392add932ecc8d6fdfb090a86f55f2b35"} Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.616459 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w" event={"ID":"7ca57976-e7d8-485a-8277-a3da59178bf6","Type":"ContainerStarted","Data":"f2cf724032e0b640b5d83ddf379280268870a3b9a93b6e81f1b54107306dc844"} Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.617292 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w" Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.618850 4971 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-sbk9w container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.19:5443/healthz\": dial tcp 10.217.0.19:5443: connect: connection refused" start-of-body= Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.618983 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w" podUID="7ca57976-e7d8-485a-8277-a3da59178bf6" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.19:5443/healthz\": dial tcp 10.217.0.19:5443: connect: connection refused" Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.628756 4971 generic.go:334] "Generic (PLEG): container finished" podID="6dc237a7-62f6-4ee9-99f1-034d6cceb901" containerID="6593306804db8837934e0c18a5f5e93407f15da505163ff52bed3fb49039950c" exitCode=0 Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.628844 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-8hv78" event={"ID":"6dc237a7-62f6-4ee9-99f1-034d6cceb901","Type":"ContainerDied","Data":"6593306804db8837934e0c18a5f5e93407f15da505163ff52bed3fb49039950c"} Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.642936 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j6c6b" event={"ID":"0f50e533-b652-42c8-9f91-124b7be386fc","Type":"ContainerStarted","Data":"9d19652e763dba4a8347fb22d5760a44943cdc739692a383f010043bd1237fb9"} Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.643200 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-9d7hw" podStartSLOduration=122.643184906 podStartE2EDuration="2m2.643184906s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:05.642128853 +0000 UTC m=+143.834172781" watchObservedRunningTime="2025-11-27 06:55:05.643184906 +0000 UTC m=+143.835228824" Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.682106 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:05 crc kubenswrapper[4971]: E1127 06:55:05.686604 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:06.186578139 +0000 UTC m=+144.378622057 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.756444 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-b9pwk" podStartSLOduration=123.756394941 podStartE2EDuration="2m3.756394941s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:05.718707934 +0000 UTC m=+143.910751862" watchObservedRunningTime="2025-11-27 06:55:05.756394941 +0000 UTC m=+143.948438879" Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.783687 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:05 crc kubenswrapper[4971]: E1127 06:55:05.784940 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:06.284918944 +0000 UTC m=+144.476962862 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.795070 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j6c6b" podStartSLOduration=123.795055548 podStartE2EDuration="2m3.795055548s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:05.792910572 +0000 UTC m=+143.984954500" watchObservedRunningTime="2025-11-27 06:55:05.795055548 +0000 UTC m=+143.987099466" Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.852912 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8thn4" podStartSLOduration=123.852896759 podStartE2EDuration="2m3.852896759s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:05.852808337 +0000 UTC m=+144.044852265" watchObservedRunningTime="2025-11-27 06:55:05.852896759 +0000 UTC m=+144.044940677" Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.887156 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:05 crc kubenswrapper[4971]: E1127 06:55:05.887475 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:06.38746192 +0000 UTC m=+144.579505838 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.896589 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w" podStartSLOduration=122.896574272 podStartE2EDuration="2m2.896574272s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:05.895251411 +0000 UTC m=+144.087295329" watchObservedRunningTime="2025-11-27 06:55:05.896574272 +0000 UTC m=+144.088618190" Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.920909 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-blqcq" podStartSLOduration=123.920892805 podStartE2EDuration="2m3.920892805s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:05.920272506 +0000 UTC m=+144.112316424" watchObservedRunningTime="2025-11-27 06:55:05.920892805 +0000 UTC m=+144.112936723" Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.987697 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.987763 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jhjgn" podStartSLOduration=122.987739335 podStartE2EDuration="2m2.987739335s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:05.954009 +0000 UTC m=+144.146052918" watchObservedRunningTime="2025-11-27 06:55:05.987739335 +0000 UTC m=+144.179783263" Nov 27 06:55:05 crc kubenswrapper[4971]: E1127 06:55:05.987888 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:06.487872209 +0000 UTC m=+144.679916127 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.988120 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:05 crc kubenswrapper[4971]: E1127 06:55:05.988498 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:06.488487888 +0000 UTC m=+144.680531806 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:05 crc kubenswrapper[4971]: I1127 06:55:05.989028 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-75prl" podStartSLOduration=123.989016974 podStartE2EDuration="2m3.989016974s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:05.987821407 +0000 UTC m=+144.179865325" watchObservedRunningTime="2025-11-27 06:55:05.989016974 +0000 UTC m=+144.181060892" Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.046056 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-52sbj" podStartSLOduration=6.04604 podStartE2EDuration="6.04604s" podCreationTimestamp="2025-11-27 06:55:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:06.044088439 +0000 UTC m=+144.236132357" watchObservedRunningTime="2025-11-27 06:55:06.04604 +0000 UTC m=+144.238083928" Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.081318 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-s6mff" podStartSLOduration=123.081295461 podStartE2EDuration="2m3.081295461s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:06.081273641 +0000 UTC m=+144.273317559" watchObservedRunningTime="2025-11-27 06:55:06.081295461 +0000 UTC m=+144.273339379" Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.089427 4971 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:06 crc kubenswrapper[4971]: E1127 06:55:06.089859 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:06.589814835 +0000 UTC m=+144.781858793 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.090150 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:06 crc kubenswrapper[4971]: E1127 06:55:06.090643 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:06.59063301 +0000 UTC m=+144.782676928 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.191374 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:06 crc kubenswrapper[4971]: E1127 06:55:06.192092 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:06.692060951 +0000 UTC m=+144.884104879 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.293699 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:06 crc kubenswrapper[4971]: E1127 06:55:06.294693 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:06.794674808 +0000 UTC m=+144.986718726 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.340116 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-p5wn9"] Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.394162 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz"] Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.398138 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:06 crc kubenswrapper[4971]: E1127 06:55:06.398595 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:06.898572935 +0000 UTC m=+145.090616853 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:06 crc kubenswrapper[4971]: W1127 06:55:06.402873 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a0d9f4d_880f_4189_b479_fa0a1f2fb513.slice/crio-09c67d27eace3f8d13871950d6a58363b52b67d71f58657ea231dfb1d27bd2af WatchSource:0}: Error finding container 09c67d27eace3f8d13871950d6a58363b52b67d71f58657ea231dfb1d27bd2af: Status 404 returned error can't find the container with id 09c67d27eace3f8d13871950d6a58363b52b67d71f58657ea231dfb1d27bd2af Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.402959 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6" Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.403821 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6" Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.416483 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6" Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.450867 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-s6mff" Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.453288 4971 patch_prober.go:28] interesting pod/router-default-5444994796-s6mff container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.453359 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-s6mff" podUID="5c9c5353-0cec-4776-9c51-553e90e76a2a" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.499754 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:06 crc kubenswrapper[4971]: E1127 06:55:06.500112 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:07.000096989 +0000 UTC m=+145.192140907 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.593452 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-m7fh9"] Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.600352 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:06 crc kubenswrapper[4971]: E1127 06:55:06.600668 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:07.100639932 +0000 UTC m=+145.292683850 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:06 crc kubenswrapper[4971]: W1127 06:55:06.638866 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6df59c92_6515_4bf1_aad4_57daf0d8fc76.slice/crio-c71f053172ce11236b4467cdc2d146fba4751dc7c438d7e279921e05375653f9 WatchSource:0}: Error finding container c71f053172ce11236b4467cdc2d146fba4751dc7c438d7e279921e05375653f9: Status 404 returned error can't find the container with id c71f053172ce11236b4467cdc2d146fba4751dc7c438d7e279921e05375653f9 Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.687033 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-497pk"] Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.691038 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ht55m"] Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.702107 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:06 crc kubenswrapper[4971]: E1127 06:55:06.702577 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-11-27 06:55:07.202563098 +0000 UTC m=+145.394607016 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.705505 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-jxflz" event={"ID":"b6d0b1a1-21f0-40bd-91a6-1d8a8078c6c0","Type":"ContainerStarted","Data":"26cff60ed762aa7f33718fc721da91e040567a52f711d8c5efc86946c175fb6a"} Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.747868 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-h6p78"] Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.747896 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-p5wn9" event={"ID":"3973ab95-1d0e-49d6-aadb-79fde6dd74b8","Type":"ContainerStarted","Data":"26b3f1208bd6b4285624d08a6ab55c5a8d34ba165fdc66a6183c059fc151e8be"} Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.747910 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-p5wn9" event={"ID":"3973ab95-1d0e-49d6-aadb-79fde6dd74b8","Type":"ContainerStarted","Data":"f83e9c8875e13d6c35d9bc5685d1e2c8c6c0878f0075985fab375efa620e2f06"} Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.773055 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4"] Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.780308 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-k2ldt"] Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.793035 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9sjf5"] Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.793073 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" event={"ID":"6df59c92-6515-4bf1-aad4-57daf0d8fc76","Type":"ContainerStarted","Data":"c71f053172ce11236b4467cdc2d146fba4751dc7c438d7e279921e05375653f9"} Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.800871 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-f6k2p"] Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.805340 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:06 crc kubenswrapper[4971]: E1127 06:55:06.806637 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b 
nodeName:}" failed. No retries permitted until 2025-11-27 06:55:07.30661691 +0000 UTC m=+145.498660828 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.810950 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-kzfsj"] Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.811002 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz" event={"ID":"3a0d9f4d-880f-4189-b479-fa0a1f2fb513","Type":"ContainerStarted","Data":"09c67d27eace3f8d13871950d6a58363b52b67d71f58657ea231dfb1d27bd2af"} Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.811595 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz" Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.816605 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n56lw"] Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.819314 4971 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-r4xbz container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body= Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.819365 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz" podUID="3a0d9f4d-880f-4189-b479-fa0a1f2fb513" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused" Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.820135 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-7k28d"] Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.829218 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-jxflz" podStartSLOduration=124.829194199 podStartE2EDuration="2m4.829194199s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:06.782105671 +0000 UTC m=+144.974149589" watchObservedRunningTime="2025-11-27 06:55:06.829194199 +0000 UTC m=+145.021238117" Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.829359 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc"] Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.844166 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hh55n"] Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.845236 4971 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz" podStartSLOduration=123.845218105 podStartE2EDuration="2m3.845218105s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:06.840092247 +0000 UTC m=+145.032136175" watchObservedRunningTime="2025-11-27 06:55:06.845218105 +0000 UTC m=+145.037262023" Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.846303 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8hv78" event={"ID":"6dc237a7-62f6-4ee9-99f1-034d6cceb901","Type":"ContainerStarted","Data":"e936b62deb59bd2dc9c599e5aec3602fdc7518ddb2ba92b89baa57f173089e1e"} Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.846474 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8hv78" Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.875372 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j6c6b" event={"ID":"0f50e533-b652-42c8-9f91-124b7be386fc","Type":"ContainerStarted","Data":"201302c0207d12b86ef1c0969172dd0e7589510c235573fdd127bc2c45a05be8"} Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.889214 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8hv78" podStartSLOduration=124.889194337 podStartE2EDuration="2m4.889194337s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:06.874803261 +0000 UTC m=+145.066847169" watchObservedRunningTime="2025-11-27 06:55:06.889194337 +0000 UTC m=+145.081238255" Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.890545 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-kg7xs"] Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.893262 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2drfq"] Nov 27 06:55:06 crc kubenswrapper[4971]: W1127 06:55:06.902741 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc9dddee2_cf40_4aa7_9e05_0776bfb4d534.slice/crio-05198b24e5761937a0436d2feaea05fd28e8ffc29a9c26e7978659b9c61f64a4 WatchSource:0}: Error finding container 05198b24e5761937a0436d2feaea05fd28e8ffc29a9c26e7978659b9c61f64a4: Status 404 returned error can't find the container with id 05198b24e5761937a0436d2feaea05fd28e8ffc29a9c26e7978659b9c61f64a4 Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.910038 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.910709 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hjgth" event={"ID":"2944ff34-c55c-40fa-aacf-f679dd704ad8","Type":"ContainerStarted","Data":"9fc01a916c0abcb5133c449fd9156c19d513cec3cd9772198ee2a9109313185d"} Nov 27 06:55:06 crc kubenswrapper[4971]: E1127 06:55:06.914304 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:07.414288984 +0000 UTC m=+145.606332902 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.917389 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-p6gxf"] Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.938522 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9snm7"] Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.943179 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hjgth" podStartSLOduration=124.943158528 podStartE2EDuration="2m4.943158528s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:06.942855028 +0000 UTC m=+145.134898966" watchObservedRunningTime="2025-11-27 06:55:06.943158528 +0000 UTC m=+145.135202446" Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.947151 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-xhx92"] Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.982104 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-52sbj" event={"ID":"b28e17fe-93d1-4cdb-a9a1-33c37bbe1877","Type":"ContainerStarted","Data":"dc1b103b230c2b1fdc830d2c174d94fb383c1eb4ebacf73986f291205d70eab3"} Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.996339 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-nh6pl" event={"ID":"8651c6d0-c735-4e26-a1aa-cf6090cd469f","Type":"ContainerStarted","Data":"c47406c636db9f32ee75ffa1a0e12c60ffee4029e6697d83580440b6bc79fe4b"} Nov 27 06:55:06 crc kubenswrapper[4971]: I1127 06:55:06.996387 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-nh6pl" event={"ID":"8651c6d0-c735-4e26-a1aa-cf6090cd469f","Type":"ContainerStarted","Data":"b1d7345a7e3472eb69c056b34370c42bcaa3e73294a0660c7ab5849769d11fdd"} Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.004298 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv"] Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.004374 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48"] Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.012721 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:07 crc kubenswrapper[4971]: E1127 06:55:07.014125 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:07.514103045 +0000 UTC m=+145.706146963 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.027613 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-nh6pl" podStartSLOduration=125.027586532 podStartE2EDuration="2m5.027586532s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:07.026427176 +0000 UTC m=+145.218471104" watchObservedRunningTime="2025-11-27 06:55:07.027586532 +0000 UTC m=+145.219630460" Nov 27 06:55:07 crc kubenswrapper[4971]: W1127 06:55:07.038769 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod26f7ec15_f388_4e06_b038_cc04d0f21c08.slice/crio-ff2137154979a0e76cc19adc2cb9577a295bbe69d3718a93ec4c86a45e6b149b WatchSource:0}: Error finding container ff2137154979a0e76cc19adc2cb9577a295bbe69d3718a93ec4c86a45e6b149b: Status 404 returned error can't find the container with id ff2137154979a0e76cc19adc2cb9577a295bbe69d3718a93ec4c86a45e6b149b Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.074650 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mgswn" event={"ID":"51fe0600-5a21-48b7-9c07-91a11a547256","Type":"ContainerStarted","Data":"4795d622ca8e711b41679165bab3ae49aad68f7accfc76bc9ed64e07c5620c55"} Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.076718 4971 patch_prober.go:28] interesting pod/downloads-7954f5f757-75prl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.076778 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-75prl" podUID="a602a33c-f8ea-45e4-8295-3c2230005ef4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Nov 27 06:55:07 crc kubenswrapper[4971]: W1127 
06:55:07.084322 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod89bb08d2_3252_4d85_91bc_3b595be4c32a.slice/crio-18e93e9d0d294263f33043c9717f895703af7c7058d51b8305c52d82e0c95826 WatchSource:0}: Error finding container 18e93e9d0d294263f33043c9717f895703af7c7058d51b8305c52d82e0c95826: Status 404 returned error can't find the container with id 18e93e9d0d294263f33043c9717f895703af7c7058d51b8305c52d82e0c95826 Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.091168 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mcsj6" Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.107314 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mgswn" podStartSLOduration=125.10728805 podStartE2EDuration="2m5.10728805s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:07.105397791 +0000 UTC m=+145.297441709" watchObservedRunningTime="2025-11-27 06:55:07.10728805 +0000 UTC m=+145.299331968" Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.123113 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:07 crc kubenswrapper[4971]: E1127 06:55:07.129202 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:07.629185108 +0000 UTC m=+145.821229026 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:07 crc kubenswrapper[4971]: W1127 06:55:07.163077 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7cc88187_87e4_4fa5_a431_d3b7b4779f56.slice/crio-d107738a3f882d143a9024181014cb460f47951c6e67b23afdcedde789006d22 WatchSource:0}: Error finding container d107738a3f882d143a9024181014cb460f47951c6e67b23afdcedde789006d22: Status 404 returned error can't find the container with id d107738a3f882d143a9024181014cb460f47951c6e67b23afdcedde789006d22 Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.231819 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:07 crc kubenswrapper[4971]: E1127 06:55:07.233971 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:07.733944202 +0000 UTC m=+145.925988120 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.334462 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:07 crc kubenswrapper[4971]: E1127 06:55:07.335407 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:07.835394663 +0000 UTC m=+146.027438581 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.435941 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:07 crc kubenswrapper[4971]: E1127 06:55:07.436394 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:07.93637837 +0000 UTC m=+146.128422288 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.463028 4971 patch_prober.go:28] interesting pod/router-default-5444994796-s6mff container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 27 06:55:07 crc kubenswrapper[4971]: [-]has-synced failed: reason withheld Nov 27 06:55:07 crc kubenswrapper[4971]: [+]process-running ok Nov 27 06:55:07 crc kubenswrapper[4971]: healthz check failed Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.463081 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-s6mff" podUID="5c9c5353-0cec-4776-9c51-553e90e76a2a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.538153 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:07 crc kubenswrapper[4971]: E1127 06:55:07.538685 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:08.038667567 +0000 UTC m=+146.230711485 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.639710 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:07 crc kubenswrapper[4971]: E1127 06:55:07.640139 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:08.140111507 +0000 UTC m=+146.332155425 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.687519 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-b9pwk" Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.739801 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbk9w" Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.743491 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:07 crc kubenswrapper[4971]: E1127 06:55:07.744965 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:08.244949123 +0000 UTC m=+146.436993041 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.845827 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:07 crc kubenswrapper[4971]: E1127 06:55:07.846364 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:08.346341213 +0000 UTC m=+146.538385131 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:07 crc kubenswrapper[4971]: I1127 06:55:07.948725 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:07 crc kubenswrapper[4971]: E1127 06:55:07.949193 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:08.449176897 +0000 UTC m=+146.641220805 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.053328 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:08 crc kubenswrapper[4971]: E1127 06:55:08.053450 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:08.553423375 +0000 UTC m=+146.745467293 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.053684 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:08 crc kubenswrapper[4971]: E1127 06:55:08.054149 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:08.554134257 +0000 UTC m=+146.746178175 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.111485 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-k2ldt" event={"ID":"05b6cd99-aeb2-4b20-8fb3-c50e72a03611","Type":"ContainerStarted","Data":"6f2a2ef8c182ebe84a8aa1d7156538e0aa424d4e0ff35529d584da07890d9f6c"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.111563 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-k2ldt" event={"ID":"05b6cd99-aeb2-4b20-8fb3-c50e72a03611","Type":"ContainerStarted","Data":"142860042dd3b0ac6540b00154d29ed2594d1ac5eb2a8491a0877410d36957ac"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.119088 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" event={"ID":"b2e2892d-ee1f-4349-b6b5-d0bc908d1397","Type":"ContainerStarted","Data":"dcf31b659c4725032123369ae4909947253acd6485d1fe88d138edcf76afe9bf"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.152869 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-xhx92" event={"ID":"6e183c66-f7ea-4191-9f0d-fb2eb12ee864","Type":"ContainerStarted","Data":"9ae3863e31a3d43e6749af2576218d3ee5e2cf9fdf5c32adcf7ec9957f00d3b0"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.155775 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:08 crc kubenswrapper[4971]: E1127 06:55:08.155891 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:08.655872637 +0000 UTC m=+146.847916555 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.156232 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:08 crc kubenswrapper[4971]: E1127 06:55:08.156597 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:08.656581139 +0000 UTC m=+146.848625057 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.157186 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9snm7" event={"ID":"26f7ec15-f388-4e06-b038-cc04d0f21c08","Type":"ContainerStarted","Data":"b03ba314698b8948f5b2b85d628783dfee89d4a31ce3442f4352209b18a06a8a"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.157212 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9snm7" event={"ID":"26f7ec15-f388-4e06-b038-cc04d0f21c08","Type":"ContainerStarted","Data":"ff2137154979a0e76cc19adc2cb9577a295bbe69d3718a93ec4c86a45e6b149b"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.162312 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" event={"ID":"f268e2fa-798d-4438-9225-24c4a07ea999","Type":"ContainerStarted","Data":"38490efdf6206f990845a6d39d721e3a5d0d5bd0f79f3e9993915aa041f8faaf"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.162352 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" event={"ID":"f268e2fa-798d-4438-9225-24c4a07ea999","Type":"ContainerStarted","Data":"f3c7a695152dd73ff1f1887a8e142968a5fcdc5d0c0573b8a5c5473994381203"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.163390 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.166389 4971 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-h6p78 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.20:6443/healthz\": dial tcp 10.217.0.20:6443: connect: connection 
refused" start-of-body= Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.166418 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" podUID="f268e2fa-798d-4438-9225-24c4a07ea999" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.20:6443/healthz\": dial tcp 10.217.0.20:6443: connect: connection refused" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.193230 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ht55m" event={"ID":"85f4323a-d5cf-45bb-b3df-57434dfe4d53","Type":"ContainerStarted","Data":"7a347071853fa8c87f1827373377c3d4721fc68f3ca28aa6f0a80e182a70ba42"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.193270 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ht55m" event={"ID":"85f4323a-d5cf-45bb-b3df-57434dfe4d53","Type":"ContainerStarted","Data":"c9fec059014a894bc43e9e28e2589107c54d3011a11a32603d9a54f84fa968d2"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.195244 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-9snm7" podStartSLOduration=8.195232356 podStartE2EDuration="8.195232356s" podCreationTimestamp="2025-11-27 06:55:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:08.194764551 +0000 UTC m=+146.386808489" watchObservedRunningTime="2025-11-27 06:55:08.195232356 +0000 UTC m=+146.387276274" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.218574 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ht55m" podStartSLOduration=125.218555118 podStartE2EDuration="2m5.218555118s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:08.217899188 +0000 UTC m=+146.409943106" watchObservedRunningTime="2025-11-27 06:55:08.218555118 +0000 UTC m=+146.410599076" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.258264 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:08 crc kubenswrapper[4971]: E1127 06:55:08.260093 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:08.760060343 +0000 UTC m=+146.952104251 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.260732 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" event={"ID":"3f201996-72e7-43c9-b004-47366d4b05e4","Type":"ContainerStarted","Data":"2fbec1edeb9c0c247169769bc2ec969fc2c49327a873ca00b45dc7f5de40f37b"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.260775 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" event={"ID":"3f201996-72e7-43c9-b004-47366d4b05e4","Type":"ContainerStarted","Data":"241ffe2a0d170e566f3d74f733b8438c5b904ae1d72aabe57a630b0662145b78"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.285281 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" podStartSLOduration=126.285248483 podStartE2EDuration="2m6.285248483s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:08.273124358 +0000 UTC m=+146.465168276" watchObservedRunningTime="2025-11-27 06:55:08.285248483 +0000 UTC m=+146.477292401" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.288746 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hh55n" event={"ID":"c9dddee2-cf40-4aa7-9e05-0776bfb4d534","Type":"ContainerStarted","Data":"68ffb56f5efe1a93f3bda56ba67e8c1ca98cd20021e481a52b443e1b3afb9ba6"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.288816 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hh55n" event={"ID":"c9dddee2-cf40-4aa7-9e05-0776bfb4d534","Type":"ContainerStarted","Data":"05198b24e5761937a0436d2feaea05fd28e8ffc29a9c26e7978659b9c61f64a4"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.318009 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-p5wn9" event={"ID":"3973ab95-1d0e-49d6-aadb-79fde6dd74b8","Type":"ContainerStarted","Data":"b05a7365c69b1886c9a2982a211796cffdccaedba24adf6ac9deed05350ab182"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.318956 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-p5wn9" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.341569 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-497pk" podStartSLOduration=125.341547896 podStartE2EDuration="2m5.341547896s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:08.302778076 +0000 UTC m=+146.494822014" watchObservedRunningTime="2025-11-27 06:55:08.341547896 +0000 UTC 
m=+146.533591814" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.354669 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n56lw" event={"ID":"35a21adf-32b6-4a19-b592-0f941e07a3c5","Type":"ContainerStarted","Data":"73bdb9850029d3cb5766df998b1505a8dc0101898b6289a055a4c196f3521517"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.360346 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:08 crc kubenswrapper[4971]: E1127 06:55:08.362198 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:08.862178525 +0000 UTC m=+147.054222553 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.362465 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-kzfsj" event={"ID":"baace783-c727-4031-bdae-749783258756","Type":"ContainerStarted","Data":"a5ee34abf206ba8ae87566b171c3d1acc2ddf8b5a29a0f1ec7893e5f97ff7af7"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.362495 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-kzfsj" event={"ID":"baace783-c727-4031-bdae-749783258756","Type":"ContainerStarted","Data":"c3f7dbcf0cc4c7c891c5982ec2c10cb75bf25f46f06a3cf16081d87e2c9b1937"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.386921 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-p5wn9" podStartSLOduration=125.38688892 podStartE2EDuration="2m5.38688892s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:08.384765624 +0000 UTC m=+146.576809552" watchObservedRunningTime="2025-11-27 06:55:08.38688892 +0000 UTC m=+146.578932838" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.388480 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hh55n" podStartSLOduration=125.388472259 podStartE2EDuration="2m5.388472259s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:08.3419931 +0000 UTC m=+146.534037038" watchObservedRunningTime="2025-11-27 06:55:08.388472259 +0000 UTC m=+146.580516177" Nov 27 
06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.392911 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv" event={"ID":"89bb08d2-3252-4d85-91bc-3b595be4c32a","Type":"ContainerStarted","Data":"39a1a798b99bdec77c86ab43b9169bbc01a2272afe93d1204b239bfde5fade05"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.393312 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv" event={"ID":"89bb08d2-3252-4d85-91bc-3b595be4c32a","Type":"ContainerStarted","Data":"18e93e9d0d294263f33043c9717f895703af7c7058d51b8305c52d82e0c95826"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.422275 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-7k28d" event={"ID":"6aeeb09d-7519-42c7-be1d-2355b9a704e2","Type":"ContainerStarted","Data":"e80c29cfda7acd6bcfd63be9e9baef1ddd996adb7138b659e3b764b91271509f"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.440576 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz" event={"ID":"3a0d9f4d-880f-4189-b479-fa0a1f2fb513","Type":"ContainerStarted","Data":"da8001e0211f7e3e65dd059db1e047d3a5f29c51db071aa258b6782b8e80ef63"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.449377 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-kzfsj" podStartSLOduration=125.449360555 podStartE2EDuration="2m5.449360555s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:08.449185319 +0000 UTC m=+146.641229237" watchObservedRunningTime="2025-11-27 06:55:08.449360555 +0000 UTC m=+146.641404473" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.456680 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48" event={"ID":"7cc88187-87e4-4fa5-a431-d3b7b4779f56","Type":"ContainerStarted","Data":"e6e6b034d754bab261b86671e0fd1bcbc2b571c5927f93403c924a0d8404d279"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.456730 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48" event={"ID":"7cc88187-87e4-4fa5-a431-d3b7b4779f56","Type":"ContainerStarted","Data":"d107738a3f882d143a9024181014cb460f47951c6e67b23afdcedde789006d22"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.457633 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.461487 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:08 crc kubenswrapper[4971]: E1127 06:55:08.463596 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-11-27 06:55:08.963574135 +0000 UTC m=+147.155618053 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.465561 4971 patch_prober.go:28] interesting pod/router-default-5444994796-s6mff container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 27 06:55:08 crc kubenswrapper[4971]: [-]has-synced failed: reason withheld Nov 27 06:55:08 crc kubenswrapper[4971]: [+]process-running ok Nov 27 06:55:08 crc kubenswrapper[4971]: healthz check failed Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.465616 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-s6mff" podUID="5c9c5353-0cec-4776-9c51-553e90e76a2a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.471087 4971 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-ghv48 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" start-of-body= Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.471137 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48" podUID="7cc88187-87e4-4fa5-a431-d3b7b4779f56" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.484328 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2drfq" event={"ID":"cd2e0a36-2e94-4ed6-9b7d-1755d593d805","Type":"ContainerStarted","Data":"7c43dbfd342b66831bd19b054bfa1bf4cf9a529ea2a049ce1e7e237bc2ad8eb5"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.484401 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2drfq" event={"ID":"cd2e0a36-2e94-4ed6-9b7d-1755d593d805","Type":"ContainerStarted","Data":"5566d089bfd892ac4b92450cd56b07624a47b4041e7945afd079759595ff6411"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.516298 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-r4xbz" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.524291 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9sjf5" event={"ID":"73452466-9780-461c-abd6-282cda0d9a7e","Type":"ContainerStarted","Data":"c737a386071df94a88f366aa1de2a9cd9ea65adb0ffa375139cd9dff8936e6d6"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.524364 4971 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9sjf5" event={"ID":"73452466-9780-461c-abd6-282cda0d9a7e","Type":"ContainerStarted","Data":"250ff0614ea7d626c25f51a3556e22307333fea23a424df1b5009dad71ed9cd0"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.533806 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-7k28d" podStartSLOduration=125.533790379 podStartE2EDuration="2m5.533790379s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:08.53319318 +0000 UTC m=+146.725237088" watchObservedRunningTime="2025-11-27 06:55:08.533790379 +0000 UTC m=+146.725834297" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.542337 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv" podStartSLOduration=125.542318633 podStartE2EDuration="2m5.542318633s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:08.499251419 +0000 UTC m=+146.691295337" watchObservedRunningTime="2025-11-27 06:55:08.542318633 +0000 UTC m=+146.734362551" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.566071 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:08 crc kubenswrapper[4971]: E1127 06:55:08.569831 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:09.069759883 +0000 UTC m=+147.261803801 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.571730 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" event={"ID":"6df59c92-6515-4bf1-aad4-57daf0d8fc76","Type":"ContainerStarted","Data":"96b79214b2981b78df80e586577ef173ce2597f65a1f86b3440300d2f27dd60e"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.571837 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.581496 4971 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-m7fh9 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" start-of-body= Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.581567 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" podUID="6df59c92-6515-4bf1-aad4-57daf0d8fc76" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.583478 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-kg7xs" event={"ID":"00fa6f7a-50bb-42e0-8643-a0fae088f528","Type":"ContainerStarted","Data":"1ee730980c1ac86ea1a98b98b76ec1cdf9d0bf1bc53a7af1ffd54638219118a0"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.583549 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-kg7xs" event={"ID":"00fa6f7a-50bb-42e0-8643-a0fae088f528","Type":"ContainerStarted","Data":"e59e64250e4d6733b7addae526b975d7d1bf8e228e6b9aad2dcabbfac5893cc3"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.597994 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-f6k2p" event={"ID":"45462412-2e4e-480d-8874-2949c5dd691e","Type":"ContainerStarted","Data":"ac367474bb69429bd987bfa10ef9e78530cb9936425855abfce4f67bcb337cc7"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.598056 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-f6k2p" event={"ID":"45462412-2e4e-480d-8874-2949c5dd691e","Type":"ContainerStarted","Data":"fd269cda5c526574729d226ec1946f12cd6df173ce8a2ee9cef2628da5f80be2"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.600893 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4" event={"ID":"75f685e8-220b-4588-8f26-66d305e3dfea","Type":"ContainerStarted","Data":"a5af222ae41b67cf83623ae12ea27cacd75257d11a10d18b2955aabe52e6e945"} Nov 27 06:55:08 crc kubenswrapper[4971]: 
I1127 06:55:08.600929 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4" event={"ID":"75f685e8-220b-4588-8f26-66d305e3dfea","Type":"ContainerStarted","Data":"73eac7fc80fec462f3c5536c18e21e2430e41c7f2c02e9481f2117738b469c66"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.604883 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2drfq" podStartSLOduration=125.604855259 podStartE2EDuration="2m5.604855259s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:08.59680206 +0000 UTC m=+146.788845978" watchObservedRunningTime="2025-11-27 06:55:08.604855259 +0000 UTC m=+146.796899177" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.628927 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc" event={"ID":"3b071787-4135-43ce-a44e-68a0be71919e","Type":"ContainerStarted","Data":"a26175d54378a1803986f7411eea38bb00c2049dc4210a6f85b589fe7f84d82b"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.628971 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc" event={"ID":"3b071787-4135-43ce-a44e-68a0be71919e","Type":"ContainerStarted","Data":"360116a31bfae30656b6aec597abe2111bfa767319c4799a7dcfa2f3ffe58ada"} Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.635319 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48" podStartSLOduration=125.635300602 podStartE2EDuration="2m5.635300602s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:08.632709962 +0000 UTC m=+146.824753910" watchObservedRunningTime="2025-11-27 06:55:08.635300602 +0000 UTC m=+146.827344520" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.673106 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:08 crc kubenswrapper[4971]: E1127 06:55:08.674544 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:09.174501116 +0000 UTC m=+147.366545094 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.699507 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9sjf5" podStartSLOduration=125.69948984 podStartE2EDuration="2m5.69948984s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:08.696428095 +0000 UTC m=+146.888472013" watchObservedRunningTime="2025-11-27 06:55:08.69948984 +0000 UTC m=+146.891533758" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.775083 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-kg7xs" podStartSLOduration=125.775052539 podStartE2EDuration="2m5.775052539s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:08.742715948 +0000 UTC m=+146.934759876" watchObservedRunningTime="2025-11-27 06:55:08.775052539 +0000 UTC m=+146.967096457" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.775716 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:08 crc kubenswrapper[4971]: E1127 06:55:08.778561 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:09.278547127 +0000 UTC m=+147.470591045 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.817101 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4" podStartSLOduration=125.815493811 podStartE2EDuration="2m5.815493811s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:08.777368681 +0000 UTC m=+146.969412599" watchObservedRunningTime="2025-11-27 06:55:08.815493811 +0000 UTC m=+147.007537749" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.818265 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-f6k2p" podStartSLOduration=125.818255967 podStartE2EDuration="2m5.818255967s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:08.811107726 +0000 UTC m=+147.003151654" watchObservedRunningTime="2025-11-27 06:55:08.818255967 +0000 UTC m=+147.010299885" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.863006 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc" podStartSLOduration=125.862990032 podStartE2EDuration="2m5.862990032s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:08.840925509 +0000 UTC m=+147.032969427" watchObservedRunningTime="2025-11-27 06:55:08.862990032 +0000 UTC m=+147.055033940" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.863418 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" podStartSLOduration=125.863409525 podStartE2EDuration="2m5.863409525s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:08.862021102 +0000 UTC m=+147.054065040" watchObservedRunningTime="2025-11-27 06:55:08.863409525 +0000 UTC m=+147.055453463" Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.882003 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:08 crc kubenswrapper[4971]: E1127 06:55:08.882336 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-11-27 06:55:09.382320621 +0000 UTC m=+147.574364539 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:08 crc kubenswrapper[4971]: I1127 06:55:08.983219 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:08 crc kubenswrapper[4971]: E1127 06:55:08.983623 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:09.483607657 +0000 UTC m=+147.675651585 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.084003 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:09 crc kubenswrapper[4971]: E1127 06:55:09.084232 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:09.584190071 +0000 UTC m=+147.776233979 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.084856 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:09 crc kubenswrapper[4971]: E1127 06:55:09.085306 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:09.585285335 +0000 UTC m=+147.777329253 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.138981 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8hv78" Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.189417 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:09 crc kubenswrapper[4971]: E1127 06:55:09.189631 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:09.689601805 +0000 UTC m=+147.881645723 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.189803 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:09 crc kubenswrapper[4971]: E1127 06:55:09.190171 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:09.690161582 +0000 UTC m=+147.882205500 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.290788 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:09 crc kubenswrapper[4971]: E1127 06:55:09.291008 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:09.790965354 +0000 UTC m=+147.983009262 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.291230 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:09 crc kubenswrapper[4971]: E1127 06:55:09.291584 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:09.791562052 +0000 UTC m=+147.983606160 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.392989 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:09 crc kubenswrapper[4971]: E1127 06:55:09.393270 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:09.89322757 +0000 UTC m=+148.085271478 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.393682 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:09 crc kubenswrapper[4971]: E1127 06:55:09.394076 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:09.894057316 +0000 UTC m=+148.086101244 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.454458 4971 patch_prober.go:28] interesting pod/router-default-5444994796-s6mff container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 27 06:55:09 crc kubenswrapper[4971]: [-]has-synced failed: reason withheld Nov 27 06:55:09 crc kubenswrapper[4971]: [+]process-running ok Nov 27 06:55:09 crc kubenswrapper[4971]: healthz check failed Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.454585 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-s6mff" podUID="5c9c5353-0cec-4776-9c51-553e90e76a2a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.494269 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:09 crc kubenswrapper[4971]: E1127 06:55:09.494501 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:09.994475115 +0000 UTC m=+148.186519033 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.494648 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:09 crc kubenswrapper[4971]: E1127 06:55:09.494965 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:09.99495096 +0000 UTC m=+148.186994878 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.596050 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:09 crc kubenswrapper[4971]: E1127 06:55:09.596226 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.096196055 +0000 UTC m=+148.288239983 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.596695 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:09 crc kubenswrapper[4971]: E1127 06:55:09.597110 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.097098473 +0000 UTC m=+148.289142451 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.634630 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-k2ldt" event={"ID":"05b6cd99-aeb2-4b20-8fb3-c50e72a03611","Type":"ContainerStarted","Data":"650694f39f277582c52b595b33fd848c137ce7eb9e22caa42478770344d8b7a6"} Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.634854 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-k2ldt" Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.636238 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" event={"ID":"b2e2892d-ee1f-4349-b6b5-d0bc908d1397","Type":"ContainerStarted","Data":"20f4d441679ebbf55942e3cee377a3bbc043e561485225702c18a0d328cc0523"} Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.637984 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-kg7xs" event={"ID":"00fa6f7a-50bb-42e0-8643-a0fae088f528","Type":"ContainerStarted","Data":"be0aa90ff07b9736e9b9ae2ac4f4af5b324700950be90dcf82a9914020b1197d"} Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.639324 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-j9zw4" event={"ID":"75f685e8-220b-4588-8f26-66d305e3dfea","Type":"ContainerStarted","Data":"949cf1c4c81ccecbd125acd10c7b680c626cc667403705562069ad596ceffec1"} Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.640856 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n56lw" 
event={"ID":"35a21adf-32b6-4a19-b592-0f941e07a3c5","Type":"ContainerStarted","Data":"54dfb271efcbbaa57cb4c1f1e361e4bab54888ed9d41fdc50fd1140708785bb4"} Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.642791 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jdbbc" event={"ID":"3b071787-4135-43ce-a44e-68a0be71919e","Type":"ContainerStarted","Data":"5c2f21ea4bbe2efa5f47a74674539e0f465b67fb7de333048bb8f9f23acbac1d"} Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.646617 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-xhx92" event={"ID":"6e183c66-f7ea-4191-9f0d-fb2eb12ee864","Type":"ContainerStarted","Data":"2b5a1b1825bfdc2691fae4261e5e440a8515b34642d1dc958d5b035e0e032ea1"} Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.646870 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-xhx92" event={"ID":"6e183c66-f7ea-4191-9f0d-fb2eb12ee864","Type":"ContainerStarted","Data":"5118f8fdeebd5eef9b7c0cd27544bac22b1f7d4c327af3d580b8ec984ba2402d"} Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.650685 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9sjf5" event={"ID":"73452466-9780-461c-abd6-282cda0d9a7e","Type":"ContainerStarted","Data":"b01b7ca6776ad0d2b06c5270f4c0edacf01919e1f70c458f5644e315b84aba48"} Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.655110 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-7k28d" event={"ID":"6aeeb09d-7519-42c7-be1d-2355b9a704e2","Type":"ContainerStarted","Data":"b782b8ef335ea82baebbd7e5f19dc1a904ddf125f650e1209b4c452e560bcd0f"} Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.656293 4971 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-m7fh9 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" start-of-body= Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.656348 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" podUID="6df59c92-6515-4bf1-aad4-57daf0d8fc76" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.673958 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ghv48" Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.698084 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:09 crc kubenswrapper[4971]: E1127 06:55:09.698414 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-11-27 06:55:10.198392969 +0000 UTC m=+148.390436887 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.698861 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:09 crc kubenswrapper[4971]: E1127 06:55:09.699334 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.199324308 +0000 UTC m=+148.391368226 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.709702 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-k2ldt" podStartSLOduration=9.709680309 podStartE2EDuration="9.709680309s" podCreationTimestamp="2025-11-27 06:55:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:09.681867278 +0000 UTC m=+147.873911216" watchObservedRunningTime="2025-11-27 06:55:09.709680309 +0000 UTC m=+147.901724227" Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.746406 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-n56lw" podStartSLOduration=126.746389555 podStartE2EDuration="2m6.746389555s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:09.713462266 +0000 UTC m=+147.905506204" watchObservedRunningTime="2025-11-27 06:55:09.746389555 +0000 UTC m=+147.938433473" Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.749822 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-xhx92" podStartSLOduration=126.749803101 podStartE2EDuration="2m6.749803101s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:09.746020014 +0000 UTC m=+147.938063942" 
watchObservedRunningTime="2025-11-27 06:55:09.749803101 +0000 UTC m=+147.941847019" Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.799954 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:09 crc kubenswrapper[4971]: E1127 06:55:09.800285 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.300264444 +0000 UTC m=+148.492308382 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.801126 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:09 crc kubenswrapper[4971]: E1127 06:55:09.806925 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.306898519 +0000 UTC m=+148.498942447 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:09 crc kubenswrapper[4971]: I1127 06:55:09.937257 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:09 crc kubenswrapper[4971]: E1127 06:55:09.937655 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.437622367 +0000 UTC m=+148.629666275 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.038566 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:10 crc kubenswrapper[4971]: E1127 06:55:10.039006 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.538989896 +0000 UTC m=+148.731033814 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.139450 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:10 crc kubenswrapper[4971]: E1127 06:55:10.139703 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.639670863 +0000 UTC m=+148.831714791 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.140063 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:10 crc kubenswrapper[4971]: E1127 06:55:10.140364 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.640351884 +0000 UTC m=+148.832395802 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.241223 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:10 crc kubenswrapper[4971]: E1127 06:55:10.241414 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.741386492 +0000 UTC m=+148.933430410 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.241683 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:10 crc kubenswrapper[4971]: E1127 06:55:10.241975 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.74196806 +0000 UTC m=+148.934011978 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.342741 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:10 crc kubenswrapper[4971]: E1127 06:55:10.342927 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.842901846 +0000 UTC m=+149.034945764 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.343401 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:10 crc kubenswrapper[4971]: E1127 06:55:10.343735 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.843726451 +0000 UTC m=+149.035770369 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.444841 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:10 crc kubenswrapper[4971]: E1127 06:55:10.445033 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.945005527 +0000 UTC m=+149.137049435 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.445078 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.445162 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.445223 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.445284 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.445303 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:55:10 crc kubenswrapper[4971]: E1127 06:55:10.445358 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:10.945347628 +0000 UTC m=+149.137391546 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.450476 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.451672 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.453519 4971 patch_prober.go:28] interesting pod/router-default-5444994796-s6mff container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 27 06:55:10 crc kubenswrapper[4971]: [-]has-synced failed: reason withheld Nov 27 06:55:10 crc kubenswrapper[4971]: [+]process-running ok Nov 27 06:55:10 crc kubenswrapper[4971]: healthz check failed Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.453570 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-s6mff" podUID="5c9c5353-0cec-4776-9c51-553e90e76a2a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.455705 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.468793 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.546098 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:10 crc kubenswrapper[4971]: E1127 06:55:10.546191 4971 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:11.04617409 +0000 UTC m=+149.238218008 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.546763 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:10 crc kubenswrapper[4971]: E1127 06:55:10.547046 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:11.047036116 +0000 UTC m=+149.239080034 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.568154 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.575052 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.580759 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.586351 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.651040 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:10 crc kubenswrapper[4971]: E1127 06:55:10.651363 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-11-27 06:55:11.151345236 +0000 UTC m=+149.343389154 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.754147 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:10 crc kubenswrapper[4971]: E1127 06:55:10.754820 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:11.25480622 +0000 UTC m=+149.446850138 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.757232 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" event={"ID":"b2e2892d-ee1f-4349-b6b5-d0bc908d1397","Type":"ContainerStarted","Data":"626a4eda319264699848103a67315ada709472ded3c0a8ae28f9ac82379cb141"} Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.860454 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:10 crc kubenswrapper[4971]: E1127 06:55:10.862331 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:11.362293788 +0000 UTC m=+149.554337706 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:10 crc kubenswrapper[4971]: I1127 06:55:10.975210 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:10 crc kubenswrapper[4971]: E1127 06:55:10.975523 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:11.475509414 +0000 UTC m=+149.667553332 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:11 crc kubenswrapper[4971]: W1127 06:55:11.029481 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-83990a520e13b8de2e8fde3c0a0638bc72b4b1a17e440ede9ba4d6469164c8df WatchSource:0}: Error finding container 83990a520e13b8de2e8fde3c0a0638bc72b4b1a17e440ede9ba4d6469164c8df: Status 404 returned error can't find the container with id 83990a520e13b8de2e8fde3c0a0638bc72b4b1a17e440ede9ba4d6469164c8df Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.077046 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:11 crc kubenswrapper[4971]: E1127 06:55:11.077495 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:11.577475911 +0000 UTC m=+149.769519829 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.082290 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rnpmr"] Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.083261 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rnpmr" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.090621 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.096181 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rnpmr"] Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.098477 4971 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.178721 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thx9h\" (UniqueName: \"kubernetes.io/projected/2bec9dd3-b528-4b67-b949-8e9eb074e222-kube-api-access-thx9h\") pod \"community-operators-rnpmr\" (UID: \"2bec9dd3-b528-4b67-b949-8e9eb074e222\") " pod="openshift-marketplace/community-operators-rnpmr" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.179090 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.179122 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bec9dd3-b528-4b67-b949-8e9eb074e222-utilities\") pod \"community-operators-rnpmr\" (UID: \"2bec9dd3-b528-4b67-b949-8e9eb074e222\") " pod="openshift-marketplace/community-operators-rnpmr" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.179139 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bec9dd3-b528-4b67-b949-8e9eb074e222-catalog-content\") pod \"community-operators-rnpmr\" (UID: \"2bec9dd3-b528-4b67-b949-8e9eb074e222\") " pod="openshift-marketplace/community-operators-rnpmr" Nov 27 06:55:11 crc kubenswrapper[4971]: E1127 06:55:11.179432 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-27 06:55:11.679421157 +0000 UTC m=+149.871465075 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.267324 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fhgrm"] Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.268323 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fhgrm" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.270432 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.280269 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.280519 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thx9h\" (UniqueName: \"kubernetes.io/projected/2bec9dd3-b528-4b67-b949-8e9eb074e222-kube-api-access-thx9h\") pod \"community-operators-rnpmr\" (UID: \"2bec9dd3-b528-4b67-b949-8e9eb074e222\") " pod="openshift-marketplace/community-operators-rnpmr" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.280639 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bec9dd3-b528-4b67-b949-8e9eb074e222-utilities\") pod \"community-operators-rnpmr\" (UID: \"2bec9dd3-b528-4b67-b949-8e9eb074e222\") " pod="openshift-marketplace/community-operators-rnpmr" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.280665 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bec9dd3-b528-4b67-b949-8e9eb074e222-catalog-content\") pod \"community-operators-rnpmr\" (UID: \"2bec9dd3-b528-4b67-b949-8e9eb074e222\") " pod="openshift-marketplace/community-operators-rnpmr" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.281107 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bec9dd3-b528-4b67-b949-8e9eb074e222-catalog-content\") pod \"community-operators-rnpmr\" (UID: \"2bec9dd3-b528-4b67-b949-8e9eb074e222\") " pod="openshift-marketplace/community-operators-rnpmr" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.281373 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bec9dd3-b528-4b67-b949-8e9eb074e222-utilities\") pod \"community-operators-rnpmr\" (UID: \"2bec9dd3-b528-4b67-b949-8e9eb074e222\") " pod="openshift-marketplace/community-operators-rnpmr" Nov 27 06:55:11 crc kubenswrapper[4971]: E1127 06:55:11.281414 4971 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:11.781394804 +0000 UTC m=+149.973438722 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.285650 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fhgrm"] Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.298895 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thx9h\" (UniqueName: \"kubernetes.io/projected/2bec9dd3-b528-4b67-b949-8e9eb074e222-kube-api-access-thx9h\") pod \"community-operators-rnpmr\" (UID: \"2bec9dd3-b528-4b67-b949-8e9eb074e222\") " pod="openshift-marketplace/community-operators-rnpmr" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.377907 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.381646 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8978d4d3-664f-40f4-b33c-83cd92577bc3-utilities\") pod \"certified-operators-fhgrm\" (UID: \"8978d4d3-664f-40f4-b33c-83cd92577bc3\") " pod="openshift-marketplace/certified-operators-fhgrm" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.381748 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.381776 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gftcn\" (UniqueName: \"kubernetes.io/projected/8978d4d3-664f-40f4-b33c-83cd92577bc3-kube-api-access-gftcn\") pod \"certified-operators-fhgrm\" (UID: \"8978d4d3-664f-40f4-b33c-83cd92577bc3\") " pod="openshift-marketplace/certified-operators-fhgrm" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.381809 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8978d4d3-664f-40f4-b33c-83cd92577bc3-catalog-content\") pod \"certified-operators-fhgrm\" (UID: \"8978d4d3-664f-40f4-b33c-83cd92577bc3\") " pod="openshift-marketplace/certified-operators-fhgrm" Nov 27 06:55:11 crc kubenswrapper[4971]: E1127 06:55:11.382055 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-11-27 06:55:11.88204258 +0000 UTC m=+150.074086498 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rn8q9" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.383485 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-jxflz" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.383595 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-jxflz" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.390399 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-jxflz" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.460794 4971 patch_prober.go:28] interesting pod/router-default-5444994796-s6mff container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 27 06:55:11 crc kubenswrapper[4971]: [-]has-synced failed: reason withheld Nov 27 06:55:11 crc kubenswrapper[4971]: [+]process-running ok Nov 27 06:55:11 crc kubenswrapper[4971]: healthz check failed Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.460861 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-s6mff" podUID="5c9c5353-0cec-4776-9c51-553e90e76a2a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.465744 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bhc8q"] Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.466761 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bhc8q" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.475959 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rnpmr" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.476052 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bhc8q"] Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.487548 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.487778 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8978d4d3-664f-40f4-b33c-83cd92577bc3-utilities\") pod \"certified-operators-fhgrm\" (UID: \"8978d4d3-664f-40f4-b33c-83cd92577bc3\") " pod="openshift-marketplace/certified-operators-fhgrm" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.487866 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gftcn\" (UniqueName: \"kubernetes.io/projected/8978d4d3-664f-40f4-b33c-83cd92577bc3-kube-api-access-gftcn\") pod \"certified-operators-fhgrm\" (UID: \"8978d4d3-664f-40f4-b33c-83cd92577bc3\") " pod="openshift-marketplace/certified-operators-fhgrm" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.488054 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8978d4d3-664f-40f4-b33c-83cd92577bc3-catalog-content\") pod \"certified-operators-fhgrm\" (UID: \"8978d4d3-664f-40f4-b33c-83cd92577bc3\") " pod="openshift-marketplace/certified-operators-fhgrm" Nov 27 06:55:11 crc kubenswrapper[4971]: E1127 06:55:11.488924 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-27 06:55:11.988898499 +0000 UTC m=+150.180942417 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.489762 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8978d4d3-664f-40f4-b33c-83cd92577bc3-utilities\") pod \"certified-operators-fhgrm\" (UID: \"8978d4d3-664f-40f4-b33c-83cd92577bc3\") " pod="openshift-marketplace/certified-operators-fhgrm" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.492257 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8978d4d3-664f-40f4-b33c-83cd92577bc3-catalog-content\") pod \"certified-operators-fhgrm\" (UID: \"8978d4d3-664f-40f4-b33c-83cd92577bc3\") " pod="openshift-marketplace/certified-operators-fhgrm" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.509702 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gftcn\" (UniqueName: \"kubernetes.io/projected/8978d4d3-664f-40f4-b33c-83cd92577bc3-kube-api-access-gftcn\") pod \"certified-operators-fhgrm\" (UID: \"8978d4d3-664f-40f4-b33c-83cd92577bc3\") " pod="openshift-marketplace/certified-operators-fhgrm" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.543052 4971 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-11-27T06:55:11.098493812Z","Handler":null,"Name":""} Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.546867 4971 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.546926 4971 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.581861 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fhgrm" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.589385 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dda2cad1-cdbe-48f8-ba4c-3074565de24b-utilities\") pod \"community-operators-bhc8q\" (UID: \"dda2cad1-cdbe-48f8-ba4c-3074565de24b\") " pod="openshift-marketplace/community-operators-bhc8q" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.589506 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dda2cad1-cdbe-48f8-ba4c-3074565de24b-catalog-content\") pod \"community-operators-bhc8q\" (UID: \"dda2cad1-cdbe-48f8-ba4c-3074565de24b\") " pod="openshift-marketplace/community-operators-bhc8q" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.589544 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2cll\" (UniqueName: \"kubernetes.io/projected/dda2cad1-cdbe-48f8-ba4c-3074565de24b-kube-api-access-k2cll\") pod \"community-operators-bhc8q\" (UID: \"dda2cad1-cdbe-48f8-ba4c-3074565de24b\") " pod="openshift-marketplace/community-operators-bhc8q" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.589576 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.592323 4971 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.592362 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.623065 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rn8q9\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") " pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.665913 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dvz2k"] Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.666816 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dvz2k" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.680888 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rnpmr"] Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.682596 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dvz2k"] Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.691090 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.691408 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dda2cad1-cdbe-48f8-ba4c-3074565de24b-utilities\") pod \"community-operators-bhc8q\" (UID: \"dda2cad1-cdbe-48f8-ba4c-3074565de24b\") " pod="openshift-marketplace/community-operators-bhc8q" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.691469 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dda2cad1-cdbe-48f8-ba4c-3074565de24b-catalog-content\") pod \"community-operators-bhc8q\" (UID: \"dda2cad1-cdbe-48f8-ba4c-3074565de24b\") " pod="openshift-marketplace/community-operators-bhc8q" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.691498 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2cll\" (UniqueName: \"kubernetes.io/projected/dda2cad1-cdbe-48f8-ba4c-3074565de24b-kube-api-access-k2cll\") pod \"community-operators-bhc8q\" (UID: \"dda2cad1-cdbe-48f8-ba4c-3074565de24b\") " pod="openshift-marketplace/community-operators-bhc8q" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.692617 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dda2cad1-cdbe-48f8-ba4c-3074565de24b-catalog-content\") pod \"community-operators-bhc8q\" (UID: \"dda2cad1-cdbe-48f8-ba4c-3074565de24b\") " pod="openshift-marketplace/community-operators-bhc8q" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.692810 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dda2cad1-cdbe-48f8-ba4c-3074565de24b-utilities\") pod \"community-operators-bhc8q\" (UID: \"dda2cad1-cdbe-48f8-ba4c-3074565de24b\") " pod="openshift-marketplace/community-operators-bhc8q" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.699983 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 27 06:55:11 crc kubenswrapper[4971]: W1127 06:55:11.708246 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2bec9dd3_b528_4b67_b949_8e9eb074e222.slice/crio-a67bf21a6930c219142d4109f1a4c16f549c8dce2190a58c46bc0aef7c26cd8b WatchSource:0}: Error finding container a67bf21a6930c219142d4109f1a4c16f549c8dce2190a58c46bc0aef7c26cd8b: Status 404 returned error can't find the container with id a67bf21a6930c219142d4109f1a4c16f549c8dce2190a58c46bc0aef7c26cd8b Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.713913 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2cll\" (UniqueName: \"kubernetes.io/projected/dda2cad1-cdbe-48f8-ba4c-3074565de24b-kube-api-access-k2cll\") pod \"community-operators-bhc8q\" (UID: \"dda2cad1-cdbe-48f8-ba4c-3074565de24b\") " pod="openshift-marketplace/community-operators-bhc8q" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.765794 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"3a78e54eb98c5f82740ecde63d05defc5618f762927d6ed1f3adeaea7693373f"} Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.765838 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"05782d46a1735f13e37863aedbb15841d0a487bfe60d370f4e0e19310893c2c3"} Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.769425 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rnpmr" event={"ID":"2bec9dd3-b528-4b67-b949-8e9eb074e222","Type":"ContainerStarted","Data":"a67bf21a6930c219142d4109f1a4c16f549c8dce2190a58c46bc0aef7c26cd8b"} Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.774324 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"8b5e2cb83e78bcf427b655dbad6699c5930942a23062f4ee1f334da9e62903b7"} Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.774362 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"272d0610bd9404f0c0cfc07d5b351747cec9d72766b9d6d328edda361ae45f86"} Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.775259 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.792772 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vmhq\" (UniqueName: \"kubernetes.io/projected/75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c-kube-api-access-7vmhq\") pod \"certified-operators-dvz2k\" (UID: \"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c\") " pod="openshift-marketplace/certified-operators-dvz2k" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.792830 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c-utilities\") pod \"certified-operators-dvz2k\" (UID: \"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c\") " pod="openshift-marketplace/certified-operators-dvz2k" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.792938 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c-catalog-content\") pod \"certified-operators-dvz2k\" (UID: \"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c\") " pod="openshift-marketplace/certified-operators-dvz2k" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.795984 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"7ff41e2b161b443307c886b2ccd9486bcb0d77d9b62a3d390c1e8376a8c04b0d"} Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.796029 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"83990a520e13b8de2e8fde3c0a0638bc72b4b1a17e440ede9ba4d6469164c8df"} Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.799305 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fhgrm"] Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.799591 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" event={"ID":"b2e2892d-ee1f-4349-b6b5-d0bc908d1397","Type":"ContainerStarted","Data":"3e92650f7d055a34b060711a4ffc3b0e4a03e5449f4bb89ba5c7d9795ca3d745"} Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.799612 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" event={"ID":"b2e2892d-ee1f-4349-b6b5-d0bc908d1397","Type":"ContainerStarted","Data":"b955d46c1aa75a29b2dd858391528c00d64efba84fa7c0a3c68ec1ec3d8972f1"} Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.803439 4971 generic.go:334] "Generic (PLEG): container finished" podID="89bb08d2-3252-4d85-91bc-3b595be4c32a" containerID="39a1a798b99bdec77c86ab43b9169bbc01a2272afe93d1204b239bfde5fade05" exitCode=0 Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.804945 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv" event={"ID":"89bb08d2-3252-4d85-91bc-3b595be4c32a","Type":"ContainerDied","Data":"39a1a798b99bdec77c86ab43b9169bbc01a2272afe93d1204b239bfde5fade05"} Nov 27 06:55:11 crc kubenswrapper[4971]: W1127 06:55:11.810956 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8978d4d3_664f_40f4_b33c_83cd92577bc3.slice/crio-017b5f39748f3450e81a862768bae7d3e1c0528e4668861949318bdd353cf4ae WatchSource:0}: Error finding container 017b5f39748f3450e81a862768bae7d3e1c0528e4668861949318bdd353cf4ae: Status 404 returned error can't find the container with id 017b5f39748f3450e81a862768bae7d3e1c0528e4668861949318bdd353cf4ae Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.811053 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-jxflz" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.828319 4971 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bhc8q" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.849462 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-p6gxf" podStartSLOduration=11.849441913 podStartE2EDuration="11.849441913s" podCreationTimestamp="2025-11-27 06:55:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:11.845950065 +0000 UTC m=+150.037993983" watchObservedRunningTime="2025-11-27 06:55:11.849441913 +0000 UTC m=+150.041485831" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.859327 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.901031 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c-catalog-content\") pod \"certified-operators-dvz2k\" (UID: \"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c\") " pod="openshift-marketplace/certified-operators-dvz2k" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.901437 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vmhq\" (UniqueName: \"kubernetes.io/projected/75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c-kube-api-access-7vmhq\") pod \"certified-operators-dvz2k\" (UID: \"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c\") " pod="openshift-marketplace/certified-operators-dvz2k" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.901521 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c-utilities\") pod \"certified-operators-dvz2k\" (UID: \"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c\") " pod="openshift-marketplace/certified-operators-dvz2k" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.902612 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c-catalog-content\") pod \"certified-operators-dvz2k\" (UID: \"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c\") " pod="openshift-marketplace/certified-operators-dvz2k" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.924659 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c-utilities\") pod \"certified-operators-dvz2k\" (UID: \"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c\") " pod="openshift-marketplace/certified-operators-dvz2k" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.957117 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vmhq\" (UniqueName: \"kubernetes.io/projected/75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c-kube-api-access-7vmhq\") pod \"certified-operators-dvz2k\" (UID: \"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c\") " pod="openshift-marketplace/certified-operators-dvz2k" Nov 27 06:55:11 crc kubenswrapper[4971]: I1127 06:55:11.990862 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dvz2k" Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.145862 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bhc8q"] Nov 27 06:55:12 crc kubenswrapper[4971]: W1127 06:55:12.151700 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddda2cad1_cdbe_48f8_ba4c_3074565de24b.slice/crio-219669685de829f3c01aa8f24f1893f9ee9b2a58d545816b0c09d0019ce0d04e WatchSource:0}: Error finding container 219669685de829f3c01aa8f24f1893f9ee9b2a58d545816b0c09d0019ce0d04e: Status 404 returned error can't find the container with id 219669685de829f3c01aa8f24f1893f9ee9b2a58d545816b0c09d0019ce0d04e Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.185110 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rn8q9"] Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.352619 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dvz2k"] Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.480199 4971 patch_prober.go:28] interesting pod/router-default-5444994796-s6mff container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 27 06:55:12 crc kubenswrapper[4971]: [-]has-synced failed: reason withheld Nov 27 06:55:12 crc kubenswrapper[4971]: [+]process-running ok Nov 27 06:55:12 crc kubenswrapper[4971]: healthz check failed Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.480254 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-s6mff" podUID="5c9c5353-0cec-4776-9c51-553e90e76a2a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.556710 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.588152 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.588924 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.591995 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.592290 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.609589 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.717825 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aac85824-33f5-48d5-b639-bbcbb83d17e6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"aac85824-33f5-48d5-b639-bbcbb83d17e6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.717941 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aac85824-33f5-48d5-b639-bbcbb83d17e6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"aac85824-33f5-48d5-b639-bbcbb83d17e6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.810599 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" event={"ID":"227860ca-baf3-4b07-b0b3-9a29a9eedab6","Type":"ContainerStarted","Data":"f7afc7a3453babc87b8684c325843cb9da949e0d4fb663c480b5d3207488ba10"} Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.810717 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" event={"ID":"227860ca-baf3-4b07-b0b3-9a29a9eedab6","Type":"ContainerStarted","Data":"c1b19a9c8726abf46ebd5db73bb73ebd0e9d3dc15eed1e04c5ba43cd36ba4cf9"} Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.810752 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.812246 4971 generic.go:334] "Generic (PLEG): container finished" podID="dda2cad1-cdbe-48f8-ba4c-3074565de24b" containerID="8b6d4d5ee6e80b6a51d1224965c7ebd63fb30bfee37288113df7011e7d937110" exitCode=0 Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.812369 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bhc8q" event={"ID":"dda2cad1-cdbe-48f8-ba4c-3074565de24b","Type":"ContainerDied","Data":"8b6d4d5ee6e80b6a51d1224965c7ebd63fb30bfee37288113df7011e7d937110"} Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.812442 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bhc8q" event={"ID":"dda2cad1-cdbe-48f8-ba4c-3074565de24b","Type":"ContainerStarted","Data":"219669685de829f3c01aa8f24f1893f9ee9b2a58d545816b0c09d0019ce0d04e"} Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.813928 4971 generic.go:334] "Generic (PLEG): container finished" podID="8978d4d3-664f-40f4-b33c-83cd92577bc3" containerID="71b67f7b32a915365aca80dfd8a4110bac67dd4de2065ef88791d3241b053b61" exitCode=0 Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 
06:55:12.814019 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhgrm" event={"ID":"8978d4d3-664f-40f4-b33c-83cd92577bc3","Type":"ContainerDied","Data":"71b67f7b32a915365aca80dfd8a4110bac67dd4de2065ef88791d3241b053b61"} Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.814043 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhgrm" event={"ID":"8978d4d3-664f-40f4-b33c-83cd92577bc3","Type":"ContainerStarted","Data":"017b5f39748f3450e81a862768bae7d3e1c0528e4668861949318bdd353cf4ae"} Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.815040 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.816404 4971 generic.go:334] "Generic (PLEG): container finished" podID="75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c" containerID="29e7ceaaca90e154c851484122ef5b8c6e19cc9745538cb09b85a41163e48ecb" exitCode=0 Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.816509 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dvz2k" event={"ID":"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c","Type":"ContainerDied","Data":"29e7ceaaca90e154c851484122ef5b8c6e19cc9745538cb09b85a41163e48ecb"} Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.816571 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dvz2k" event={"ID":"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c","Type":"ContainerStarted","Data":"7bea4fa174c4f55b85f2bc92c420e7667c7cacd720a523a7e10036771dd42ca3"} Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.819056 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aac85824-33f5-48d5-b639-bbcbb83d17e6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"aac85824-33f5-48d5-b639-bbcbb83d17e6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.819452 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aac85824-33f5-48d5-b639-bbcbb83d17e6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"aac85824-33f5-48d5-b639-bbcbb83d17e6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.819261 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aac85824-33f5-48d5-b639-bbcbb83d17e6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"aac85824-33f5-48d5-b639-bbcbb83d17e6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.826564 4971 generic.go:334] "Generic (PLEG): container finished" podID="2bec9dd3-b528-4b67-b949-8e9eb074e222" containerID="8fcb05713671593dd4936adedcd6c2b5af92a4b9f923ebbccb85c1684fedd4d1" exitCode=0 Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.828162 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rnpmr" event={"ID":"2bec9dd3-b528-4b67-b949-8e9eb074e222","Type":"ContainerDied","Data":"8fcb05713671593dd4936adedcd6c2b5af92a4b9f923ebbccb85c1684fedd4d1"} Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.854680 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aac85824-33f5-48d5-b639-bbcbb83d17e6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"aac85824-33f5-48d5-b639-bbcbb83d17e6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.876964 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" podStartSLOduration=129.876941658 podStartE2EDuration="2m9.876941658s" podCreationTimestamp="2025-11-27 06:53:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:12.852337396 +0000 UTC m=+151.044381324" watchObservedRunningTime="2025-11-27 06:55:12.876941658 +0000 UTC m=+151.068985576" Nov 27 06:55:12 crc kubenswrapper[4971]: I1127 06:55:12.917721 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.018046 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-blqcq" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.019326 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-blqcq" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.024874 4971 patch_prober.go:28] interesting pod/console-f9d7485db-blqcq container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.024975 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-blqcq" podUID="b2038ca5-beb4-434e-81a7-16a67ad9382d" containerName="console" probeResult="failure" output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.189619 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.192968 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 27 06:55:13 crc kubenswrapper[4971]: W1127 06:55:13.208798 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podaac85824_33f5_48d5_b639_bbcbb83d17e6.slice/crio-424de521d41a012c2e3e7befe5286459adacb35ebaaf83942d39eaba56236e85 WatchSource:0}: Error finding container 424de521d41a012c2e3e7befe5286459adacb35ebaaf83942d39eaba56236e85: Status 404 returned error can't find the container with id 424de521d41a012c2e3e7befe5286459adacb35ebaaf83942d39eaba56236e85 Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.226292 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89bb08d2-3252-4d85-91bc-3b595be4c32a-config-volume\") pod \"89bb08d2-3252-4d85-91bc-3b595be4c32a\" (UID: \"89bb08d2-3252-4d85-91bc-3b595be4c32a\") " Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.226369 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sclj4\" (UniqueName: \"kubernetes.io/projected/89bb08d2-3252-4d85-91bc-3b595be4c32a-kube-api-access-sclj4\") pod \"89bb08d2-3252-4d85-91bc-3b595be4c32a\" (UID: \"89bb08d2-3252-4d85-91bc-3b595be4c32a\") " Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.226499 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89bb08d2-3252-4d85-91bc-3b595be4c32a-secret-volume\") pod \"89bb08d2-3252-4d85-91bc-3b595be4c32a\" (UID: \"89bb08d2-3252-4d85-91bc-3b595be4c32a\") " Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.227722 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89bb08d2-3252-4d85-91bc-3b595be4c32a-config-volume" (OuterVolumeSpecName: "config-volume") pod "89bb08d2-3252-4d85-91bc-3b595be4c32a" (UID: "89bb08d2-3252-4d85-91bc-3b595be4c32a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.235187 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89bb08d2-3252-4d85-91bc-3b595be4c32a-kube-api-access-sclj4" (OuterVolumeSpecName: "kube-api-access-sclj4") pod "89bb08d2-3252-4d85-91bc-3b595be4c32a" (UID: "89bb08d2-3252-4d85-91bc-3b595be4c32a"). InnerVolumeSpecName "kube-api-access-sclj4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.237791 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89bb08d2-3252-4d85-91bc-3b595be4c32a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "89bb08d2-3252-4d85-91bc-3b595be4c32a" (UID: "89bb08d2-3252-4d85-91bc-3b595be4c32a"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.280176 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qfvr4"] Nov 27 06:55:13 crc kubenswrapper[4971]: E1127 06:55:13.280440 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89bb08d2-3252-4d85-91bc-3b595be4c32a" containerName="collect-profiles" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.280466 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="89bb08d2-3252-4d85-91bc-3b595be4c32a" containerName="collect-profiles" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.280600 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="89bb08d2-3252-4d85-91bc-3b595be4c32a" containerName="collect-profiles" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.281450 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qfvr4" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.285363 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.328026 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12325c6c-a817-46ae-b776-01213f064dbf-catalog-content\") pod \"redhat-marketplace-qfvr4\" (UID: \"12325c6c-a817-46ae-b776-01213f064dbf\") " pod="openshift-marketplace/redhat-marketplace-qfvr4" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.328133 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12325c6c-a817-46ae-b776-01213f064dbf-utilities\") pod \"redhat-marketplace-qfvr4\" (UID: \"12325c6c-a817-46ae-b776-01213f064dbf\") " pod="openshift-marketplace/redhat-marketplace-qfvr4" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.328159 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpcx6\" (UniqueName: \"kubernetes.io/projected/12325c6c-a817-46ae-b776-01213f064dbf-kube-api-access-qpcx6\") pod \"redhat-marketplace-qfvr4\" (UID: \"12325c6c-a817-46ae-b776-01213f064dbf\") " pod="openshift-marketplace/redhat-marketplace-qfvr4" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.328196 4971 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89bb08d2-3252-4d85-91bc-3b595be4c32a-config-volume\") on node \"crc\" DevicePath \"\"" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.328207 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sclj4\" (UniqueName: \"kubernetes.io/projected/89bb08d2-3252-4d85-91bc-3b595be4c32a-kube-api-access-sclj4\") on node \"crc\" DevicePath \"\"" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.328221 4971 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89bb08d2-3252-4d85-91bc-3b595be4c32a-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.358515 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qfvr4"] Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.411184 4971 patch_prober.go:28] interesting 
pod/downloads-7954f5f757-75prl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.411276 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-75prl" podUID="a602a33c-f8ea-45e4-8295-3c2230005ef4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.411190 4971 patch_prober.go:28] interesting pod/downloads-7954f5f757-75prl container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.411750 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-75prl" podUID="a602a33c-f8ea-45e4-8295-3c2230005ef4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.429566 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12325c6c-a817-46ae-b776-01213f064dbf-utilities\") pod \"redhat-marketplace-qfvr4\" (UID: \"12325c6c-a817-46ae-b776-01213f064dbf\") " pod="openshift-marketplace/redhat-marketplace-qfvr4" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.429624 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpcx6\" (UniqueName: \"kubernetes.io/projected/12325c6c-a817-46ae-b776-01213f064dbf-kube-api-access-qpcx6\") pod \"redhat-marketplace-qfvr4\" (UID: \"12325c6c-a817-46ae-b776-01213f064dbf\") " pod="openshift-marketplace/redhat-marketplace-qfvr4" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.429674 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12325c6c-a817-46ae-b776-01213f064dbf-catalog-content\") pod \"redhat-marketplace-qfvr4\" (UID: \"12325c6c-a817-46ae-b776-01213f064dbf\") " pod="openshift-marketplace/redhat-marketplace-qfvr4" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.430011 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12325c6c-a817-46ae-b776-01213f064dbf-utilities\") pod \"redhat-marketplace-qfvr4\" (UID: \"12325c6c-a817-46ae-b776-01213f064dbf\") " pod="openshift-marketplace/redhat-marketplace-qfvr4" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.430329 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12325c6c-a817-46ae-b776-01213f064dbf-catalog-content\") pod \"redhat-marketplace-qfvr4\" (UID: \"12325c6c-a817-46ae-b776-01213f064dbf\") " pod="openshift-marketplace/redhat-marketplace-qfvr4" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.450727 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpcx6\" (UniqueName: \"kubernetes.io/projected/12325c6c-a817-46ae-b776-01213f064dbf-kube-api-access-qpcx6\") pod \"redhat-marketplace-qfvr4\" (UID: 
\"12325c6c-a817-46ae-b776-01213f064dbf\") " pod="openshift-marketplace/redhat-marketplace-qfvr4" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.450880 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-s6mff" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.456219 4971 patch_prober.go:28] interesting pod/router-default-5444994796-s6mff container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 27 06:55:13 crc kubenswrapper[4971]: [-]has-synced failed: reason withheld Nov 27 06:55:13 crc kubenswrapper[4971]: [+]process-running ok Nov 27 06:55:13 crc kubenswrapper[4971]: healthz check failed Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.456269 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-s6mff" podUID="5c9c5353-0cec-4776-9c51-553e90e76a2a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.596884 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.597778 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qfvr4" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.670485 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tgs6w"] Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.671750 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tgs6w" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.684810 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tgs6w"] Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.734124 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pfz6\" (UniqueName: \"kubernetes.io/projected/e2a58f53-1c1f-48b1-82af-845bf9dd1d7e-kube-api-access-5pfz6\") pod \"redhat-marketplace-tgs6w\" (UID: \"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e\") " pod="openshift-marketplace/redhat-marketplace-tgs6w" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.734201 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2a58f53-1c1f-48b1-82af-845bf9dd1d7e-utilities\") pod \"redhat-marketplace-tgs6w\" (UID: \"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e\") " pod="openshift-marketplace/redhat-marketplace-tgs6w" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.734260 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2a58f53-1c1f-48b1-82af-845bf9dd1d7e-catalog-content\") pod \"redhat-marketplace-tgs6w\" (UID: \"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e\") " pod="openshift-marketplace/redhat-marketplace-tgs6w" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.835269 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2a58f53-1c1f-48b1-82af-845bf9dd1d7e-utilities\") pod \"redhat-marketplace-tgs6w\" (UID: \"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e\") " pod="openshift-marketplace/redhat-marketplace-tgs6w" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.835669 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2a58f53-1c1f-48b1-82af-845bf9dd1d7e-catalog-content\") pod \"redhat-marketplace-tgs6w\" (UID: \"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e\") " pod="openshift-marketplace/redhat-marketplace-tgs6w" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.835754 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pfz6\" (UniqueName: \"kubernetes.io/projected/e2a58f53-1c1f-48b1-82af-845bf9dd1d7e-kube-api-access-5pfz6\") pod \"redhat-marketplace-tgs6w\" (UID: \"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e\") " pod="openshift-marketplace/redhat-marketplace-tgs6w" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.836645 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2a58f53-1c1f-48b1-82af-845bf9dd1d7e-catalog-content\") pod \"redhat-marketplace-tgs6w\" (UID: \"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e\") " pod="openshift-marketplace/redhat-marketplace-tgs6w" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.836709 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2a58f53-1c1f-48b1-82af-845bf9dd1d7e-utilities\") pod \"redhat-marketplace-tgs6w\" (UID: \"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e\") " pod="openshift-marketplace/redhat-marketplace-tgs6w" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.838347 4971 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.839668 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv" event={"ID":"89bb08d2-3252-4d85-91bc-3b595be4c32a","Type":"ContainerDied","Data":"18e93e9d0d294263f33043c9717f895703af7c7058d51b8305c52d82e0c95826"} Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.839811 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18e93e9d0d294263f33043c9717f895703af7c7058d51b8305c52d82e0c95826" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.850960 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"aac85824-33f5-48d5-b639-bbcbb83d17e6","Type":"ContainerStarted","Data":"96ff6b7d7a4a32420869ad808e2fe9c79be61900d7c114c4a22aab9b1dda4332"} Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.851574 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"aac85824-33f5-48d5-b639-bbcbb83d17e6","Type":"ContainerStarted","Data":"424de521d41a012c2e3e7befe5286459adacb35ebaaf83942d39eaba56236e85"} Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.863153 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pfz6\" (UniqueName: \"kubernetes.io/projected/e2a58f53-1c1f-48b1-82af-845bf9dd1d7e-kube-api-access-5pfz6\") pod \"redhat-marketplace-tgs6w\" (UID: \"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e\") " pod="openshift-marketplace/redhat-marketplace-tgs6w" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.882090 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qfvr4"] Nov 27 06:55:13 crc kubenswrapper[4971]: W1127 06:55:13.885851 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod12325c6c_a817_46ae_b776_01213f064dbf.slice/crio-2bd668bed8eb62deed3238a0d76995fb7e9ce11337c4dcfe5d15fcf3bf99cc87 WatchSource:0}: Error finding container 2bd668bed8eb62deed3238a0d76995fb7e9ce11337c4dcfe5d15fcf3bf99cc87: Status 404 returned error can't find the container with id 2bd668bed8eb62deed3238a0d76995fb7e9ce11337c4dcfe5d15fcf3bf99cc87 Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.887549 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=1.88464471 podStartE2EDuration="1.88464471s" podCreationTimestamp="2025-11-27 06:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:13.870609386 +0000 UTC m=+152.062653334" watchObservedRunningTime="2025-11-27 06:55:13.88464471 +0000 UTC m=+152.076688638" Nov 27 06:55:13 crc kubenswrapper[4971]: I1127 06:55:13.999565 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tgs6w" Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.245664 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tgs6w"] Nov 27 06:55:14 crc kubenswrapper[4971]: W1127 06:55:14.260371 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2a58f53_1c1f_48b1_82af_845bf9dd1d7e.slice/crio-bf8e00ed0a12887cb28ff1a361a28e698dde50447d249f9f20b355f2106c2912 WatchSource:0}: Error finding container bf8e00ed0a12887cb28ff1a361a28e698dde50447d249f9f20b355f2106c2912: Status 404 returned error can't find the container with id bf8e00ed0a12887cb28ff1a361a28e698dde50447d249f9f20b355f2106c2912 Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.270109 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7pr9t"] Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.271376 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7pr9t" Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.273689 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.284960 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7pr9t"] Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.445404 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbz5t\" (UniqueName: \"kubernetes.io/projected/6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c-kube-api-access-lbz5t\") pod \"redhat-operators-7pr9t\" (UID: \"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c\") " pod="openshift-marketplace/redhat-operators-7pr9t" Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.445470 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c-catalog-content\") pod \"redhat-operators-7pr9t\" (UID: \"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c\") " pod="openshift-marketplace/redhat-operators-7pr9t" Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.445504 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c-utilities\") pod \"redhat-operators-7pr9t\" (UID: \"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c\") " pod="openshift-marketplace/redhat-operators-7pr9t" Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.454027 4971 patch_prober.go:28] interesting pod/router-default-5444994796-s6mff container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 27 06:55:14 crc kubenswrapper[4971]: [-]has-synced failed: reason withheld Nov 27 06:55:14 crc kubenswrapper[4971]: [+]process-running ok Nov 27 06:55:14 crc kubenswrapper[4971]: healthz check failed Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.454100 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-s6mff" podUID="5c9c5353-0cec-4776-9c51-553e90e76a2a" containerName="router" probeResult="failure" output="HTTP probe failed with 
statuscode: 500" Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.550195 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c-utilities\") pod \"redhat-operators-7pr9t\" (UID: \"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c\") " pod="openshift-marketplace/redhat-operators-7pr9t" Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.550683 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbz5t\" (UniqueName: \"kubernetes.io/projected/6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c-kube-api-access-lbz5t\") pod \"redhat-operators-7pr9t\" (UID: \"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c\") " pod="openshift-marketplace/redhat-operators-7pr9t" Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.550723 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c-catalog-content\") pod \"redhat-operators-7pr9t\" (UID: \"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c\") " pod="openshift-marketplace/redhat-operators-7pr9t" Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.551207 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c-catalog-content\") pod \"redhat-operators-7pr9t\" (UID: \"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c\") " pod="openshift-marketplace/redhat-operators-7pr9t" Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.551516 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c-utilities\") pod \"redhat-operators-7pr9t\" (UID: \"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c\") " pod="openshift-marketplace/redhat-operators-7pr9t" Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.594910 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbz5t\" (UniqueName: \"kubernetes.io/projected/6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c-kube-api-access-lbz5t\") pod \"redhat-operators-7pr9t\" (UID: \"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c\") " pod="openshift-marketplace/redhat-operators-7pr9t" Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.613272 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7pr9t" Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.671801 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8jk6j"] Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.676403 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8jk6j" Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.697706 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8jk6j"] Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.861730 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9d2eabc-ab6d-4bda-b71e-5b32a68105de-utilities\") pod \"redhat-operators-8jk6j\" (UID: \"a9d2eabc-ab6d-4bda-b71e-5b32a68105de\") " pod="openshift-marketplace/redhat-operators-8jk6j" Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.861809 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9d2eabc-ab6d-4bda-b71e-5b32a68105de-catalog-content\") pod \"redhat-operators-8jk6j\" (UID: \"a9d2eabc-ab6d-4bda-b71e-5b32a68105de\") " pod="openshift-marketplace/redhat-operators-8jk6j" Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.861835 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2nbh\" (UniqueName: \"kubernetes.io/projected/a9d2eabc-ab6d-4bda-b71e-5b32a68105de-kube-api-access-z2nbh\") pod \"redhat-operators-8jk6j\" (UID: \"a9d2eabc-ab6d-4bda-b71e-5b32a68105de\") " pod="openshift-marketplace/redhat-operators-8jk6j" Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.878485 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7pr9t"] Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.886158 4971 generic.go:334] "Generic (PLEG): container finished" podID="12325c6c-a817-46ae-b776-01213f064dbf" containerID="2889e4ac031d4ab036308eb77bb034fc39693c92bbc0872f14e4b6ef7b9a2e8e" exitCode=0 Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.886216 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qfvr4" event={"ID":"12325c6c-a817-46ae-b776-01213f064dbf","Type":"ContainerDied","Data":"2889e4ac031d4ab036308eb77bb034fc39693c92bbc0872f14e4b6ef7b9a2e8e"} Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.886241 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qfvr4" event={"ID":"12325c6c-a817-46ae-b776-01213f064dbf","Type":"ContainerStarted","Data":"2bd668bed8eb62deed3238a0d76995fb7e9ce11337c4dcfe5d15fcf3bf99cc87"} Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.894817 4971 generic.go:334] "Generic (PLEG): container finished" podID="e2a58f53-1c1f-48b1-82af-845bf9dd1d7e" containerID="881e29e578423ae2e0165bc117506c7e05bbb5979da589e919a29b683b74055b" exitCode=0 Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.894892 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tgs6w" event={"ID":"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e","Type":"ContainerDied","Data":"881e29e578423ae2e0165bc117506c7e05bbb5979da589e919a29b683b74055b"} Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.894918 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tgs6w" event={"ID":"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e","Type":"ContainerStarted","Data":"bf8e00ed0a12887cb28ff1a361a28e698dde50447d249f9f20b355f2106c2912"} Nov 27 06:55:14 crc kubenswrapper[4971]: W1127 06:55:14.897323 4971 manager.go:1169] Failed to process watch 
event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6dd9ee5f_ff22_401e_86c2_eb7f809a3f8c.slice/crio-cf136719246fb2fbbd7867638b088e6c65e7ef06384a5ed4274749e24574e691 WatchSource:0}: Error finding container cf136719246fb2fbbd7867638b088e6c65e7ef06384a5ed4274749e24574e691: Status 404 returned error can't find the container with id cf136719246fb2fbbd7867638b088e6c65e7ef06384a5ed4274749e24574e691
Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.902181 4971 generic.go:334] "Generic (PLEG): container finished" podID="aac85824-33f5-48d5-b639-bbcbb83d17e6" containerID="96ff6b7d7a4a32420869ad808e2fe9c79be61900d7c114c4a22aab9b1dda4332" exitCode=0
Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.902217 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"aac85824-33f5-48d5-b639-bbcbb83d17e6","Type":"ContainerDied","Data":"96ff6b7d7a4a32420869ad808e2fe9c79be61900d7c114c4a22aab9b1dda4332"}
Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.963422 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9d2eabc-ab6d-4bda-b71e-5b32a68105de-utilities\") pod \"redhat-operators-8jk6j\" (UID: \"a9d2eabc-ab6d-4bda-b71e-5b32a68105de\") " pod="openshift-marketplace/redhat-operators-8jk6j"
Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.963512 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9d2eabc-ab6d-4bda-b71e-5b32a68105de-catalog-content\") pod \"redhat-operators-8jk6j\" (UID: \"a9d2eabc-ab6d-4bda-b71e-5b32a68105de\") " pod="openshift-marketplace/redhat-operators-8jk6j"
Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.963545 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2nbh\" (UniqueName: \"kubernetes.io/projected/a9d2eabc-ab6d-4bda-b71e-5b32a68105de-kube-api-access-z2nbh\") pod \"redhat-operators-8jk6j\" (UID: \"a9d2eabc-ab6d-4bda-b71e-5b32a68105de\") " pod="openshift-marketplace/redhat-operators-8jk6j"
Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.964115 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9d2eabc-ab6d-4bda-b71e-5b32a68105de-utilities\") pod \"redhat-operators-8jk6j\" (UID: \"a9d2eabc-ab6d-4bda-b71e-5b32a68105de\") " pod="openshift-marketplace/redhat-operators-8jk6j"
Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.964140 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9d2eabc-ab6d-4bda-b71e-5b32a68105de-catalog-content\") pod \"redhat-operators-8jk6j\" (UID: \"a9d2eabc-ab6d-4bda-b71e-5b32a68105de\") " pod="openshift-marketplace/redhat-operators-8jk6j"
Nov 27 06:55:14 crc kubenswrapper[4971]: I1127 06:55:14.980018 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2nbh\" (UniqueName: \"kubernetes.io/projected/a9d2eabc-ab6d-4bda-b71e-5b32a68105de-kube-api-access-z2nbh\") pod \"redhat-operators-8jk6j\" (UID: \"a9d2eabc-ab6d-4bda-b71e-5b32a68105de\") " pod="openshift-marketplace/redhat-operators-8jk6j"
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.028291 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8jk6j"
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.404125 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.405036 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.409051 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.413691 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.422914 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.456463 4971 patch_prober.go:28] interesting pod/router-default-5444994796-s6mff container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 27 06:55:15 crc kubenswrapper[4971]: [-]has-synced failed: reason withheld
Nov 27 06:55:15 crc kubenswrapper[4971]: [+]process-running ok
Nov 27 06:55:15 crc kubenswrapper[4971]: healthz check failed
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.457134 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-s6mff" podUID="5c9c5353-0cec-4776-9c51-553e90e76a2a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.477439 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6921236-6566-41da-a8bb-9682ef6f1b54-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"c6921236-6566-41da-a8bb-9682ef6f1b54\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.477597 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c6921236-6566-41da-a8bb-9682ef6f1b54-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"c6921236-6566-41da-a8bb-9682ef6f1b54\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.498541 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8jk6j"]
Nov 27 06:55:15 crc kubenswrapper[4971]: W1127 06:55:15.542260 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda9d2eabc_ab6d_4bda_b71e_5b32a68105de.slice/crio-5ab4e9ac34708e2d2a21e8099f01ff144be930515255ca1051817fb4f3f5e5b8 WatchSource:0}: Error finding container 5ab4e9ac34708e2d2a21e8099f01ff144be930515255ca1051817fb4f3f5e5b8: Status 404 returned error can't find the container with id 5ab4e9ac34708e2d2a21e8099f01ff144be930515255ca1051817fb4f3f5e5b8
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.578352 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6921236-6566-41da-a8bb-9682ef6f1b54-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"c6921236-6566-41da-a8bb-9682ef6f1b54\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.578503 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c6921236-6566-41da-a8bb-9682ef6f1b54-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"c6921236-6566-41da-a8bb-9682ef6f1b54\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.578611 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c6921236-6566-41da-a8bb-9682ef6f1b54-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"c6921236-6566-41da-a8bb-9682ef6f1b54\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.626364 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6921236-6566-41da-a8bb-9682ef6f1b54-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"c6921236-6566-41da-a8bb-9682ef6f1b54\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.726095 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.920481 4971 generic.go:334] "Generic (PLEG): container finished" podID="6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c" containerID="091514dc329b7c2121a5cd29126223055b6006787bd87b45acf23c5c6fa04c58" exitCode=0
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.921082 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7pr9t" event={"ID":"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c","Type":"ContainerDied","Data":"091514dc329b7c2121a5cd29126223055b6006787bd87b45acf23c5c6fa04c58"}
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.921193 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7pr9t" event={"ID":"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c","Type":"ContainerStarted","Data":"cf136719246fb2fbbd7867638b088e6c65e7ef06384a5ed4274749e24574e691"}
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.927652 4971 generic.go:334] "Generic (PLEG): container finished" podID="a9d2eabc-ab6d-4bda-b71e-5b32a68105de" containerID="b1dbac5ac9a30b40d521917f2f3f0e2bf105d89ff25dbdf63d73d81c8a89d9d8" exitCode=0
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.928322 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8jk6j" event={"ID":"a9d2eabc-ab6d-4bda-b71e-5b32a68105de","Type":"ContainerDied","Data":"b1dbac5ac9a30b40d521917f2f3f0e2bf105d89ff25dbdf63d73d81c8a89d9d8"}
Nov 27 06:55:15 crc kubenswrapper[4971]: I1127 06:55:15.928352 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8jk6j" event={"ID":"a9d2eabc-ab6d-4bda-b71e-5b32a68105de","Type":"ContainerStarted","Data":"5ab4e9ac34708e2d2a21e8099f01ff144be930515255ca1051817fb4f3f5e5b8"}
Nov 27 06:55:16 crc kubenswrapper[4971]: I1127 06:55:16.066874 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Nov 27 06:55:16 crc kubenswrapper[4971]: W1127 06:55:16.090022 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podc6921236_6566_41da_a8bb_9682ef6f1b54.slice/crio-ad5bebd1ad061bebee9d4ab23ff74937745192dc11d59d4801abd6591bbd7a1d WatchSource:0}: Error finding container ad5bebd1ad061bebee9d4ab23ff74937745192dc11d59d4801abd6591bbd7a1d: Status 404 returned error can't find the container with id ad5bebd1ad061bebee9d4ab23ff74937745192dc11d59d4801abd6591bbd7a1d
Nov 27 06:55:16 crc kubenswrapper[4971]: I1127 06:55:16.305004 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 27 06:55:16 crc kubenswrapper[4971]: I1127 06:55:16.401603 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aac85824-33f5-48d5-b639-bbcbb83d17e6-kubelet-dir\") pod \"aac85824-33f5-48d5-b639-bbcbb83d17e6\" (UID: \"aac85824-33f5-48d5-b639-bbcbb83d17e6\") "
Nov 27 06:55:16 crc kubenswrapper[4971]: I1127 06:55:16.401736 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aac85824-33f5-48d5-b639-bbcbb83d17e6-kube-api-access\") pod \"aac85824-33f5-48d5-b639-bbcbb83d17e6\" (UID: \"aac85824-33f5-48d5-b639-bbcbb83d17e6\") "
Nov 27 06:55:16 crc kubenswrapper[4971]: I1127 06:55:16.401758 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/aac85824-33f5-48d5-b639-bbcbb83d17e6-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "aac85824-33f5-48d5-b639-bbcbb83d17e6" (UID: "aac85824-33f5-48d5-b639-bbcbb83d17e6"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 27 06:55:16 crc kubenswrapper[4971]: I1127 06:55:16.402206 4971 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aac85824-33f5-48d5-b639-bbcbb83d17e6-kubelet-dir\") on node \"crc\" DevicePath \"\""
Nov 27 06:55:16 crc kubenswrapper[4971]: I1127 06:55:16.427858 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aac85824-33f5-48d5-b639-bbcbb83d17e6-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "aac85824-33f5-48d5-b639-bbcbb83d17e6" (UID: "aac85824-33f5-48d5-b639-bbcbb83d17e6"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:55:16 crc kubenswrapper[4971]: I1127 06:55:16.460616 4971 patch_prober.go:28] interesting pod/router-default-5444994796-s6mff container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 27 06:55:16 crc kubenswrapper[4971]: [-]has-synced failed: reason withheld
Nov 27 06:55:16 crc kubenswrapper[4971]: [+]process-running ok
Nov 27 06:55:16 crc kubenswrapper[4971]: healthz check failed
Nov 27 06:55:16 crc kubenswrapper[4971]: I1127 06:55:16.461018 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-s6mff" podUID="5c9c5353-0cec-4776-9c51-553e90e76a2a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 27 06:55:16 crc kubenswrapper[4971]: I1127 06:55:16.503952 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aac85824-33f5-48d5-b639-bbcbb83d17e6-kube-api-access\") on node \"crc\" DevicePath \"\""
Nov 27 06:55:16 crc kubenswrapper[4971]: I1127 06:55:16.730237 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb"
Nov 27 06:55:16 crc kubenswrapper[4971]: I1127 06:55:16.955584 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"c6921236-6566-41da-a8bb-9682ef6f1b54","Type":"ContainerStarted","Data":"ad5bebd1ad061bebee9d4ab23ff74937745192dc11d59d4801abd6591bbd7a1d"}
Nov 27 06:55:16 crc kubenswrapper[4971]: I1127 06:55:16.958370 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"aac85824-33f5-48d5-b639-bbcbb83d17e6","Type":"ContainerDied","Data":"424de521d41a012c2e3e7befe5286459adacb35ebaaf83942d39eaba56236e85"}
Nov 27 06:55:16 crc kubenswrapper[4971]: I1127 06:55:16.958414 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="424de521d41a012c2e3e7befe5286459adacb35ebaaf83942d39eaba56236e85"
Nov 27 06:55:16 crc kubenswrapper[4971]: I1127 06:55:16.958502 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 27 06:55:17 crc kubenswrapper[4971]: I1127 06:55:17.454457 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-s6mff"
Nov 27 06:55:17 crc kubenswrapper[4971]: I1127 06:55:17.456757 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-s6mff"
Nov 27 06:55:17 crc kubenswrapper[4971]: I1127 06:55:17.977098 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"c6921236-6566-41da-a8bb-9682ef6f1b54","Type":"ContainerStarted","Data":"e101db9b1a02a2833428b9677c33d0ebeb798b74a6cbc4f98ca541620ced8532"}
Nov 27 06:55:17 crc kubenswrapper[4971]: I1127 06:55:17.992290 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.992252466 podStartE2EDuration="2.992252466s" podCreationTimestamp="2025-11-27 06:55:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:17.989385637 +0000 UTC m=+156.181429565" watchObservedRunningTime="2025-11-27 06:55:17.992252466 +0000 UTC m=+156.184296384"
Nov 27 06:55:18 crc kubenswrapper[4971]: I1127 06:55:18.744517 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-k2ldt"
Nov 27 06:55:18 crc kubenswrapper[4971]: I1127 06:55:18.984694 4971 generic.go:334] "Generic (PLEG): container finished" podID="c6921236-6566-41da-a8bb-9682ef6f1b54" containerID="e101db9b1a02a2833428b9677c33d0ebeb798b74a6cbc4f98ca541620ced8532" exitCode=0
Nov 27 06:55:18 crc kubenswrapper[4971]: I1127 06:55:18.984762 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"c6921236-6566-41da-a8bb-9682ef6f1b54","Type":"ContainerDied","Data":"e101db9b1a02a2833428b9677c33d0ebeb798b74a6cbc4f98ca541620ced8532"}
Nov 27 06:55:22 crc kubenswrapper[4971]: I1127 06:55:22.982290 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-blqcq"
Nov 27 06:55:22 crc kubenswrapper[4971]: I1127 06:55:22.987286 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-blqcq"
Nov 27 06:55:23 crc kubenswrapper[4971]: I1127 06:55:23.400715 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-75prl"
Nov 27 06:55:24 crc kubenswrapper[4971]: I1127 06:55:24.861520 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs\") pod \"network-metrics-daemon-pdn5j\" (UID: \"e710c835-f600-448f-a110-4ff4cef9d5f6\") " pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:55:24 crc kubenswrapper[4971]: I1127 06:55:24.868245 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e710c835-f600-448f-a110-4ff4cef9d5f6-metrics-certs\") pod \"network-metrics-daemon-pdn5j\" (UID: \"e710c835-f600-448f-a110-4ff4cef9d5f6\") " pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:55:25 crc kubenswrapper[4971]: I1127 06:55:25.070142 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pdn5j"
Nov 27 06:55:26 crc kubenswrapper[4971]: I1127 06:55:26.413236 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 06:55:26 crc kubenswrapper[4971]: I1127 06:55:26.413324 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 06:55:31 crc kubenswrapper[4971]: I1127 06:55:31.867067 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9"
Nov 27 06:55:35 crc kubenswrapper[4971]: I1127 06:55:35.489106 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 27 06:55:35 crc kubenswrapper[4971]: I1127 06:55:35.530412 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6921236-6566-41da-a8bb-9682ef6f1b54-kube-api-access\") pod \"c6921236-6566-41da-a8bb-9682ef6f1b54\" (UID: \"c6921236-6566-41da-a8bb-9682ef6f1b54\") "
Nov 27 06:55:35 crc kubenswrapper[4971]: I1127 06:55:35.530493 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c6921236-6566-41da-a8bb-9682ef6f1b54-kubelet-dir\") pod \"c6921236-6566-41da-a8bb-9682ef6f1b54\" (UID: \"c6921236-6566-41da-a8bb-9682ef6f1b54\") "
Nov 27 06:55:35 crc kubenswrapper[4971]: I1127 06:55:35.530853 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6921236-6566-41da-a8bb-9682ef6f1b54-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "c6921236-6566-41da-a8bb-9682ef6f1b54" (UID: "c6921236-6566-41da-a8bb-9682ef6f1b54"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 27 06:55:35 crc kubenswrapper[4971]: I1127 06:55:35.535702 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6921236-6566-41da-a8bb-9682ef6f1b54-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "c6921236-6566-41da-a8bb-9682ef6f1b54" (UID: "c6921236-6566-41da-a8bb-9682ef6f1b54"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:55:35 crc kubenswrapper[4971]: I1127 06:55:35.631550 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6921236-6566-41da-a8bb-9682ef6f1b54-kube-api-access\") on node \"crc\" DevicePath \"\""
Nov 27 06:55:35 crc kubenswrapper[4971]: I1127 06:55:35.631589 4971 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c6921236-6566-41da-a8bb-9682ef6f1b54-kubelet-dir\") on node \"crc\" DevicePath \"\""
Nov 27 06:55:36 crc kubenswrapper[4971]: I1127 06:55:36.097565 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"c6921236-6566-41da-a8bb-9682ef6f1b54","Type":"ContainerDied","Data":"ad5bebd1ad061bebee9d4ab23ff74937745192dc11d59d4801abd6591bbd7a1d"}
Nov 27 06:55:36 crc kubenswrapper[4971]: I1127 06:55:36.097606 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad5bebd1ad061bebee9d4ab23ff74937745192dc11d59d4801abd6591bbd7a1d"
Nov 27 06:55:36 crc kubenswrapper[4971]: I1127 06:55:36.097642 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 27 06:55:40 crc kubenswrapper[4971]: E1127 06:55:40.180647 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:b45b4080e75db66dbb2f4d8403f29133c1829a6e7a5055752f4267aea3a23894: Get \"https://registry.redhat.io/v2/redhat/redhat-operator-index/blobs/sha256:b45b4080e75db66dbb2f4d8403f29133c1829a6e7a5055752f4267aea3a23894\": context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Nov 27 06:55:40 crc kubenswrapper[4971]: E1127 06:55:40.181348 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lbz5t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-7pr9t_openshift-marketplace(6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:b45b4080e75db66dbb2f4d8403f29133c1829a6e7a5055752f4267aea3a23894: Get \"https://registry.redhat.io/v2/redhat/redhat-operator-index/blobs/sha256:b45b4080e75db66dbb2f4d8403f29133c1829a6e7a5055752f4267aea3a23894\": context canceled" logger="UnhandledError"
Nov 27 06:55:40 crc kubenswrapper[4971]: E1127 06:55:40.182563 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:b45b4080e75db66dbb2f4d8403f29133c1829a6e7a5055752f4267aea3a23894: Get \\\"https://registry.redhat.io/v2/redhat/redhat-operator-index/blobs/sha256:b45b4080e75db66dbb2f4d8403f29133c1829a6e7a5055752f4267aea3a23894\\\": context canceled\"" pod="openshift-marketplace/redhat-operators-7pr9t" podUID="6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c"
Nov 27 06:55:41 crc kubenswrapper[4971]: E1127 06:55:41.821509 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-7pr9t" podUID="6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c"
Nov 27 06:55:43 crc kubenswrapper[4971]: I1127 06:55:43.506461 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-p5wn9"
Nov 27 06:55:44 crc kubenswrapper[4971]: E1127 06:55:44.919284 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Nov 27 06:55:44 crc kubenswrapper[4971]: E1127 06:55:44.919918 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7vmhq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-dvz2k_openshift-marketplace(75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Nov 27 06:55:44 crc kubenswrapper[4971]: E1127 06:55:44.921324 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-dvz2k" podUID="75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c"
Nov 27 06:55:45 crc kubenswrapper[4971]: I1127 06:55:45.048913 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-pdn5j"]
Nov 27 06:55:45 crc kubenswrapper[4971]: E1127 06:55:45.900124 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Nov 27 06:55:45 crc kubenswrapper[4971]: E1127 06:55:45.900660 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qpcx6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-qfvr4_openshift-marketplace(12325c6c-a817-46ae-b776-01213f064dbf): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Nov 27 06:55:45 crc kubenswrapper[4971]: E1127 06:55:45.901916 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-qfvr4" podUID="12325c6c-a817-46ae-b776-01213f064dbf"
Nov 27 06:55:45 crc kubenswrapper[4971]: E1127 06:55:45.938802 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Nov 27 06:55:45 crc kubenswrapper[4971]: E1127 06:55:45.938962 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5pfz6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-tgs6w_openshift-marketplace(e2a58f53-1c1f-48b1-82af-845bf9dd1d7e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Nov 27 06:55:45 crc kubenswrapper[4971]: E1127 06:55:45.940167 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-tgs6w" podUID="e2a58f53-1c1f-48b1-82af-845bf9dd1d7e"
Nov 27 06:55:50 crc kubenswrapper[4971]: I1127 06:55:50.581396 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 27 06:55:52 crc kubenswrapper[4971]: E1127 06:55:52.466279 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-qfvr4" podUID="12325c6c-a817-46ae-b776-01213f064dbf"
Nov 27 06:55:52 crc kubenswrapper[4971]: E1127 06:55:52.466712 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-tgs6w" podUID="e2a58f53-1c1f-48b1-82af-845bf9dd1d7e"
Nov 27 06:55:53 crc kubenswrapper[4971]: I1127 06:55:53.183814 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-pdn5j" event={"ID":"e710c835-f600-448f-a110-4ff4cef9d5f6","Type":"ContainerStarted","Data":"f576ca21411c4c366f105e41fba6017e5206bd497f8b213144f906386964225a"}
Nov 27 06:55:53 crc kubenswrapper[4971]: I1127 06:55:53.205560 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Nov 27 06:55:53 crc kubenswrapper[4971]: E1127 06:55:53.205839 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aac85824-33f5-48d5-b639-bbcbb83d17e6" containerName="pruner"
Nov 27 06:55:53 crc kubenswrapper[4971]: I1127 06:55:53.205853 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="aac85824-33f5-48d5-b639-bbcbb83d17e6" containerName="pruner"
Nov 27 06:55:53 crc kubenswrapper[4971]: E1127 06:55:53.205872 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6921236-6566-41da-a8bb-9682ef6f1b54" containerName="pruner"
Nov 27 06:55:53 crc kubenswrapper[4971]: I1127 06:55:53.205880 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6921236-6566-41da-a8bb-9682ef6f1b54" containerName="pruner"
Nov 27 06:55:53 crc kubenswrapper[4971]: I1127 06:55:53.206118 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6921236-6566-41da-a8bb-9682ef6f1b54" containerName="pruner"
Nov 27 06:55:53 crc kubenswrapper[4971]: I1127 06:55:53.206136 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="aac85824-33f5-48d5-b639-bbcbb83d17e6" containerName="pruner"
Nov 27 06:55:53 crc kubenswrapper[4971]: I1127 06:55:53.207082 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 27 06:55:53 crc kubenswrapper[4971]: I1127 06:55:53.209025 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Nov 27 06:55:53 crc kubenswrapper[4971]: I1127 06:55:53.209281 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Nov 27 06:55:53 crc kubenswrapper[4971]: I1127 06:55:53.214213 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Nov 27 06:55:53 crc kubenswrapper[4971]: I1127 06:55:53.373199 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c6734555-2043-45d8-80da-b2387075068a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c6734555-2043-45d8-80da-b2387075068a\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 27 06:55:53 crc kubenswrapper[4971]: I1127 06:55:53.373315 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6734555-2043-45d8-80da-b2387075068a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c6734555-2043-45d8-80da-b2387075068a\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 27 06:55:53 crc kubenswrapper[4971]: I1127 06:55:53.474797 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6734555-2043-45d8-80da-b2387075068a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c6734555-2043-45d8-80da-b2387075068a\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 27 06:55:53 crc kubenswrapper[4971]: I1127 06:55:53.474914 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c6734555-2043-45d8-80da-b2387075068a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c6734555-2043-45d8-80da-b2387075068a\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 27 06:55:53 crc kubenswrapper[4971]: I1127 06:55:53.475017 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c6734555-2043-45d8-80da-b2387075068a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c6734555-2043-45d8-80da-b2387075068a\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 27 06:55:53 crc kubenswrapper[4971]: I1127 06:55:53.494402 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6734555-2043-45d8-80da-b2387075068a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c6734555-2043-45d8-80da-b2387075068a\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 27 06:55:53 crc kubenswrapper[4971]: E1127 06:55:53.504395 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Nov 27 06:55:53 crc kubenswrapper[4971]: E1127 06:55:53.504584 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k2cll,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-bhc8q_openshift-marketplace(dda2cad1-cdbe-48f8-ba4c-3074565de24b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Nov 27 06:55:53 crc kubenswrapper[4971]: E1127 06:55:53.505771 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-bhc8q" podUID="dda2cad1-cdbe-48f8-ba4c-3074565de24b"
Nov 27 06:55:53 crc kubenswrapper[4971]: I1127 06:55:53.528652 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 27 06:55:53 crc kubenswrapper[4971]: E1127 06:55:53.748046 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Nov 27 06:55:53 crc kubenswrapper[4971]: E1127 06:55:53.748257 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-thx9h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-rnpmr_openshift-marketplace(2bec9dd3-b528-4b67-b949-8e9eb074e222): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Nov 27 06:55:53 crc kubenswrapper[4971]: E1127 06:55:53.749540 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-rnpmr" podUID="2bec9dd3-b528-4b67-b949-8e9eb074e222"
Nov 27 06:55:53 crc kubenswrapper[4971]: E1127 06:55:53.838088 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-dvz2k" podUID="75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c"
Nov 27 06:55:54 crc kubenswrapper[4971]: I1127 06:55:54.052373 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Nov 27 06:55:54 crc kubenswrapper[4971]: W1127 06:55:54.056721 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podc6734555_2043_45d8_80da_b2387075068a.slice/crio-338fcb9e6633d6eebe8282bf1f76d007c7429cd9201afcba4dbe7beef26e32b4 WatchSource:0}: Error finding container 338fcb9e6633d6eebe8282bf1f76d007c7429cd9201afcba4dbe7beef26e32b4: Status 404 returned error can't find the container with id 338fcb9e6633d6eebe8282bf1f76d007c7429cd9201afcba4dbe7beef26e32b4
Nov 27 06:55:54 crc kubenswrapper[4971]: I1127 06:55:54.192572 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c6734555-2043-45d8-80da-b2387075068a","Type":"ContainerStarted","Data":"338fcb9e6633d6eebe8282bf1f76d007c7429cd9201afcba4dbe7beef26e32b4"}
Nov 27 06:55:54 crc kubenswrapper[4971]: I1127 06:55:54.194914 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhgrm" event={"ID":"8978d4d3-664f-40f4-b33c-83cd92577bc3","Type":"ContainerStarted","Data":"b462bf549ccf023ad9cda607d873082c532d9d373f7361bae301c46ce448bedb"}
Nov 27 06:55:54 crc kubenswrapper[4971]: I1127 06:55:54.197366 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8jk6j" event={"ID":"a9d2eabc-ab6d-4bda-b71e-5b32a68105de","Type":"ContainerStarted","Data":"0920598e949068af8232779951132ee6368eca2a5413a9f2a33f0510a9a3b7c6"}
Nov 27 06:55:54 crc kubenswrapper[4971]: E1127 06:55:54.199305 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-rnpmr" podUID="2bec9dd3-b528-4b67-b949-8e9eb074e222"
Nov 27 06:55:54 crc kubenswrapper[4971]: E1127 06:55:54.199403 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-bhc8q" podUID="dda2cad1-cdbe-48f8-ba4c-3074565de24b"
Nov 27 06:55:55 crc kubenswrapper[4971]: I1127 06:55:55.208503 4971 generic.go:334] "Generic (PLEG): container finished" podID="a9d2eabc-ab6d-4bda-b71e-5b32a68105de" containerID="0920598e949068af8232779951132ee6368eca2a5413a9f2a33f0510a9a3b7c6" exitCode=0
Nov 27 06:55:55 crc kubenswrapper[4971]: I1127 06:55:55.208755 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8jk6j" event={"ID":"a9d2eabc-ab6d-4bda-b71e-5b32a68105de","Type":"ContainerDied","Data":"0920598e949068af8232779951132ee6368eca2a5413a9f2a33f0510a9a3b7c6"}
Nov 27 06:55:55 crc kubenswrapper[4971]: I1127 06:55:55.214440 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-pdn5j" event={"ID":"e710c835-f600-448f-a110-4ff4cef9d5f6","Type":"ContainerStarted","Data":"64b203260af136ca17bb0c4784b1b981411b87f9996779c9175df2608176cc4e"}
Nov 27 06:55:55 crc kubenswrapper[4971]: I1127 06:55:55.214477 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-pdn5j" event={"ID":"e710c835-f600-448f-a110-4ff4cef9d5f6","Type":"ContainerStarted","Data":"f11fa711acf9007cc8b986eb82248c75ffef19e235ea8a0c522d249ba3168923"}
Nov 27 06:55:55 crc kubenswrapper[4971]: I1127 06:55:55.216074 4971 generic.go:334] "Generic (PLEG): container finished" podID="c6734555-2043-45d8-80da-b2387075068a" containerID="f5885a064fbd2f1794f50d7b882178e6f4cfb45b0bec2e446b069ba7de798bf4" exitCode=0
Nov 27 06:55:55 crc kubenswrapper[4971]: I1127 06:55:55.216138 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c6734555-2043-45d8-80da-b2387075068a","Type":"ContainerDied","Data":"f5885a064fbd2f1794f50d7b882178e6f4cfb45b0bec2e446b069ba7de798bf4"}
Nov 27 06:55:55 crc kubenswrapper[4971]: I1127 06:55:55.221731 4971 generic.go:334] "Generic (PLEG): container finished" podID="8978d4d3-664f-40f4-b33c-83cd92577bc3" containerID="b462bf549ccf023ad9cda607d873082c532d9d373f7361bae301c46ce448bedb" exitCode=0
Nov 27 06:55:55 crc kubenswrapper[4971]: I1127 06:55:55.221787 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhgrm" event={"ID":"8978d4d3-664f-40f4-b33c-83cd92577bc3","Type":"ContainerDied","Data":"b462bf549ccf023ad9cda607d873082c532d9d373f7361bae301c46ce448bedb"}
Nov 27 06:55:55 crc kubenswrapper[4971]: I1127 06:55:55.278551 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-pdn5j" podStartSLOduration=173.278508393 podStartE2EDuration="2m53.278508393s" podCreationTimestamp="2025-11-27 06:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:55:55.270139954 +0000 UTC m=+193.462183892" watchObservedRunningTime="2025-11-27 06:55:55.278508393 +0000 UTC m=+193.470552311"
Nov 27 06:55:56 crc kubenswrapper[4971]: I1127 06:55:56.227999 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7pr9t" event={"ID":"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c","Type":"ContainerStarted","Data":"771d9ad12566e5a483ac39cf336bbf56d8b49d98d00750fd6eee3f29f7ff9670"}
Nov 27 06:55:56 crc kubenswrapper[4971]: I1127 06:55:56.412840 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 06:55:56 crc kubenswrapper[4971]: I1127 06:55:56.412917 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 06:55:56 crc kubenswrapper[4971]: I1127 06:55:56.416556 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 27 06:55:56 crc kubenswrapper[4971]: I1127 06:55:56.513952 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c6734555-2043-45d8-80da-b2387075068a-kubelet-dir\") pod \"c6734555-2043-45d8-80da-b2387075068a\" (UID: \"c6734555-2043-45d8-80da-b2387075068a\") "
Nov 27 06:55:56 crc kubenswrapper[4971]: I1127 06:55:56.514024 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6734555-2043-45d8-80da-b2387075068a-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "c6734555-2043-45d8-80da-b2387075068a" (UID: "c6734555-2043-45d8-80da-b2387075068a"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 27 06:55:56 crc kubenswrapper[4971]: I1127 06:55:56.514310 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6734555-2043-45d8-80da-b2387075068a-kube-api-access\") pod \"c6734555-2043-45d8-80da-b2387075068a\" (UID: \"c6734555-2043-45d8-80da-b2387075068a\") "
Nov 27 06:55:56 crc kubenswrapper[4971]: I1127 06:55:56.514796 4971 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c6734555-2043-45d8-80da-b2387075068a-kubelet-dir\") on node \"crc\" DevicePath \"\""
Nov 27 06:55:56 crc kubenswrapper[4971]: I1127 06:55:56.522713 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6734555-2043-45d8-80da-b2387075068a-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "c6734555-2043-45d8-80da-b2387075068a" (UID: "c6734555-2043-45d8-80da-b2387075068a"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:55:56 crc kubenswrapper[4971]: I1127 06:55:56.615938 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6734555-2043-45d8-80da-b2387075068a-kube-api-access\") on node \"crc\" DevicePath \"\""
Nov 27 06:55:57 crc kubenswrapper[4971]: I1127 06:55:57.234911 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 27 06:55:57 crc kubenswrapper[4971]: I1127 06:55:57.234944 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"c6734555-2043-45d8-80da-b2387075068a","Type":"ContainerDied","Data":"338fcb9e6633d6eebe8282bf1f76d007c7429cd9201afcba4dbe7beef26e32b4"}
Nov 27 06:55:57 crc kubenswrapper[4971]: I1127 06:55:57.234995 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="338fcb9e6633d6eebe8282bf1f76d007c7429cd9201afcba4dbe7beef26e32b4"
Nov 27 06:55:57 crc kubenswrapper[4971]: I1127 06:55:57.237049 4971 generic.go:334] "Generic (PLEG): container finished" podID="6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c" containerID="771d9ad12566e5a483ac39cf336bbf56d8b49d98d00750fd6eee3f29f7ff9670" exitCode=0
Nov 27 06:55:57 crc kubenswrapper[4971]: I1127 06:55:57.237085 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7pr9t" event={"ID":"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c","Type":"ContainerDied","Data":"771d9ad12566e5a483ac39cf336bbf56d8b49d98d00750fd6eee3f29f7ff9670"}
Nov 27 06:56:00 crc kubenswrapper[4971]: I1127 06:56:00.272188 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhgrm" event={"ID":"8978d4d3-664f-40f4-b33c-83cd92577bc3","Type":"ContainerStarted","Data":"b7a1cde51bbf318cdf838480249f3afd0bb260ed774d61fdfe43c67c9b42a1c2"}
Nov 27 06:56:00 crc kubenswrapper[4971]: I1127 06:56:00.290339 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fhgrm" podStartSLOduration=3.09210673 podStartE2EDuration="49.290307674s" podCreationTimestamp="2025-11-27 06:55:11 +0000 UTC" firstStartedPulling="2025-11-27 06:55:12.814876246 +0000 UTC m=+151.006920164" lastFinishedPulling="2025-11-27 06:55:59.01307715 +0000 UTC m=+197.205121108" observedRunningTime="2025-11-27 06:56:00.287851235 +0000 UTC m=+198.479895163" watchObservedRunningTime="2025-11-27 06:56:00.290307674 +0000 UTC m=+198.482351592"
Nov 27 06:56:00 crc kubenswrapper[4971]: I1127 06:56:00.984276 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Nov 27 06:56:00 crc kubenswrapper[4971]: E1127 06:56:00.984648 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6734555-2043-45d8-80da-b2387075068a" containerName="pruner"
Nov 27 06:56:00 crc kubenswrapper[4971]: I1127 06:56:00.984672 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6734555-2043-45d8-80da-b2387075068a" containerName="pruner"
Nov 27 06:56:00 crc kubenswrapper[4971]: I1127 06:56:00.984824 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6734555-2043-45d8-80da-b2387075068a" containerName="pruner"
Nov 27 06:56:00 crc kubenswrapper[4971]: I1127 06:56:00.985336 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Nov 27 06:56:00 crc kubenswrapper[4971]: I1127 06:56:00.988571 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Nov 27 06:56:00 crc kubenswrapper[4971]: I1127 06:56:00.988574 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Nov 27 06:56:01 crc kubenswrapper[4971]: I1127 06:56:01.001750 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Nov 27 06:56:01 crc kubenswrapper[4971]: I1127 06:56:01.075455 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/91ba58ad-1ec1-4990-a641-0e8a70e8c8c5-kube-api-access\") pod \"installer-9-crc\" (UID: \"91ba58ad-1ec1-4990-a641-0e8a70e8c8c5\") " pod="openshift-kube-apiserver/installer-9-crc"
Nov 27 06:56:01 crc kubenswrapper[4971]: I1127 06:56:01.075521 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/91ba58ad-1ec1-4990-a641-0e8a70e8c8c5-kubelet-dir\") pod \"installer-9-crc\" (UID: \"91ba58ad-1ec1-4990-a641-0e8a70e8c8c5\") " pod="openshift-kube-apiserver/installer-9-crc"
Nov 27 06:56:01 crc kubenswrapper[4971]: I1127 06:56:01.075580 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/91ba58ad-1ec1-4990-a641-0e8a70e8c8c5-var-lock\") pod \"installer-9-crc\" (UID: \"91ba58ad-1ec1-4990-a641-0e8a70e8c8c5\") " pod="openshift-kube-apiserver/installer-9-crc"
Nov 27 06:56:01 crc kubenswrapper[4971]: I1127 06:56:01.176898 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/91ba58ad-1ec1-4990-a641-0e8a70e8c8c5-kube-api-access\") pod \"installer-9-crc\" (UID: \"91ba58ad-1ec1-4990-a641-0e8a70e8c8c5\") " pod="openshift-kube-apiserver/installer-9-crc"
Nov 27 06:56:01 crc kubenswrapper[4971]: I1127 06:56:01.176979 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/91ba58ad-1ec1-4990-a641-0e8a70e8c8c5-kubelet-dir\") pod \"installer-9-crc\" (UID: \"91ba58ad-1ec1-4990-a641-0e8a70e8c8c5\") " pod="openshift-kube-apiserver/installer-9-crc"
Nov 27 06:56:01 crc kubenswrapper[4971]: I1127 06:56:01.177014 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/91ba58ad-1ec1-4990-a641-0e8a70e8c8c5-var-lock\") pod \"installer-9-crc\" (UID: \"91ba58ad-1ec1-4990-a641-0e8a70e8c8c5\") " pod="openshift-kube-apiserver/installer-9-crc"
Nov 27 06:56:01 crc kubenswrapper[4971]: I1127 06:56:01.177102 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/91ba58ad-1ec1-4990-a641-0e8a70e8c8c5-var-lock\") pod \"installer-9-crc\" (UID: \"91ba58ad-1ec1-4990-a641-0e8a70e8c8c5\") " pod="openshift-kube-apiserver/installer-9-crc"
Nov 27 06:56:01 crc kubenswrapper[4971]: I1127 06:56:01.177144 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/91ba58ad-1ec1-4990-a641-0e8a70e8c8c5-kubelet-dir\") pod \"installer-9-crc\" (UID: \"91ba58ad-1ec1-4990-a641-0e8a70e8c8c5\") " pod="openshift-kube-apiserver/installer-9-crc"
Nov 27 06:56:01 crc kubenswrapper[4971]: I1127 06:56:01.203508 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/91ba58ad-1ec1-4990-a641-0e8a70e8c8c5-kube-api-access\") pod \"installer-9-crc\" (UID: \"91ba58ad-1ec1-4990-a641-0e8a70e8c8c5\") " pod="openshift-kube-apiserver/installer-9-crc"
Nov 27 06:56:01 crc kubenswrapper[4971]: I1127 06:56:01.312135 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Nov 27 06:56:01 crc kubenswrapper[4971]: I1127 06:56:01.582489 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fhgrm"
Nov 27 06:56:01 crc kubenswrapper[4971]: I1127 06:56:01.582926 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fhgrm"
Nov 27 06:56:02 crc kubenswrapper[4971]: I1127 06:56:02.519900 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Nov 27 06:56:02 crc kubenswrapper[4971]: W1127 06:56:02.522501 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod91ba58ad_1ec1_4990_a641_0e8a70e8c8c5.slice/crio-8fbdb45b8db07e3715be5614d409c82d275da7d64ff19e11040475194ce619fe WatchSource:0}: Error finding container 8fbdb45b8db07e3715be5614d409c82d275da7d64ff19e11040475194ce619fe: Status 404 returned error can't find the container with id 8fbdb45b8db07e3715be5614d409c82d275da7d64ff19e11040475194ce619fe
Nov 27 06:56:03 crc kubenswrapper[4971]: I1127 06:56:03.289581 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"91ba58ad-1ec1-4990-a641-0e8a70e8c8c5","Type":"ContainerStarted","Data":"8fbdb45b8db07e3715be5614d409c82d275da7d64ff19e11040475194ce619fe"}
Nov 27 06:56:03 crc kubenswrapper[4971]: I1127 06:56:03.292270 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8jk6j" event={"ID":"a9d2eabc-ab6d-4bda-b71e-5b32a68105de","Type":"ContainerStarted","Data":"b3761daf3c88ca35794494e8bea53c4112dcaa9f84bf8e7bf9b556269f860f72"}
Nov 27 06:56:03 crc kubenswrapper[4971]: I1127 06:56:03.314563 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8jk6j" podStartSLOduration=3.169596131 podStartE2EDuration="49.31451931s" podCreationTimestamp="2025-11-27 06:55:14 +0000 UTC" firstStartedPulling="2025-11-27 06:55:15.986265873 +0000 UTC m=+154.178309781" lastFinishedPulling="2025-11-27 06:56:02.131189052 +0000 UTC m=+200.323232960" observedRunningTime="2025-11-27 06:56:03.310986677 +0000 UTC m=+201.503030605" watchObservedRunningTime="2025-11-27 06:56:03.31451931 +0000 UTC m=+201.506563228"
Nov 27 06:56:03 crc kubenswrapper[4971]: I1127 06:56:03.388624 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-fhgrm" podUID="8978d4d3-664f-40f4-b33c-83cd92577bc3" containerName="registry-server" probeResult="failure" output=<
Nov 27 06:56:03 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s
Nov 27 06:56:03 crc kubenswrapper[4971]: >
Nov 27 06:56:05 crc kubenswrapper[4971]: I1127 06:56:05.028561 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8jk6j"
Nov 27 06:56:05 crc kubenswrapper[4971]: I1127 06:56:05.028804 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8jk6j"
Nov 27 06:56:06 crc kubenswrapper[4971]: I1127 06:56:06.071457 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-8jk6j" podUID="a9d2eabc-ab6d-4bda-b71e-5b32a68105de" containerName="registry-server" probeResult="failure" output=<
Nov 27 06:56:06 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s
Nov 27 06:56:06 crc kubenswrapper[4971]: >
Nov 27 06:56:06 crc kubenswrapper[4971]: I1127 06:56:06.327771 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7pr9t" event={"ID":"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c","Type":"ContainerStarted","Data":"79fa646f743f7f4059a17ac081237d0b2fff084f67c728c80a9864f67667ed79"}
Nov 27 06:56:06 crc kubenswrapper[4971]: I1127 06:56:06.329262 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"91ba58ad-1ec1-4990-a641-0e8a70e8c8c5","Type":"ContainerStarted","Data":"59a1b9085bf8e6a260c4229fe13d634c01f84f622007b0e5fde8634639e62e97"}
Nov 27 06:56:07 crc kubenswrapper[4971]: I1127 06:56:07.351770 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7pr9t" podStartSLOduration=3.702919333 podStartE2EDuration="53.351750109s" podCreationTimestamp="2025-11-27 06:55:14 +0000 UTC" firstStartedPulling="2025-11-27 06:55:15.928411812 +0000 UTC m=+154.120455730" lastFinishedPulling="2025-11-27 06:56:05.577242578 +0000 UTC m=+203.769286506" observedRunningTime="2025-11-27 06:56:07.350801968 +0000 UTC m=+205.542845886" watchObservedRunningTime="2025-11-27 06:56:07.351750109 +0000 UTC m=+205.543794027"
Nov 27 06:56:07 crc kubenswrapper[4971]: I1127 06:56:07.377277 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7hzzs"]
Nov 27 06:56:07 crc kubenswrapper[4971]: I1127 06:56:07.377484 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs" podUID="78734b35-61db-46e9-b16b-1f03258f9fcb" containerName="controller-manager" containerID="cri-o://68db77eb991c63835e0067a546d988680d5df96131649cc1e6d414028039b607" gracePeriod=30
Nov 27 06:56:07 crc kubenswrapper[4971]: I1127 06:56:07.379788 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=7.379762133 podStartE2EDuration="7.379762133s" podCreationTimestamp="2025-11-27 06:56:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:56:07.374566827 +0000 UTC m=+205.566610765" watchObservedRunningTime="2025-11-27 06:56:07.379762133 +0000 UTC m=+205.571806051"
Nov 27 06:56:07 crc kubenswrapper[4971]: I1127 06:56:07.470808 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb"]
Nov 27 06:56:07 crc kubenswrapper[4971]: I1127 06:56:07.471145 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb" podUID="0316d962-5de2-4709-95e7-1679943f35fd" containerName="route-controller-manager" containerID="cri-o://8322e3720ea1cd1f04684587515e07e81db27143190cc4163685bd8e1aa24644" gracePeriod=30
Nov 27 06:56:08 crc kubenswrapper[4971]: I1127 06:56:08.339346 4971 generic.go:334] "Generic (PLEG): container finished" podID="0316d962-5de2-4709-95e7-1679943f35fd" containerID="8322e3720ea1cd1f04684587515e07e81db27143190cc4163685bd8e1aa24644" exitCode=0
Nov 27 06:56:08 crc kubenswrapper[4971]: I1127 06:56:08.339394 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb" event={"ID":"0316d962-5de2-4709-95e7-1679943f35fd","Type":"ContainerDied","Data":"8322e3720ea1cd1f04684587515e07e81db27143190cc4163685bd8e1aa24644"}
Nov 27 06:56:08 crc kubenswrapper[4971]: I1127 06:56:08.340873 4971 generic.go:334] "Generic (PLEG): container finished" podID="78734b35-61db-46e9-b16b-1f03258f9fcb" containerID="68db77eb991c63835e0067a546d988680d5df96131649cc1e6d414028039b607" exitCode=0
Nov 27 06:56:08 crc kubenswrapper[4971]: I1127 06:56:08.340910 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs" event={"ID":"78734b35-61db-46e9-b16b-1f03258f9fcb","Type":"ContainerDied","Data":"68db77eb991c63835e0067a546d988680d5df96131649cc1e6d414028039b607"}
Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.248220 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs"
Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.254727 4971 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.276881 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk"] Nov 27 06:56:09 crc kubenswrapper[4971]: E1127 06:56:09.277224 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78734b35-61db-46e9-b16b-1f03258f9fcb" containerName="controller-manager" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.277317 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="78734b35-61db-46e9-b16b-1f03258f9fcb" containerName="controller-manager" Nov 27 06:56:09 crc kubenswrapper[4971]: E1127 06:56:09.277386 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0316d962-5de2-4709-95e7-1679943f35fd" containerName="route-controller-manager" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.277396 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0316d962-5de2-4709-95e7-1679943f35fd" containerName="route-controller-manager" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.281793 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0316d962-5de2-4709-95e7-1679943f35fd" containerName="route-controller-manager" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.281846 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="78734b35-61db-46e9-b16b-1f03258f9fcb" containerName="controller-manager" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.282364 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.294382 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk"] Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.347717 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb" event={"ID":"0316d962-5de2-4709-95e7-1679943f35fd","Type":"ContainerDied","Data":"f57ec2aeb5e22f8ef8cdb8aeeb0e3bfb1781fe2a83e9d97f175cd7224037bb5a"} Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.347742 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.347777 4971 scope.go:117] "RemoveContainer" containerID="8322e3720ea1cd1f04684587515e07e81db27143190cc4163685bd8e1aa24644" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.349679 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs" event={"ID":"78734b35-61db-46e9-b16b-1f03258f9fcb","Type":"ContainerDied","Data":"ac2201f0ae22a8468871139832e43fe9e5416597c69482ddb2578663657e4995"} Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.349775 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7hzzs" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.385229 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78734b35-61db-46e9-b16b-1f03258f9fcb-config\") pod \"78734b35-61db-46e9-b16b-1f03258f9fcb\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.385294 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0316d962-5de2-4709-95e7-1679943f35fd-config\") pod \"0316d962-5de2-4709-95e7-1679943f35fd\" (UID: \"0316d962-5de2-4709-95e7-1679943f35fd\") " Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.385346 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78734b35-61db-46e9-b16b-1f03258f9fcb-client-ca\") pod \"78734b35-61db-46e9-b16b-1f03258f9fcb\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.385373 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xw4rt\" (UniqueName: \"kubernetes.io/projected/0316d962-5de2-4709-95e7-1679943f35fd-kube-api-access-xw4rt\") pod \"0316d962-5de2-4709-95e7-1679943f35fd\" (UID: \"0316d962-5de2-4709-95e7-1679943f35fd\") " Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.385406 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2djsr\" (UniqueName: \"kubernetes.io/projected/78734b35-61db-46e9-b16b-1f03258f9fcb-kube-api-access-2djsr\") pod \"78734b35-61db-46e9-b16b-1f03258f9fcb\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.385428 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78734b35-61db-46e9-b16b-1f03258f9fcb-serving-cert\") pod \"78734b35-61db-46e9-b16b-1f03258f9fcb\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.385475 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0316d962-5de2-4709-95e7-1679943f35fd-client-ca\") pod \"0316d962-5de2-4709-95e7-1679943f35fd\" (UID: \"0316d962-5de2-4709-95e7-1679943f35fd\") " Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.385518 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0316d962-5de2-4709-95e7-1679943f35fd-serving-cert\") pod \"0316d962-5de2-4709-95e7-1679943f35fd\" (UID: \"0316d962-5de2-4709-95e7-1679943f35fd\") " Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.385576 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78734b35-61db-46e9-b16b-1f03258f9fcb-proxy-ca-bundles\") pod \"78734b35-61db-46e9-b16b-1f03258f9fcb\" (UID: \"78734b35-61db-46e9-b16b-1f03258f9fcb\") " Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.385720 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dccc\" (UniqueName: \"kubernetes.io/projected/322ed97d-f0c9-4065-8c04-a80357e8783e-kube-api-access-4dccc\") pod 
\"controller-manager-7cc94f8bd7-2qxbk\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.385749 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/322ed97d-f0c9-4065-8c04-a80357e8783e-proxy-ca-bundles\") pod \"controller-manager-7cc94f8bd7-2qxbk\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.385777 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/322ed97d-f0c9-4065-8c04-a80357e8783e-client-ca\") pod \"controller-manager-7cc94f8bd7-2qxbk\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.385846 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/322ed97d-f0c9-4065-8c04-a80357e8783e-config\") pod \"controller-manager-7cc94f8bd7-2qxbk\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.385866 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/322ed97d-f0c9-4065-8c04-a80357e8783e-serving-cert\") pod \"controller-manager-7cc94f8bd7-2qxbk\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.386494 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78734b35-61db-46e9-b16b-1f03258f9fcb-client-ca" (OuterVolumeSpecName: "client-ca") pod "78734b35-61db-46e9-b16b-1f03258f9fcb" (UID: "78734b35-61db-46e9-b16b-1f03258f9fcb"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.386566 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78734b35-61db-46e9-b16b-1f03258f9fcb-config" (OuterVolumeSpecName: "config") pod "78734b35-61db-46e9-b16b-1f03258f9fcb" (UID: "78734b35-61db-46e9-b16b-1f03258f9fcb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.386802 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0316d962-5de2-4709-95e7-1679943f35fd-config" (OuterVolumeSpecName: "config") pod "0316d962-5de2-4709-95e7-1679943f35fd" (UID: "0316d962-5de2-4709-95e7-1679943f35fd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.387094 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0316d962-5de2-4709-95e7-1679943f35fd-client-ca" (OuterVolumeSpecName: "client-ca") pod "0316d962-5de2-4709-95e7-1679943f35fd" (UID: "0316d962-5de2-4709-95e7-1679943f35fd"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.387209 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78734b35-61db-46e9-b16b-1f03258f9fcb-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "78734b35-61db-46e9-b16b-1f03258f9fcb" (UID: "78734b35-61db-46e9-b16b-1f03258f9fcb"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.397772 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78734b35-61db-46e9-b16b-1f03258f9fcb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "78734b35-61db-46e9-b16b-1f03258f9fcb" (UID: "78734b35-61db-46e9-b16b-1f03258f9fcb"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.397797 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78734b35-61db-46e9-b16b-1f03258f9fcb-kube-api-access-2djsr" (OuterVolumeSpecName: "kube-api-access-2djsr") pod "78734b35-61db-46e9-b16b-1f03258f9fcb" (UID: "78734b35-61db-46e9-b16b-1f03258f9fcb"). InnerVolumeSpecName "kube-api-access-2djsr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.397991 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0316d962-5de2-4709-95e7-1679943f35fd-kube-api-access-xw4rt" (OuterVolumeSpecName: "kube-api-access-xw4rt") pod "0316d962-5de2-4709-95e7-1679943f35fd" (UID: "0316d962-5de2-4709-95e7-1679943f35fd"). InnerVolumeSpecName "kube-api-access-xw4rt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.398569 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0316d962-5de2-4709-95e7-1679943f35fd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0316d962-5de2-4709-95e7-1679943f35fd" (UID: "0316d962-5de2-4709-95e7-1679943f35fd"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.487597 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/322ed97d-f0c9-4065-8c04-a80357e8783e-config\") pod \"controller-manager-7cc94f8bd7-2qxbk\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.488129 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/322ed97d-f0c9-4065-8c04-a80357e8783e-serving-cert\") pod \"controller-manager-7cc94f8bd7-2qxbk\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.488222 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dccc\" (UniqueName: \"kubernetes.io/projected/322ed97d-f0c9-4065-8c04-a80357e8783e-kube-api-access-4dccc\") pod \"controller-manager-7cc94f8bd7-2qxbk\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.488312 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/322ed97d-f0c9-4065-8c04-a80357e8783e-proxy-ca-bundles\") pod \"controller-manager-7cc94f8bd7-2qxbk\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.488405 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/322ed97d-f0c9-4065-8c04-a80357e8783e-client-ca\") pod \"controller-manager-7cc94f8bd7-2qxbk\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.488521 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0316d962-5de2-4709-95e7-1679943f35fd-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.488612 4971 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78734b35-61db-46e9-b16b-1f03258f9fcb-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.488674 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78734b35-61db-46e9-b16b-1f03258f9fcb-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.488733 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0316d962-5de2-4709-95e7-1679943f35fd-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.488792 4971 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78734b35-61db-46e9-b16b-1f03258f9fcb-client-ca\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.488856 4971 reconciler_common.go:293] "Volume detached 
for volume \"kube-api-access-xw4rt\" (UniqueName: \"kubernetes.io/projected/0316d962-5de2-4709-95e7-1679943f35fd-kube-api-access-xw4rt\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.488915 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2djsr\" (UniqueName: \"kubernetes.io/projected/78734b35-61db-46e9-b16b-1f03258f9fcb-kube-api-access-2djsr\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.488979 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78734b35-61db-46e9-b16b-1f03258f9fcb-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.489036 4971 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0316d962-5de2-4709-95e7-1679943f35fd-client-ca\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.489358 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/322ed97d-f0c9-4065-8c04-a80357e8783e-client-ca\") pod \"controller-manager-7cc94f8bd7-2qxbk\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.489446 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/322ed97d-f0c9-4065-8c04-a80357e8783e-config\") pod \"controller-manager-7cc94f8bd7-2qxbk\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.489594 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/322ed97d-f0c9-4065-8c04-a80357e8783e-proxy-ca-bundles\") pod \"controller-manager-7cc94f8bd7-2qxbk\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.500811 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/322ed97d-f0c9-4065-8c04-a80357e8783e-serving-cert\") pod \"controller-manager-7cc94f8bd7-2qxbk\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.503049 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dccc\" (UniqueName: \"kubernetes.io/projected/322ed97d-f0c9-4065-8c04-a80357e8783e-kube-api-access-4dccc\") pod \"controller-manager-7cc94f8bd7-2qxbk\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.606163 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.674428 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb"] Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.679363 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-29jwb"] Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.681847 4971 scope.go:117] "RemoveContainer" containerID="68db77eb991c63835e0067a546d988680d5df96131649cc1e6d414028039b607" Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.688502 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7hzzs"] Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.692403 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7hzzs"] Nov 27 06:56:09 crc kubenswrapper[4971]: I1127 06:56:09.961642 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk"] Nov 27 06:56:09 crc kubenswrapper[4971]: W1127 06:56:09.967611 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod322ed97d_f0c9_4065_8c04_a80357e8783e.slice/crio-a617492a814ec2e205579bafedb00e0380226e15089f8c4164a081016bd50e9c WatchSource:0}: Error finding container a617492a814ec2e205579bafedb00e0380226e15089f8c4164a081016bd50e9c: Status 404 returned error can't find the container with id a617492a814ec2e205579bafedb00e0380226e15089f8c4164a081016bd50e9c Nov 27 06:56:10 crc kubenswrapper[4971]: I1127 06:56:10.356573 4971 generic.go:334] "Generic (PLEG): container finished" podID="12325c6c-a817-46ae-b776-01213f064dbf" containerID="353330776d82599c62b540162c29b1c3a1197f997124e798b0ea069a6553b707" exitCode=0 Nov 27 06:56:10 crc kubenswrapper[4971]: I1127 06:56:10.356640 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qfvr4" event={"ID":"12325c6c-a817-46ae-b776-01213f064dbf","Type":"ContainerDied","Data":"353330776d82599c62b540162c29b1c3a1197f997124e798b0ea069a6553b707"} Nov 27 06:56:10 crc kubenswrapper[4971]: I1127 06:56:10.362595 4971 generic.go:334] "Generic (PLEG): container finished" podID="2bec9dd3-b528-4b67-b949-8e9eb074e222" containerID="6269307f566369ece89a79544f896d99d6641ebbea8f82b35145c0eb6cbaceb6" exitCode=0 Nov 27 06:56:10 crc kubenswrapper[4971]: I1127 06:56:10.362762 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rnpmr" event={"ID":"2bec9dd3-b528-4b67-b949-8e9eb074e222","Type":"ContainerDied","Data":"6269307f566369ece89a79544f896d99d6641ebbea8f82b35145c0eb6cbaceb6"} Nov 27 06:56:10 crc kubenswrapper[4971]: I1127 06:56:10.368462 4971 generic.go:334] "Generic (PLEG): container finished" podID="e2a58f53-1c1f-48b1-82af-845bf9dd1d7e" containerID="6c7887f4f79008fb38e69f64b462ef51474868f8f6e9e5a248260288e8b183a7" exitCode=0 Nov 27 06:56:10 crc kubenswrapper[4971]: I1127 06:56:10.368552 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tgs6w" event={"ID":"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e","Type":"ContainerDied","Data":"6c7887f4f79008fb38e69f64b462ef51474868f8f6e9e5a248260288e8b183a7"} Nov 27 06:56:10 crc 
kubenswrapper[4971]: I1127 06:56:10.372477 4971 generic.go:334] "Generic (PLEG): container finished" podID="dda2cad1-cdbe-48f8-ba4c-3074565de24b" containerID="21a5ce27585e5be59ed538abfd63d69c11d9404e70470caf9327b63f1cc0100b" exitCode=0 Nov 27 06:56:10 crc kubenswrapper[4971]: I1127 06:56:10.372575 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bhc8q" event={"ID":"dda2cad1-cdbe-48f8-ba4c-3074565de24b","Type":"ContainerDied","Data":"21a5ce27585e5be59ed538abfd63d69c11d9404e70470caf9327b63f1cc0100b"} Nov 27 06:56:10 crc kubenswrapper[4971]: I1127 06:56:10.375496 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" event={"ID":"322ed97d-f0c9-4065-8c04-a80357e8783e","Type":"ContainerStarted","Data":"df3360243e3ad3bfc212e21ab495bb2a8d2ea061ffaeb6d71ab14238cf7366bc"} Nov 27 06:56:10 crc kubenswrapper[4971]: I1127 06:56:10.375543 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" event={"ID":"322ed97d-f0c9-4065-8c04-a80357e8783e","Type":"ContainerStarted","Data":"a617492a814ec2e205579bafedb00e0380226e15089f8c4164a081016bd50e9c"} Nov 27 06:56:10 crc kubenswrapper[4971]: I1127 06:56:10.375941 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:10 crc kubenswrapper[4971]: I1127 06:56:10.382937 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:10 crc kubenswrapper[4971]: I1127 06:56:10.558814 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0316d962-5de2-4709-95e7-1679943f35fd" path="/var/lib/kubelet/pods/0316d962-5de2-4709-95e7-1679943f35fd/volumes" Nov 27 06:56:10 crc kubenswrapper[4971]: I1127 06:56:10.559468 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78734b35-61db-46e9-b16b-1f03258f9fcb" path="/var/lib/kubelet/pods/78734b35-61db-46e9-b16b-1f03258f9fcb/volumes" Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.384513 4971 generic.go:334] "Generic (PLEG): container finished" podID="75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c" containerID="bfa2fc50ce01d2b73600e107858fc7c2bda07d8ee17751907d6096e095d05eac" exitCode=0 Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.385236 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dvz2k" event={"ID":"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c","Type":"ContainerDied","Data":"bfa2fc50ce01d2b73600e107858fc7c2bda07d8ee17751907d6096e095d05eac"} Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.402335 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" podStartSLOduration=4.402311193 podStartE2EDuration="4.402311193s" podCreationTimestamp="2025-11-27 06:56:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:56:10.508968488 +0000 UTC m=+208.701012416" watchObservedRunningTime="2025-11-27 06:56:11.402311193 +0000 UTC m=+209.594355121" Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.628463 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp"] Nov 27 06:56:11 crc 
kubenswrapper[4971]: I1127 06:56:11.629163 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp" Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.631987 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.632024 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.632737 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.633041 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.633756 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.633848 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.642014 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp"] Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.675160 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fhgrm" Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.718220 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svjz6\" (UniqueName: \"kubernetes.io/projected/ac22a34d-29da-422e-b0cd-acdbb0db83d4-kube-api-access-svjz6\") pod \"route-controller-manager-84f54dd585-zv5rp\" (UID: \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\") " pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp" Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.718629 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ac22a34d-29da-422e-b0cd-acdbb0db83d4-client-ca\") pod \"route-controller-manager-84f54dd585-zv5rp\" (UID: \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\") " pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp" Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.718695 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac22a34d-29da-422e-b0cd-acdbb0db83d4-config\") pod \"route-controller-manager-84f54dd585-zv5rp\" (UID: \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\") " pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp" Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.718744 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac22a34d-29da-422e-b0cd-acdbb0db83d4-serving-cert\") pod \"route-controller-manager-84f54dd585-zv5rp\" (UID: \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\") " pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp" 
Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.725235 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fhgrm"
Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.820805 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svjz6\" (UniqueName: \"kubernetes.io/projected/ac22a34d-29da-422e-b0cd-acdbb0db83d4-kube-api-access-svjz6\") pod \"route-controller-manager-84f54dd585-zv5rp\" (UID: \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\") " pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp"
Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.820853 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ac22a34d-29da-422e-b0cd-acdbb0db83d4-client-ca\") pod \"route-controller-manager-84f54dd585-zv5rp\" (UID: \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\") " pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp"
Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.820906 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac22a34d-29da-422e-b0cd-acdbb0db83d4-config\") pod \"route-controller-manager-84f54dd585-zv5rp\" (UID: \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\") " pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp"
Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.822079 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ac22a34d-29da-422e-b0cd-acdbb0db83d4-client-ca\") pod \"route-controller-manager-84f54dd585-zv5rp\" (UID: \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\") " pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp"
Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.822182 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac22a34d-29da-422e-b0cd-acdbb0db83d4-config\") pod \"route-controller-manager-84f54dd585-zv5rp\" (UID: \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\") " pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp"
Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.822218 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac22a34d-29da-422e-b0cd-acdbb0db83d4-serving-cert\") pod \"route-controller-manager-84f54dd585-zv5rp\" (UID: \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\") " pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp"
Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.836082 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac22a34d-29da-422e-b0cd-acdbb0db83d4-serving-cert\") pod \"route-controller-manager-84f54dd585-zv5rp\" (UID: \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\") " pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp"
Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.839292 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svjz6\" (UniqueName: \"kubernetes.io/projected/ac22a34d-29da-422e-b0cd-acdbb0db83d4-kube-api-access-svjz6\") pod \"route-controller-manager-84f54dd585-zv5rp\" (UID: \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\") " pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp"
Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.853273 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-h6p78"]
Nov 27 06:56:11 crc kubenswrapper[4971]: I1127 06:56:11.945149 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp"
Nov 27 06:56:12 crc kubenswrapper[4971]: I1127 06:56:12.337554 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp"]
Nov 27 06:56:12 crc kubenswrapper[4971]: W1127 06:56:12.344524 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac22a34d_29da_422e_b0cd_acdbb0db83d4.slice/crio-298dd3e16fb0f2d6efb8bf5290e7ecaa775270208352c95ca5c4cca4f7382b71 WatchSource:0}: Error finding container 298dd3e16fb0f2d6efb8bf5290e7ecaa775270208352c95ca5c4cca4f7382b71: Status 404 returned error can't find the container with id 298dd3e16fb0f2d6efb8bf5290e7ecaa775270208352c95ca5c4cca4f7382b71
Nov 27 06:56:12 crc kubenswrapper[4971]: I1127 06:56:12.394747 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rnpmr" event={"ID":"2bec9dd3-b528-4b67-b949-8e9eb074e222","Type":"ContainerStarted","Data":"951993ff140652fc2efa39e6a84fef2b79d0a9d8777b3ea466d9979e8e9e7ae3"}
Nov 27 06:56:12 crc kubenswrapper[4971]: I1127 06:56:12.401008 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tgs6w" event={"ID":"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e","Type":"ContainerStarted","Data":"991a6637fd7c721001a12797444e49f24fda324675d04497a2bffd8ddc0ffebc"}
Nov 27 06:56:12 crc kubenswrapper[4971]: I1127 06:56:12.406962 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bhc8q" event={"ID":"dda2cad1-cdbe-48f8-ba4c-3074565de24b","Type":"ContainerStarted","Data":"8d6e0bc042f00624923d90d28f208891f9ce5c16cec09fe34ef97619918c5525"}
Nov 27 06:56:12 crc kubenswrapper[4971]: I1127 06:56:12.408851 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp" event={"ID":"ac22a34d-29da-422e-b0cd-acdbb0db83d4","Type":"ContainerStarted","Data":"298dd3e16fb0f2d6efb8bf5290e7ecaa775270208352c95ca5c4cca4f7382b71"}
Nov 27 06:56:12 crc kubenswrapper[4971]: I1127 06:56:12.411061 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qfvr4" event={"ID":"12325c6c-a817-46ae-b776-01213f064dbf","Type":"ContainerStarted","Data":"ba3227ee0632cff4ea50b0ac425afb773f8b72eba30aa9fd27b8bd9d07d2e1d0"}
Nov 27 06:56:12 crc kubenswrapper[4971]: I1127 06:56:12.420711 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rnpmr" podStartSLOduration=2.181422408 podStartE2EDuration="1m1.420690727s" podCreationTimestamp="2025-11-27 06:55:11 +0000 UTC" firstStartedPulling="2025-11-27 06:55:12.829349914 +0000 UTC m=+151.021393832" lastFinishedPulling="2025-11-27 06:56:12.068618233 +0000 UTC m=+210.260662151" observedRunningTime="2025-11-27 06:56:12.418143635 +0000 UTC m=+210.610187573" watchObservedRunningTime="2025-11-27 06:56:12.420690727 +0000 UTC m=+210.612734645"
Nov 27 06:56:12 crc kubenswrapper[4971]: I1127 06:56:12.439586 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qfvr4" podStartSLOduration=2.466203644 podStartE2EDuration="59.439507257s" podCreationTimestamp="2025-11-27 06:55:13 +0000 UTC" firstStartedPulling="2025-11-27 06:55:14.889896946 +0000 UTC m=+153.081940864" lastFinishedPulling="2025-11-27 06:56:11.863200559 +0000 UTC m=+210.055244477" observedRunningTime="2025-11-27 06:56:12.438373641 +0000 UTC m=+210.630417589" watchObservedRunningTime="2025-11-27 06:56:12.439507257 +0000 UTC m=+210.631551175" Nov 27 06:56:12 crc kubenswrapper[4971]: I1127 06:56:12.459569 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tgs6w" podStartSLOduration=3.054794918 podStartE2EDuration="59.459552127s" podCreationTimestamp="2025-11-27 06:55:13 +0000 UTC" firstStartedPulling="2025-11-27 06:55:14.896659515 +0000 UTC m=+153.088703433" lastFinishedPulling="2025-11-27 06:56:11.301416724 +0000 UTC m=+209.493460642" observedRunningTime="2025-11-27 06:56:12.456580022 +0000 UTC m=+210.648623960" watchObservedRunningTime="2025-11-27 06:56:12.459552127 +0000 UTC m=+210.651596045" Nov 27 06:56:12 crc kubenswrapper[4971]: I1127 06:56:12.484841 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bhc8q" podStartSLOduration=2.815952817 podStartE2EDuration="1m1.484825913s" podCreationTimestamp="2025-11-27 06:55:11 +0000 UTC" firstStartedPulling="2025-11-27 06:55:12.814721381 +0000 UTC m=+151.006765299" lastFinishedPulling="2025-11-27 06:56:11.483594477 +0000 UTC m=+209.675638395" observedRunningTime="2025-11-27 06:56:12.483238102 +0000 UTC m=+210.675282030" watchObservedRunningTime="2025-11-27 06:56:12.484825913 +0000 UTC m=+210.676869831" Nov 27 06:56:13 crc kubenswrapper[4971]: I1127 06:56:13.419216 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp" event={"ID":"ac22a34d-29da-422e-b0cd-acdbb0db83d4","Type":"ContainerStarted","Data":"33246aadd14b4765995f93b4fa8c237544ad590bbba6e6a37f78d8a26ba7c454"} Nov 27 06:56:13 crc kubenswrapper[4971]: I1127 06:56:13.421822 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dvz2k" event={"ID":"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c","Type":"ContainerStarted","Data":"0f84d1e81fefdf69ea4f2c7fee6af85d4d21f9b6562b8da48d93379c4959ede0"} Nov 27 06:56:13 crc kubenswrapper[4971]: I1127 06:56:13.471144 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp" podStartSLOduration=6.471121354 podStartE2EDuration="6.471121354s" podCreationTimestamp="2025-11-27 06:56:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:56:13.447277733 +0000 UTC m=+211.639321661" watchObservedRunningTime="2025-11-27 06:56:13.471121354 +0000 UTC m=+211.663165272" Nov 27 06:56:13 crc kubenswrapper[4971]: I1127 06:56:13.472292 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dvz2k" podStartSLOduration=2.845087595 podStartE2EDuration="1m2.472284731s" podCreationTimestamp="2025-11-27 06:55:11 +0000 UTC" firstStartedPulling="2025-11-27 06:55:12.819157939 +0000 UTC 
m=+151.011201857" lastFinishedPulling="2025-11-27 06:56:12.446355085 +0000 UTC m=+210.638398993" observedRunningTime="2025-11-27 06:56:13.472082974 +0000 UTC m=+211.664126892" watchObservedRunningTime="2025-11-27 06:56:13.472284731 +0000 UTC m=+211.664328649" Nov 27 06:56:13 crc kubenswrapper[4971]: I1127 06:56:13.598696 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qfvr4" Nov 27 06:56:13 crc kubenswrapper[4971]: I1127 06:56:13.598760 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qfvr4" Nov 27 06:56:13 crc kubenswrapper[4971]: I1127 06:56:13.648330 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qfvr4" Nov 27 06:56:14 crc kubenswrapper[4971]: I1127 06:56:13.999698 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tgs6w" Nov 27 06:56:14 crc kubenswrapper[4971]: I1127 06:56:14.000054 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tgs6w" Nov 27 06:56:14 crc kubenswrapper[4971]: I1127 06:56:14.046292 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tgs6w" Nov 27 06:56:14 crc kubenswrapper[4971]: I1127 06:56:14.426743 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp" Nov 27 06:56:14 crc kubenswrapper[4971]: I1127 06:56:14.432924 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp" Nov 27 06:56:14 crc kubenswrapper[4971]: I1127 06:56:14.613977 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7pr9t" Nov 27 06:56:14 crc kubenswrapper[4971]: I1127 06:56:14.614065 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7pr9t" Nov 27 06:56:14 crc kubenswrapper[4971]: I1127 06:56:14.663232 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7pr9t" Nov 27 06:56:15 crc kubenswrapper[4971]: I1127 06:56:15.067511 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8jk6j" Nov 27 06:56:15 crc kubenswrapper[4971]: I1127 06:56:15.109607 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8jk6j" Nov 27 06:56:15 crc kubenswrapper[4971]: I1127 06:56:15.471341 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7pr9t" Nov 27 06:56:18 crc kubenswrapper[4971]: I1127 06:56:18.387837 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8jk6j"] Nov 27 06:56:18 crc kubenswrapper[4971]: I1127 06:56:18.388479 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8jk6j" podUID="a9d2eabc-ab6d-4bda-b71e-5b32a68105de" containerName="registry-server" containerID="cri-o://b3761daf3c88ca35794494e8bea53c4112dcaa9f84bf8e7bf9b556269f860f72" gracePeriod=2 Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.110858 
4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8jk6j" Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.245390 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9d2eabc-ab6d-4bda-b71e-5b32a68105de-catalog-content\") pod \"a9d2eabc-ab6d-4bda-b71e-5b32a68105de\" (UID: \"a9d2eabc-ab6d-4bda-b71e-5b32a68105de\") " Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.245603 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9d2eabc-ab6d-4bda-b71e-5b32a68105de-utilities\") pod \"a9d2eabc-ab6d-4bda-b71e-5b32a68105de\" (UID: \"a9d2eabc-ab6d-4bda-b71e-5b32a68105de\") " Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.245690 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2nbh\" (UniqueName: \"kubernetes.io/projected/a9d2eabc-ab6d-4bda-b71e-5b32a68105de-kube-api-access-z2nbh\") pod \"a9d2eabc-ab6d-4bda-b71e-5b32a68105de\" (UID: \"a9d2eabc-ab6d-4bda-b71e-5b32a68105de\") " Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.246614 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9d2eabc-ab6d-4bda-b71e-5b32a68105de-utilities" (OuterVolumeSpecName: "utilities") pod "a9d2eabc-ab6d-4bda-b71e-5b32a68105de" (UID: "a9d2eabc-ab6d-4bda-b71e-5b32a68105de"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.254893 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9d2eabc-ab6d-4bda-b71e-5b32a68105de-kube-api-access-z2nbh" (OuterVolumeSpecName: "kube-api-access-z2nbh") pod "a9d2eabc-ab6d-4bda-b71e-5b32a68105de" (UID: "a9d2eabc-ab6d-4bda-b71e-5b32a68105de"). InnerVolumeSpecName "kube-api-access-z2nbh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.347588 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9d2eabc-ab6d-4bda-b71e-5b32a68105de-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.347627 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2nbh\" (UniqueName: \"kubernetes.io/projected/a9d2eabc-ab6d-4bda-b71e-5b32a68105de-kube-api-access-z2nbh\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.461073 4971 generic.go:334] "Generic (PLEG): container finished" podID="a9d2eabc-ab6d-4bda-b71e-5b32a68105de" containerID="b3761daf3c88ca35794494e8bea53c4112dcaa9f84bf8e7bf9b556269f860f72" exitCode=0 Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.461128 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8jk6j" event={"ID":"a9d2eabc-ab6d-4bda-b71e-5b32a68105de","Type":"ContainerDied","Data":"b3761daf3c88ca35794494e8bea53c4112dcaa9f84bf8e7bf9b556269f860f72"} Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.461154 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8jk6j" event={"ID":"a9d2eabc-ab6d-4bda-b71e-5b32a68105de","Type":"ContainerDied","Data":"5ab4e9ac34708e2d2a21e8099f01ff144be930515255ca1051817fb4f3f5e5b8"} Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.461156 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8jk6j" Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.461169 4971 scope.go:117] "RemoveContainer" containerID="b3761daf3c88ca35794494e8bea53c4112dcaa9f84bf8e7bf9b556269f860f72" Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.524577 4971 scope.go:117] "RemoveContainer" containerID="0920598e949068af8232779951132ee6368eca2a5413a9f2a33f0510a9a3b7c6" Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.540729 4971 scope.go:117] "RemoveContainer" containerID="b1dbac5ac9a30b40d521917f2f3f0e2bf105d89ff25dbdf63d73d81c8a89d9d8" Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.554220 4971 scope.go:117] "RemoveContainer" containerID="b3761daf3c88ca35794494e8bea53c4112dcaa9f84bf8e7bf9b556269f860f72" Nov 27 06:56:20 crc kubenswrapper[4971]: E1127 06:56:20.554687 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3761daf3c88ca35794494e8bea53c4112dcaa9f84bf8e7bf9b556269f860f72\": container with ID starting with b3761daf3c88ca35794494e8bea53c4112dcaa9f84bf8e7bf9b556269f860f72 not found: ID does not exist" containerID="b3761daf3c88ca35794494e8bea53c4112dcaa9f84bf8e7bf9b556269f860f72" Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.554733 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3761daf3c88ca35794494e8bea53c4112dcaa9f84bf8e7bf9b556269f860f72"} err="failed to get container status \"b3761daf3c88ca35794494e8bea53c4112dcaa9f84bf8e7bf9b556269f860f72\": rpc error: code = NotFound desc = could not find container \"b3761daf3c88ca35794494e8bea53c4112dcaa9f84bf8e7bf9b556269f860f72\": container with ID starting with b3761daf3c88ca35794494e8bea53c4112dcaa9f84bf8e7bf9b556269f860f72 not found: ID does not exist" Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.554802 4971 scope.go:117] 
"RemoveContainer" containerID="0920598e949068af8232779951132ee6368eca2a5413a9f2a33f0510a9a3b7c6" Nov 27 06:56:20 crc kubenswrapper[4971]: E1127 06:56:20.555053 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0920598e949068af8232779951132ee6368eca2a5413a9f2a33f0510a9a3b7c6\": container with ID starting with 0920598e949068af8232779951132ee6368eca2a5413a9f2a33f0510a9a3b7c6 not found: ID does not exist" containerID="0920598e949068af8232779951132ee6368eca2a5413a9f2a33f0510a9a3b7c6" Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.555083 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0920598e949068af8232779951132ee6368eca2a5413a9f2a33f0510a9a3b7c6"} err="failed to get container status \"0920598e949068af8232779951132ee6368eca2a5413a9f2a33f0510a9a3b7c6\": rpc error: code = NotFound desc = could not find container \"0920598e949068af8232779951132ee6368eca2a5413a9f2a33f0510a9a3b7c6\": container with ID starting with 0920598e949068af8232779951132ee6368eca2a5413a9f2a33f0510a9a3b7c6 not found: ID does not exist" Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.555102 4971 scope.go:117] "RemoveContainer" containerID="b1dbac5ac9a30b40d521917f2f3f0e2bf105d89ff25dbdf63d73d81c8a89d9d8" Nov 27 06:56:20 crc kubenswrapper[4971]: E1127 06:56:20.555467 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1dbac5ac9a30b40d521917f2f3f0e2bf105d89ff25dbdf63d73d81c8a89d9d8\": container with ID starting with b1dbac5ac9a30b40d521917f2f3f0e2bf105d89ff25dbdf63d73d81c8a89d9d8 not found: ID does not exist" containerID="b1dbac5ac9a30b40d521917f2f3f0e2bf105d89ff25dbdf63d73d81c8a89d9d8" Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.555490 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1dbac5ac9a30b40d521917f2f3f0e2bf105d89ff25dbdf63d73d81c8a89d9d8"} err="failed to get container status \"b1dbac5ac9a30b40d521917f2f3f0e2bf105d89ff25dbdf63d73d81c8a89d9d8\": rpc error: code = NotFound desc = could not find container \"b1dbac5ac9a30b40d521917f2f3f0e2bf105d89ff25dbdf63d73d81c8a89d9d8\": container with ID starting with b1dbac5ac9a30b40d521917f2f3f0e2bf105d89ff25dbdf63d73d81c8a89d9d8 not found: ID does not exist" Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.833120 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9d2eabc-ab6d-4bda-b71e-5b32a68105de-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a9d2eabc-ab6d-4bda-b71e-5b32a68105de" (UID: "a9d2eabc-ab6d-4bda-b71e-5b32a68105de"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:56:20 crc kubenswrapper[4971]: I1127 06:56:20.854407 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9d2eabc-ab6d-4bda-b71e-5b32a68105de-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:21 crc kubenswrapper[4971]: I1127 06:56:21.087961 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8jk6j"] Nov 27 06:56:21 crc kubenswrapper[4971]: I1127 06:56:21.090556 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8jk6j"] Nov 27 06:56:21 crc kubenswrapper[4971]: I1127 06:56:21.477376 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rnpmr" Nov 27 06:56:21 crc kubenswrapper[4971]: I1127 06:56:21.477433 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rnpmr" Nov 27 06:56:21 crc kubenswrapper[4971]: I1127 06:56:21.565880 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rnpmr" Nov 27 06:56:21 crc kubenswrapper[4971]: I1127 06:56:21.828676 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bhc8q" Nov 27 06:56:21 crc kubenswrapper[4971]: I1127 06:56:21.828776 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bhc8q" Nov 27 06:56:21 crc kubenswrapper[4971]: I1127 06:56:21.904065 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bhc8q" Nov 27 06:56:21 crc kubenswrapper[4971]: I1127 06:56:21.991666 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dvz2k" Nov 27 06:56:21 crc kubenswrapper[4971]: I1127 06:56:21.991982 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dvz2k" Nov 27 06:56:22 crc kubenswrapper[4971]: I1127 06:56:22.026793 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dvz2k" Nov 27 06:56:22 crc kubenswrapper[4971]: I1127 06:56:22.522191 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dvz2k" Nov 27 06:56:22 crc kubenswrapper[4971]: I1127 06:56:22.523666 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bhc8q" Nov 27 06:56:22 crc kubenswrapper[4971]: I1127 06:56:22.526289 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rnpmr" Nov 27 06:56:22 crc kubenswrapper[4971]: I1127 06:56:22.557170 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9d2eabc-ab6d-4bda-b71e-5b32a68105de" path="/var/lib/kubelet/pods/a9d2eabc-ab6d-4bda-b71e-5b32a68105de/volumes" Nov 27 06:56:23 crc kubenswrapper[4971]: I1127 06:56:23.635050 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qfvr4" Nov 27 06:56:23 crc kubenswrapper[4971]: I1127 06:56:23.987056 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/community-operators-bhc8q"] Nov 27 06:56:24 crc kubenswrapper[4971]: I1127 06:56:24.036456 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tgs6w" Nov 27 06:56:24 crc kubenswrapper[4971]: I1127 06:56:24.488759 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bhc8q" podUID="dda2cad1-cdbe-48f8-ba4c-3074565de24b" containerName="registry-server" containerID="cri-o://8d6e0bc042f00624923d90d28f208891f9ce5c16cec09fe34ef97619918c5525" gracePeriod=2 Nov 27 06:56:24 crc kubenswrapper[4971]: I1127 06:56:24.585017 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dvz2k"] Nov 27 06:56:25 crc kubenswrapper[4971]: I1127 06:56:25.493910 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dvz2k" podUID="75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c" containerName="registry-server" containerID="cri-o://0f84d1e81fefdf69ea4f2c7fee6af85d4d21f9b6562b8da48d93379c4959ede0" gracePeriod=2 Nov 27 06:56:26 crc kubenswrapper[4971]: I1127 06:56:26.413392 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 06:56:26 crc kubenswrapper[4971]: I1127 06:56:26.413460 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 06:56:26 crc kubenswrapper[4971]: I1127 06:56:26.413517 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 06:56:26 crc kubenswrapper[4971]: I1127 06:56:26.414242 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 06:56:26 crc kubenswrapper[4971]: I1127 06:56:26.414327 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20" gracePeriod=600 Nov 27 06:56:26 crc kubenswrapper[4971]: I1127 06:56:26.986177 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tgs6w"] Nov 27 06:56:26 crc kubenswrapper[4971]: I1127 06:56:26.986469 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tgs6w" podUID="e2a58f53-1c1f-48b1-82af-845bf9dd1d7e" containerName="registry-server" containerID="cri-o://991a6637fd7c721001a12797444e49f24fda324675d04497a2bffd8ddc0ffebc" gracePeriod=2 Nov 27 06:56:27 crc kubenswrapper[4971]: I1127 06:56:27.343384 4971 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk"] Nov 27 06:56:27 crc kubenswrapper[4971]: I1127 06:56:27.343712 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" podUID="322ed97d-f0c9-4065-8c04-a80357e8783e" containerName="controller-manager" containerID="cri-o://df3360243e3ad3bfc212e21ab495bb2a8d2ea061ffaeb6d71ab14238cf7366bc" gracePeriod=30 Nov 27 06:56:27 crc kubenswrapper[4971]: I1127 06:56:27.353594 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp"] Nov 27 06:56:27 crc kubenswrapper[4971]: I1127 06:56:27.353829 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp" podUID="ac22a34d-29da-422e-b0cd-acdbb0db83d4" containerName="route-controller-manager" containerID="cri-o://33246aadd14b4765995f93b4fa8c237544ad590bbba6e6a37f78d8a26ba7c454" gracePeriod=30 Nov 27 06:56:27 crc kubenswrapper[4971]: I1127 06:56:27.504425 4971 generic.go:334] "Generic (PLEG): container finished" podID="75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c" containerID="0f84d1e81fefdf69ea4f2c7fee6af85d4d21f9b6562b8da48d93379c4959ede0" exitCode=0 Nov 27 06:56:27 crc kubenswrapper[4971]: I1127 06:56:27.504513 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dvz2k" event={"ID":"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c","Type":"ContainerDied","Data":"0f84d1e81fefdf69ea4f2c7fee6af85d4d21f9b6562b8da48d93379c4959ede0"} Nov 27 06:56:27 crc kubenswrapper[4971]: I1127 06:56:27.506039 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20" exitCode=0 Nov 27 06:56:27 crc kubenswrapper[4971]: I1127 06:56:27.506115 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20"} Nov 27 06:56:27 crc kubenswrapper[4971]: I1127 06:56:27.507991 4971 generic.go:334] "Generic (PLEG): container finished" podID="dda2cad1-cdbe-48f8-ba4c-3074565de24b" containerID="8d6e0bc042f00624923d90d28f208891f9ce5c16cec09fe34ef97619918c5525" exitCode=0 Nov 27 06:56:27 crc kubenswrapper[4971]: I1127 06:56:27.508029 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bhc8q" event={"ID":"dda2cad1-cdbe-48f8-ba4c-3074565de24b","Type":"ContainerDied","Data":"8d6e0bc042f00624923d90d28f208891f9ce5c16cec09fe34ef97619918c5525"} Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.393001 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dvz2k" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.437254 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bhc8q" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.514506 4971 generic.go:334] "Generic (PLEG): container finished" podID="ac22a34d-29da-422e-b0cd-acdbb0db83d4" containerID="33246aadd14b4765995f93b4fa8c237544ad590bbba6e6a37f78d8a26ba7c454" exitCode=0 Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.514567 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp" event={"ID":"ac22a34d-29da-422e-b0cd-acdbb0db83d4","Type":"ContainerDied","Data":"33246aadd14b4765995f93b4fa8c237544ad590bbba6e6a37f78d8a26ba7c454"} Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.516729 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dvz2k" event={"ID":"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c","Type":"ContainerDied","Data":"7bea4fa174c4f55b85f2bc92c420e7667c7cacd720a523a7e10036771dd42ca3"} Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.516782 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dvz2k" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.516790 4971 scope.go:117] "RemoveContainer" containerID="0f84d1e81fefdf69ea4f2c7fee6af85d4d21f9b6562b8da48d93379c4959ede0" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.520554 4971 generic.go:334] "Generic (PLEG): container finished" podID="e2a58f53-1c1f-48b1-82af-845bf9dd1d7e" containerID="991a6637fd7c721001a12797444e49f24fda324675d04497a2bffd8ddc0ffebc" exitCode=0 Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.520581 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tgs6w" event={"ID":"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e","Type":"ContainerDied","Data":"991a6637fd7c721001a12797444e49f24fda324675d04497a2bffd8ddc0ffebc"} Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.522403 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bhc8q" event={"ID":"dda2cad1-cdbe-48f8-ba4c-3074565de24b","Type":"ContainerDied","Data":"219669685de829f3c01aa8f24f1893f9ee9b2a58d545816b0c09d0019ce0d04e"} Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.522444 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bhc8q" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.523452 4971 generic.go:334] "Generic (PLEG): container finished" podID="322ed97d-f0c9-4065-8c04-a80357e8783e" containerID="df3360243e3ad3bfc212e21ab495bb2a8d2ea061ffaeb6d71ab14238cf7366bc" exitCode=0 Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.523476 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" event={"ID":"322ed97d-f0c9-4065-8c04-a80357e8783e","Type":"ContainerDied","Data":"df3360243e3ad3bfc212e21ab495bb2a8d2ea061ffaeb6d71ab14238cf7366bc"} Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.535201 4971 scope.go:117] "RemoveContainer" containerID="bfa2fc50ce01d2b73600e107858fc7c2bda07d8ee17751907d6096e095d05eac" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.549122 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c-catalog-content\") pod \"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c\" (UID: \"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c\") " Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.549162 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c-utilities\") pod \"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c\" (UID: \"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c\") " Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.549195 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dda2cad1-cdbe-48f8-ba4c-3074565de24b-catalog-content\") pod \"dda2cad1-cdbe-48f8-ba4c-3074565de24b\" (UID: \"dda2cad1-cdbe-48f8-ba4c-3074565de24b\") " Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.549227 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dda2cad1-cdbe-48f8-ba4c-3074565de24b-utilities\") pod \"dda2cad1-cdbe-48f8-ba4c-3074565de24b\" (UID: \"dda2cad1-cdbe-48f8-ba4c-3074565de24b\") " Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.549289 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2cll\" (UniqueName: \"kubernetes.io/projected/dda2cad1-cdbe-48f8-ba4c-3074565de24b-kube-api-access-k2cll\") pod \"dda2cad1-cdbe-48f8-ba4c-3074565de24b\" (UID: \"dda2cad1-cdbe-48f8-ba4c-3074565de24b\") " Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.549323 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vmhq\" (UniqueName: \"kubernetes.io/projected/75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c-kube-api-access-7vmhq\") pod \"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c\" (UID: \"75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c\") " Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.553255 4971 scope.go:117] "RemoveContainer" containerID="29e7ceaaca90e154c851484122ef5b8c6e19cc9745538cb09b85a41163e48ecb" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.553586 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c-utilities" (OuterVolumeSpecName: "utilities") pod "75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c" (UID: "75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.553943 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dda2cad1-cdbe-48f8-ba4c-3074565de24b-utilities" (OuterVolumeSpecName: "utilities") pod "dda2cad1-cdbe-48f8-ba4c-3074565de24b" (UID: "dda2cad1-cdbe-48f8-ba4c-3074565de24b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.556495 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c-kube-api-access-7vmhq" (OuterVolumeSpecName: "kube-api-access-7vmhq") pod "75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c" (UID: "75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c"). InnerVolumeSpecName "kube-api-access-7vmhq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.558152 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dda2cad1-cdbe-48f8-ba4c-3074565de24b-kube-api-access-k2cll" (OuterVolumeSpecName: "kube-api-access-k2cll") pod "dda2cad1-cdbe-48f8-ba4c-3074565de24b" (UID: "dda2cad1-cdbe-48f8-ba4c-3074565de24b"). InnerVolumeSpecName "kube-api-access-k2cll". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.604171 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c" (UID: "75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.614706 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dda2cad1-cdbe-48f8-ba4c-3074565de24b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dda2cad1-cdbe-48f8-ba4c-3074565de24b" (UID: "dda2cad1-cdbe-48f8-ba4c-3074565de24b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.623344 4971 scope.go:117] "RemoveContainer" containerID="8d6e0bc042f00624923d90d28f208891f9ce5c16cec09fe34ef97619918c5525" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.649651 4971 scope.go:117] "RemoveContainer" containerID="21a5ce27585e5be59ed538abfd63d69c11d9404e70470caf9327b63f1cc0100b" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.650148 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2cll\" (UniqueName: \"kubernetes.io/projected/dda2cad1-cdbe-48f8-ba4c-3074565de24b-kube-api-access-k2cll\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.650178 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vmhq\" (UniqueName: \"kubernetes.io/projected/75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c-kube-api-access-7vmhq\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.650188 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.650196 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.650206 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dda2cad1-cdbe-48f8-ba4c-3074565de24b-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.650214 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dda2cad1-cdbe-48f8-ba4c-3074565de24b-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.666671 4971 scope.go:117] "RemoveContainer" containerID="8b6d4d5ee6e80b6a51d1224965c7ebd63fb30bfee37288113df7011e7d937110" Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.851149 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dvz2k"] Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.852239 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dvz2k"] Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.863523 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bhc8q"] Nov 27 06:56:28 crc kubenswrapper[4971]: I1127 06:56:28.872515 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bhc8q"] Nov 27 06:56:29 crc kubenswrapper[4971]: I1127 06:56:29.607690 4971 patch_prober.go:28] interesting pod/controller-manager-7cc94f8bd7-2qxbk container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.56:8443/healthz\": dial tcp 10.217.0.56:8443: connect: connection refused" start-of-body= Nov 27 06:56:29 crc kubenswrapper[4971]: I1127 06:56:29.607748 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" podUID="322ed97d-f0c9-4065-8c04-a80357e8783e" containerName="controller-manager" 
probeResult="failure" output="Get \"https://10.217.0.56:8443/healthz\": dial tcp 10.217.0.56:8443: connect: connection refused" Nov 27 06:56:30 crc kubenswrapper[4971]: I1127 06:56:30.558631 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c" path="/var/lib/kubelet/pods/75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c/volumes" Nov 27 06:56:30 crc kubenswrapper[4971]: I1127 06:56:30.559774 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dda2cad1-cdbe-48f8-ba4c-3074565de24b" path="/var/lib/kubelet/pods/dda2cad1-cdbe-48f8-ba4c-3074565de24b/volumes" Nov 27 06:56:30 crc kubenswrapper[4971]: I1127 06:56:30.731366 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tgs6w" Nov 27 06:56:30 crc kubenswrapper[4971]: I1127 06:56:30.884853 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2a58f53-1c1f-48b1-82af-845bf9dd1d7e-utilities\") pod \"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e\" (UID: \"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e\") " Nov 27 06:56:30 crc kubenswrapper[4971]: I1127 06:56:30.884927 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2a58f53-1c1f-48b1-82af-845bf9dd1d7e-catalog-content\") pod \"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e\" (UID: \"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e\") " Nov 27 06:56:30 crc kubenswrapper[4971]: I1127 06:56:30.884971 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pfz6\" (UniqueName: \"kubernetes.io/projected/e2a58f53-1c1f-48b1-82af-845bf9dd1d7e-kube-api-access-5pfz6\") pod \"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e\" (UID: \"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e\") " Nov 27 06:56:30 crc kubenswrapper[4971]: I1127 06:56:30.886173 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2a58f53-1c1f-48b1-82af-845bf9dd1d7e-utilities" (OuterVolumeSpecName: "utilities") pod "e2a58f53-1c1f-48b1-82af-845bf9dd1d7e" (UID: "e2a58f53-1c1f-48b1-82af-845bf9dd1d7e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:56:30 crc kubenswrapper[4971]: I1127 06:56:30.891783 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2a58f53-1c1f-48b1-82af-845bf9dd1d7e-kube-api-access-5pfz6" (OuterVolumeSpecName: "kube-api-access-5pfz6") pod "e2a58f53-1c1f-48b1-82af-845bf9dd1d7e" (UID: "e2a58f53-1c1f-48b1-82af-845bf9dd1d7e"). InnerVolumeSpecName "kube-api-access-5pfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:56:30 crc kubenswrapper[4971]: I1127 06:56:30.900733 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2a58f53-1c1f-48b1-82af-845bf9dd1d7e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e2a58f53-1c1f-48b1-82af-845bf9dd1d7e" (UID: "e2a58f53-1c1f-48b1-82af-845bf9dd1d7e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:56:30 crc kubenswrapper[4971]: I1127 06:56:30.986883 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2a58f53-1c1f-48b1-82af-845bf9dd1d7e-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:30 crc kubenswrapper[4971]: I1127 06:56:30.986916 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2a58f53-1c1f-48b1-82af-845bf9dd1d7e-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:30 crc kubenswrapper[4971]: I1127 06:56:30.986929 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pfz6\" (UniqueName: \"kubernetes.io/projected/e2a58f53-1c1f-48b1-82af-845bf9dd1d7e-kube-api-access-5pfz6\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.328820 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.333081 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.494043 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/322ed97d-f0c9-4065-8c04-a80357e8783e-serving-cert\") pod \"322ed97d-f0c9-4065-8c04-a80357e8783e\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.494101 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svjz6\" (UniqueName: \"kubernetes.io/projected/ac22a34d-29da-422e-b0cd-acdbb0db83d4-kube-api-access-svjz6\") pod \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\" (UID: \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\") " Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.494150 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dccc\" (UniqueName: \"kubernetes.io/projected/322ed97d-f0c9-4065-8c04-a80357e8783e-kube-api-access-4dccc\") pod \"322ed97d-f0c9-4065-8c04-a80357e8783e\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.494178 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/322ed97d-f0c9-4065-8c04-a80357e8783e-config\") pod \"322ed97d-f0c9-4065-8c04-a80357e8783e\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.494235 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac22a34d-29da-422e-b0cd-acdbb0db83d4-serving-cert\") pod \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\" (UID: \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\") " Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.494269 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/322ed97d-f0c9-4065-8c04-a80357e8783e-proxy-ca-bundles\") pod \"322ed97d-f0c9-4065-8c04-a80357e8783e\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.494296 4971 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac22a34d-29da-422e-b0cd-acdbb0db83d4-config\") pod \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\" (UID: \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\") " Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.494328 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/322ed97d-f0c9-4065-8c04-a80357e8783e-client-ca\") pod \"322ed97d-f0c9-4065-8c04-a80357e8783e\" (UID: \"322ed97d-f0c9-4065-8c04-a80357e8783e\") " Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.494353 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ac22a34d-29da-422e-b0cd-acdbb0db83d4-client-ca\") pod \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\" (UID: \"ac22a34d-29da-422e-b0cd-acdbb0db83d4\") " Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.495112 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac22a34d-29da-422e-b0cd-acdbb0db83d4-client-ca" (OuterVolumeSpecName: "client-ca") pod "ac22a34d-29da-422e-b0cd-acdbb0db83d4" (UID: "ac22a34d-29da-422e-b0cd-acdbb0db83d4"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.495384 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/322ed97d-f0c9-4065-8c04-a80357e8783e-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "322ed97d-f0c9-4065-8c04-a80357e8783e" (UID: "322ed97d-f0c9-4065-8c04-a80357e8783e"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.495730 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac22a34d-29da-422e-b0cd-acdbb0db83d4-config" (OuterVolumeSpecName: "config") pod "ac22a34d-29da-422e-b0cd-acdbb0db83d4" (UID: "ac22a34d-29da-422e-b0cd-acdbb0db83d4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.495960 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/322ed97d-f0c9-4065-8c04-a80357e8783e-client-ca" (OuterVolumeSpecName: "client-ca") pod "322ed97d-f0c9-4065-8c04-a80357e8783e" (UID: "322ed97d-f0c9-4065-8c04-a80357e8783e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.496320 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/322ed97d-f0c9-4065-8c04-a80357e8783e-config" (OuterVolumeSpecName: "config") pod "322ed97d-f0c9-4065-8c04-a80357e8783e" (UID: "322ed97d-f0c9-4065-8c04-a80357e8783e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.498987 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac22a34d-29da-422e-b0cd-acdbb0db83d4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ac22a34d-29da-422e-b0cd-acdbb0db83d4" (UID: "ac22a34d-29da-422e-b0cd-acdbb0db83d4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.499124 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac22a34d-29da-422e-b0cd-acdbb0db83d4-kube-api-access-svjz6" (OuterVolumeSpecName: "kube-api-access-svjz6") pod "ac22a34d-29da-422e-b0cd-acdbb0db83d4" (UID: "ac22a34d-29da-422e-b0cd-acdbb0db83d4"). InnerVolumeSpecName "kube-api-access-svjz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.499430 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/322ed97d-f0c9-4065-8c04-a80357e8783e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "322ed97d-f0c9-4065-8c04-a80357e8783e" (UID: "322ed97d-f0c9-4065-8c04-a80357e8783e"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.501424 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/322ed97d-f0c9-4065-8c04-a80357e8783e-kube-api-access-4dccc" (OuterVolumeSpecName: "kube-api-access-4dccc") pod "322ed97d-f0c9-4065-8c04-a80357e8783e" (UID: "322ed97d-f0c9-4065-8c04-a80357e8783e"). InnerVolumeSpecName "kube-api-access-4dccc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.542960 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.542954 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk" event={"ID":"322ed97d-f0c9-4065-8c04-a80357e8783e","Type":"ContainerDied","Data":"a617492a814ec2e205579bafedb00e0380226e15089f8c4164a081016bd50e9c"} Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.543031 4971 scope.go:117] "RemoveContainer" containerID="df3360243e3ad3bfc212e21ab495bb2a8d2ea061ffaeb6d71ab14238cf7366bc" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.545739 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.545749 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp" event={"ID":"ac22a34d-29da-422e-b0cd-acdbb0db83d4","Type":"ContainerDied","Data":"298dd3e16fb0f2d6efb8bf5290e7ecaa775270208352c95ca5c4cca4f7382b71"} Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.547721 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tgs6w" event={"ID":"e2a58f53-1c1f-48b1-82af-845bf9dd1d7e","Type":"ContainerDied","Data":"bf8e00ed0a12887cb28ff1a361a28e698dde50447d249f9f20b355f2106c2912"} Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.547802 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tgs6w" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.554260 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"40313d1ab55e3e184ca9e9b1dba415d9d63c5d44213d0c1e944e6167764f0f6a"} Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.560738 4971 scope.go:117] "RemoveContainer" containerID="33246aadd14b4765995f93b4fa8c237544ad590bbba6e6a37f78d8a26ba7c454" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.579222 4971 scope.go:117] "RemoveContainer" containerID="991a6637fd7c721001a12797444e49f24fda324675d04497a2bffd8ddc0ffebc" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.601186 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp"] Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.601293 4971 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ac22a34d-29da-422e-b0cd-acdbb0db83d4-client-ca\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.601314 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/322ed97d-f0c9-4065-8c04-a80357e8783e-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.601324 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svjz6\" (UniqueName: \"kubernetes.io/projected/ac22a34d-29da-422e-b0cd-acdbb0db83d4-kube-api-access-svjz6\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.601333 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dccc\" (UniqueName: \"kubernetes.io/projected/322ed97d-f0c9-4065-8c04-a80357e8783e-kube-api-access-4dccc\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.601343 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/322ed97d-f0c9-4065-8c04-a80357e8783e-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.601351 4971 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac22a34d-29da-422e-b0cd-acdbb0db83d4-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.601359 4971 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/322ed97d-f0c9-4065-8c04-a80357e8783e-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.601367 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac22a34d-29da-422e-b0cd-acdbb0db83d4-config\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.601374 4971 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/322ed97d-f0c9-4065-8c04-a80357e8783e-client-ca\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.607954 4971 scope.go:117] "RemoveContainer" containerID="6c7887f4f79008fb38e69f64b462ef51474868f8f6e9e5a248260288e8b183a7" Nov 27 06:56:31 crc 
kubenswrapper[4971]: I1127 06:56:31.617461 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-84f54dd585-zv5rp"] Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.630988 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tgs6w"] Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.639724 4971 scope.go:117] "RemoveContainer" containerID="881e29e578423ae2e0165bc117506c7e05bbb5979da589e919a29b683b74055b" Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.647790 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tgs6w"] Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.675756 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk"] Nov 27 06:56:31 crc kubenswrapper[4971]: I1127 06:56:31.679240 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7cc94f8bd7-2qxbk"] Nov 27 06:56:32 crc kubenswrapper[4971]: I1127 06:56:32.561522 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="322ed97d-f0c9-4065-8c04-a80357e8783e" path="/var/lib/kubelet/pods/322ed97d-f0c9-4065-8c04-a80357e8783e/volumes" Nov 27 06:56:32 crc kubenswrapper[4971]: I1127 06:56:32.562469 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac22a34d-29da-422e-b0cd-acdbb0db83d4" path="/var/lib/kubelet/pods/ac22a34d-29da-422e-b0cd-acdbb0db83d4/volumes" Nov 27 06:56:32 crc kubenswrapper[4971]: I1127 06:56:32.563035 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2a58f53-1c1f-48b1-82af-845bf9dd1d7e" path="/var/lib/kubelet/pods/e2a58f53-1c1f-48b1-82af-845bf9dd1d7e/volumes" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.649943 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7f75565bfb-hz8ph"] Nov 27 06:56:34 crc kubenswrapper[4971]: E1127 06:56:34.650667 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a58f53-1c1f-48b1-82af-845bf9dd1d7e" containerName="extract-utilities" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.650696 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a58f53-1c1f-48b1-82af-845bf9dd1d7e" containerName="extract-utilities" Nov 27 06:56:34 crc kubenswrapper[4971]: E1127 06:56:34.650773 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a58f53-1c1f-48b1-82af-845bf9dd1d7e" containerName="extract-content" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.650796 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a58f53-1c1f-48b1-82af-845bf9dd1d7e" containerName="extract-content" Nov 27 06:56:34 crc kubenswrapper[4971]: E1127 06:56:34.650823 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9d2eabc-ab6d-4bda-b71e-5b32a68105de" containerName="registry-server" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.650837 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9d2eabc-ab6d-4bda-b71e-5b32a68105de" containerName="registry-server" Nov 27 06:56:34 crc kubenswrapper[4971]: E1127 06:56:34.650866 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a58f53-1c1f-48b1-82af-845bf9dd1d7e" containerName="registry-server" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.650887 4971 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="e2a58f53-1c1f-48b1-82af-845bf9dd1d7e" containerName="registry-server" Nov 27 06:56:34 crc kubenswrapper[4971]: E1127 06:56:34.650913 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c" containerName="extract-utilities" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.650931 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c" containerName="extract-utilities" Nov 27 06:56:34 crc kubenswrapper[4971]: E1127 06:56:34.650955 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9d2eabc-ab6d-4bda-b71e-5b32a68105de" containerName="extract-content" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.650973 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9d2eabc-ab6d-4bda-b71e-5b32a68105de" containerName="extract-content" Nov 27 06:56:34 crc kubenswrapper[4971]: E1127 06:56:34.650994 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dda2cad1-cdbe-48f8-ba4c-3074565de24b" containerName="extract-content" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.651008 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="dda2cad1-cdbe-48f8-ba4c-3074565de24b" containerName="extract-content" Nov 27 06:56:34 crc kubenswrapper[4971]: E1127 06:56:34.651032 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac22a34d-29da-422e-b0cd-acdbb0db83d4" containerName="route-controller-manager" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.651049 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac22a34d-29da-422e-b0cd-acdbb0db83d4" containerName="route-controller-manager" Nov 27 06:56:34 crc kubenswrapper[4971]: E1127 06:56:34.651075 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dda2cad1-cdbe-48f8-ba4c-3074565de24b" containerName="extract-utilities" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.651092 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="dda2cad1-cdbe-48f8-ba4c-3074565de24b" containerName="extract-utilities" Nov 27 06:56:34 crc kubenswrapper[4971]: E1127 06:56:34.651116 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c" containerName="extract-content" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.651131 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c" containerName="extract-content" Nov 27 06:56:34 crc kubenswrapper[4971]: E1127 06:56:34.651146 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c" containerName="registry-server" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.651160 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c" containerName="registry-server" Nov 27 06:56:34 crc kubenswrapper[4971]: E1127 06:56:34.651178 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="322ed97d-f0c9-4065-8c04-a80357e8783e" containerName="controller-manager" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.651190 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="322ed97d-f0c9-4065-8c04-a80357e8783e" containerName="controller-manager" Nov 27 06:56:34 crc kubenswrapper[4971]: E1127 06:56:34.651206 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9d2eabc-ab6d-4bda-b71e-5b32a68105de" containerName="extract-utilities" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 
06:56:34.651219 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9d2eabc-ab6d-4bda-b71e-5b32a68105de" containerName="extract-utilities" Nov 27 06:56:34 crc kubenswrapper[4971]: E1127 06:56:34.651238 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dda2cad1-cdbe-48f8-ba4c-3074565de24b" containerName="registry-server" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.651251 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="dda2cad1-cdbe-48f8-ba4c-3074565de24b" containerName="registry-server" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.651431 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a58f53-1c1f-48b1-82af-845bf9dd1d7e" containerName="registry-server" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.651453 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="dda2cad1-cdbe-48f8-ba4c-3074565de24b" containerName="registry-server" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.651478 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="75ab6104-e49a-4ad9-adfa-d8b4b6aedd2c" containerName="registry-server" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.651498 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac22a34d-29da-422e-b0cd-acdbb0db83d4" containerName="route-controller-manager" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.651514 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="322ed97d-f0c9-4065-8c04-a80357e8783e" containerName="controller-manager" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.651564 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9d2eabc-ab6d-4bda-b71e-5b32a68105de" containerName="registry-server" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.652169 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.654511 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.656025 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.656679 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.657020 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.657173 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.657379 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.658027 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb"] Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.659055 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.661273 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.661416 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.661627 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.661746 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.661858 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.661985 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.664308 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f75565bfb-hz8ph"] Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.666060 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.673814 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb"] Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.746947 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e745c5f7-d950-4cf7-9d16-d5668e22f053-client-ca\") pod \"controller-manager-7f75565bfb-hz8ph\" (UID: \"e745c5f7-d950-4cf7-9d16-d5668e22f053\") " pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.746998 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/225c2e39-491f-4eb4-8f9c-67ec81c30ec0-serving-cert\") pod \"route-controller-manager-58b88c7968-46ssb\" (UID: \"225c2e39-491f-4eb4-8f9c-67ec81c30ec0\") " pod="openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.747040 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c96nw\" (UniqueName: \"kubernetes.io/projected/e745c5f7-d950-4cf7-9d16-d5668e22f053-kube-api-access-c96nw\") pod \"controller-manager-7f75565bfb-hz8ph\" (UID: \"e745c5f7-d950-4cf7-9d16-d5668e22f053\") " pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.747076 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9952s\" (UniqueName: \"kubernetes.io/projected/225c2e39-491f-4eb4-8f9c-67ec81c30ec0-kube-api-access-9952s\") pod \"route-controller-manager-58b88c7968-46ssb\" (UID: \"225c2e39-491f-4eb4-8f9c-67ec81c30ec0\") 
" pod="openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.747165 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/225c2e39-491f-4eb4-8f9c-67ec81c30ec0-client-ca\") pod \"route-controller-manager-58b88c7968-46ssb\" (UID: \"225c2e39-491f-4eb4-8f9c-67ec81c30ec0\") " pod="openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.747203 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e745c5f7-d950-4cf7-9d16-d5668e22f053-config\") pod \"controller-manager-7f75565bfb-hz8ph\" (UID: \"e745c5f7-d950-4cf7-9d16-d5668e22f053\") " pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.747225 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/225c2e39-491f-4eb4-8f9c-67ec81c30ec0-config\") pod \"route-controller-manager-58b88c7968-46ssb\" (UID: \"225c2e39-491f-4eb4-8f9c-67ec81c30ec0\") " pod="openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.747247 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e745c5f7-d950-4cf7-9d16-d5668e22f053-serving-cert\") pod \"controller-manager-7f75565bfb-hz8ph\" (UID: \"e745c5f7-d950-4cf7-9d16-d5668e22f053\") " pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.747261 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e745c5f7-d950-4cf7-9d16-d5668e22f053-proxy-ca-bundles\") pod \"controller-manager-7f75565bfb-hz8ph\" (UID: \"e745c5f7-d950-4cf7-9d16-d5668e22f053\") " pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.848486 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e745c5f7-d950-4cf7-9d16-d5668e22f053-client-ca\") pod \"controller-manager-7f75565bfb-hz8ph\" (UID: \"e745c5f7-d950-4cf7-9d16-d5668e22f053\") " pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.848555 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/225c2e39-491f-4eb4-8f9c-67ec81c30ec0-serving-cert\") pod \"route-controller-manager-58b88c7968-46ssb\" (UID: \"225c2e39-491f-4eb4-8f9c-67ec81c30ec0\") " pod="openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.848579 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c96nw\" (UniqueName: \"kubernetes.io/projected/e745c5f7-d950-4cf7-9d16-d5668e22f053-kube-api-access-c96nw\") pod \"controller-manager-7f75565bfb-hz8ph\" (UID: \"e745c5f7-d950-4cf7-9d16-d5668e22f053\") " 
pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.848610 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9952s\" (UniqueName: \"kubernetes.io/projected/225c2e39-491f-4eb4-8f9c-67ec81c30ec0-kube-api-access-9952s\") pod \"route-controller-manager-58b88c7968-46ssb\" (UID: \"225c2e39-491f-4eb4-8f9c-67ec81c30ec0\") " pod="openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.848669 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/225c2e39-491f-4eb4-8f9c-67ec81c30ec0-client-ca\") pod \"route-controller-manager-58b88c7968-46ssb\" (UID: \"225c2e39-491f-4eb4-8f9c-67ec81c30ec0\") " pod="openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.848699 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e745c5f7-d950-4cf7-9d16-d5668e22f053-config\") pod \"controller-manager-7f75565bfb-hz8ph\" (UID: \"e745c5f7-d950-4cf7-9d16-d5668e22f053\") " pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.848723 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/225c2e39-491f-4eb4-8f9c-67ec81c30ec0-config\") pod \"route-controller-manager-58b88c7968-46ssb\" (UID: \"225c2e39-491f-4eb4-8f9c-67ec81c30ec0\") " pod="openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.848754 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e745c5f7-d950-4cf7-9d16-d5668e22f053-serving-cert\") pod \"controller-manager-7f75565bfb-hz8ph\" (UID: \"e745c5f7-d950-4cf7-9d16-d5668e22f053\") " pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.848775 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e745c5f7-d950-4cf7-9d16-d5668e22f053-proxy-ca-bundles\") pod \"controller-manager-7f75565bfb-hz8ph\" (UID: \"e745c5f7-d950-4cf7-9d16-d5668e22f053\") " pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.849819 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e745c5f7-d950-4cf7-9d16-d5668e22f053-proxy-ca-bundles\") pod \"controller-manager-7f75565bfb-hz8ph\" (UID: \"e745c5f7-d950-4cf7-9d16-d5668e22f053\") " pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.850103 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e745c5f7-d950-4cf7-9d16-d5668e22f053-client-ca\") pod \"controller-manager-7f75565bfb-hz8ph\" (UID: \"e745c5f7-d950-4cf7-9d16-d5668e22f053\") " pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.850507 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e745c5f7-d950-4cf7-9d16-d5668e22f053-config\") pod \"controller-manager-7f75565bfb-hz8ph\" (UID: \"e745c5f7-d950-4cf7-9d16-d5668e22f053\") " pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.851711 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/225c2e39-491f-4eb4-8f9c-67ec81c30ec0-client-ca\") pod \"route-controller-manager-58b88c7968-46ssb\" (UID: \"225c2e39-491f-4eb4-8f9c-67ec81c30ec0\") " pod="openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.852386 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/225c2e39-491f-4eb4-8f9c-67ec81c30ec0-config\") pod \"route-controller-manager-58b88c7968-46ssb\" (UID: \"225c2e39-491f-4eb4-8f9c-67ec81c30ec0\") " pod="openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.859170 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/225c2e39-491f-4eb4-8f9c-67ec81c30ec0-serving-cert\") pod \"route-controller-manager-58b88c7968-46ssb\" (UID: \"225c2e39-491f-4eb4-8f9c-67ec81c30ec0\") " pod="openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.859863 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e745c5f7-d950-4cf7-9d16-d5668e22f053-serving-cert\") pod \"controller-manager-7f75565bfb-hz8ph\" (UID: \"e745c5f7-d950-4cf7-9d16-d5668e22f053\") " pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.866010 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c96nw\" (UniqueName: \"kubernetes.io/projected/e745c5f7-d950-4cf7-9d16-d5668e22f053-kube-api-access-c96nw\") pod \"controller-manager-7f75565bfb-hz8ph\" (UID: \"e745c5f7-d950-4cf7-9d16-d5668e22f053\") " pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.868225 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9952s\" (UniqueName: \"kubernetes.io/projected/225c2e39-491f-4eb4-8f9c-67ec81c30ec0-kube-api-access-9952s\") pod \"route-controller-manager-58b88c7968-46ssb\" (UID: \"225c2e39-491f-4eb4-8f9c-67ec81c30ec0\") " pod="openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.989799 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" Nov 27 06:56:34 crc kubenswrapper[4971]: I1127 06:56:34.998447 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb" Nov 27 06:56:35 crc kubenswrapper[4971]: I1127 06:56:35.381000 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb"] Nov 27 06:56:35 crc kubenswrapper[4971]: I1127 06:56:35.384671 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f75565bfb-hz8ph"] Nov 27 06:56:35 crc kubenswrapper[4971]: W1127 06:56:35.391130 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode745c5f7_d950_4cf7_9d16_d5668e22f053.slice/crio-149c90a58f96e125efe9ab5400556d20281067a75f25acd3b4fb847c047d0da5 WatchSource:0}: Error finding container 149c90a58f96e125efe9ab5400556d20281067a75f25acd3b4fb847c047d0da5: Status 404 returned error can't find the container with id 149c90a58f96e125efe9ab5400556d20281067a75f25acd3b4fb847c047d0da5 Nov 27 06:56:35 crc kubenswrapper[4971]: I1127 06:56:35.579950 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" event={"ID":"e745c5f7-d950-4cf7-9d16-d5668e22f053","Type":"ContainerStarted","Data":"149c90a58f96e125efe9ab5400556d20281067a75f25acd3b4fb847c047d0da5"} Nov 27 06:56:35 crc kubenswrapper[4971]: I1127 06:56:35.581327 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb" event={"ID":"225c2e39-491f-4eb4-8f9c-67ec81c30ec0","Type":"ContainerStarted","Data":"5c533fcded1254b232bef5e0de7616cf964e17412341eb20cbccbcd05f903f95"} Nov 27 06:56:35 crc kubenswrapper[4971]: I1127 06:56:35.581350 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb" event={"ID":"225c2e39-491f-4eb4-8f9c-67ec81c30ec0","Type":"ContainerStarted","Data":"d418740976f870dc0f39df8886d13efbb38e5868e638a6e80c581a2a6cf418ac"} Nov 27 06:56:36 crc kubenswrapper[4971]: I1127 06:56:36.586933 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" event={"ID":"e745c5f7-d950-4cf7-9d16-d5668e22f053","Type":"ContainerStarted","Data":"13c9adef55b88f4121a82d1bdd854f957e4be0a0ea232bc8302d376b4b16eaf8"} Nov 27 06:56:36 crc kubenswrapper[4971]: I1127 06:56:36.587250 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb" Nov 27 06:56:36 crc kubenswrapper[4971]: I1127 06:56:36.587265 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" Nov 27 06:56:36 crc kubenswrapper[4971]: I1127 06:56:36.594420 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb" Nov 27 06:56:36 crc kubenswrapper[4971]: I1127 06:56:36.595228 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" Nov 27 06:56:36 crc kubenswrapper[4971]: I1127 06:56:36.626486 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7f75565bfb-hz8ph" podStartSLOduration=9.626468338 podStartE2EDuration="9.626468338s" 
podCreationTimestamp="2025-11-27 06:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:56:36.608937028 +0000 UTC m=+234.800980946" watchObservedRunningTime="2025-11-27 06:56:36.626468338 +0000 UTC m=+234.818512256" Nov 27 06:56:36 crc kubenswrapper[4971]: I1127 06:56:36.627292 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-58b88c7968-46ssb" podStartSLOduration=9.627285644 podStartE2EDuration="9.627285644s" podCreationTimestamp="2025-11-27 06:56:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:56:36.622633566 +0000 UTC m=+234.814677504" watchObservedRunningTime="2025-11-27 06:56:36.627285644 +0000 UTC m=+234.819329562" Nov 27 06:56:36 crc kubenswrapper[4971]: I1127 06:56:36.885306 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" podUID="f268e2fa-798d-4438-9225-24c4a07ea999" containerName="oauth-openshift" containerID="cri-o://38490efdf6206f990845a6d39d721e3a5d0d5bd0f79f3e9993915aa041f8faaf" gracePeriod=15 Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.235836 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.377218 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-template-provider-selection\") pod \"f268e2fa-798d-4438-9225-24c4a07ea999\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.377255 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f268e2fa-798d-4438-9225-24c4a07ea999-audit-dir\") pod \"f268e2fa-798d-4438-9225-24c4a07ea999\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.377274 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-router-certs\") pod \"f268e2fa-798d-4438-9225-24c4a07ea999\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.377297 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-trusted-ca-bundle\") pod \"f268e2fa-798d-4438-9225-24c4a07ea999\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.377323 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-session\") pod \"f268e2fa-798d-4438-9225-24c4a07ea999\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.377340 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-idp-0-file-data\") pod \"f268e2fa-798d-4438-9225-24c4a07ea999\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.377365 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-cliconfig\") pod \"f268e2fa-798d-4438-9225-24c4a07ea999\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.377364 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f268e2fa-798d-4438-9225-24c4a07ea999-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f268e2fa-798d-4438-9225-24c4a07ea999" (UID: "f268e2fa-798d-4438-9225-24c4a07ea999"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.377406 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9q9qk\" (UniqueName: \"kubernetes.io/projected/f268e2fa-798d-4438-9225-24c4a07ea999-kube-api-access-9q9qk\") pod \"f268e2fa-798d-4438-9225-24c4a07ea999\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.377458 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-audit-policies\") pod \"f268e2fa-798d-4438-9225-24c4a07ea999\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.377485 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-serving-cert\") pod \"f268e2fa-798d-4438-9225-24c4a07ea999\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.377517 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-template-error\") pod \"f268e2fa-798d-4438-9225-24c4a07ea999\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.377562 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-ocp-branding-template\") pod \"f268e2fa-798d-4438-9225-24c4a07ea999\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.377596 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-template-login\") pod \"f268e2fa-798d-4438-9225-24c4a07ea999\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.377661 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-service-ca\") pod \"f268e2fa-798d-4438-9225-24c4a07ea999\" (UID: \"f268e2fa-798d-4438-9225-24c4a07ea999\") " Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.377897 4971 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f268e2fa-798d-4438-9225-24c4a07ea999-audit-dir\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.379584 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "f268e2fa-798d-4438-9225-24c4a07ea999" (UID: "f268e2fa-798d-4438-9225-24c4a07ea999"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.379776 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "f268e2fa-798d-4438-9225-24c4a07ea999" (UID: "f268e2fa-798d-4438-9225-24c4a07ea999"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.380100 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "f268e2fa-798d-4438-9225-24c4a07ea999" (UID: "f268e2fa-798d-4438-9225-24c4a07ea999"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.380178 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "f268e2fa-798d-4438-9225-24c4a07ea999" (UID: "f268e2fa-798d-4438-9225-24c4a07ea999"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.386075 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "f268e2fa-798d-4438-9225-24c4a07ea999" (UID: "f268e2fa-798d-4438-9225-24c4a07ea999"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.386143 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f268e2fa-798d-4438-9225-24c4a07ea999-kube-api-access-9q9qk" (OuterVolumeSpecName: "kube-api-access-9q9qk") pod "f268e2fa-798d-4438-9225-24c4a07ea999" (UID: "f268e2fa-798d-4438-9225-24c4a07ea999"). InnerVolumeSpecName "kube-api-access-9q9qk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.386359 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "f268e2fa-798d-4438-9225-24c4a07ea999" (UID: "f268e2fa-798d-4438-9225-24c4a07ea999"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.386446 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "f268e2fa-798d-4438-9225-24c4a07ea999" (UID: "f268e2fa-798d-4438-9225-24c4a07ea999"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.386713 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "f268e2fa-798d-4438-9225-24c4a07ea999" (UID: "f268e2fa-798d-4438-9225-24c4a07ea999"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.387101 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "f268e2fa-798d-4438-9225-24c4a07ea999" (UID: "f268e2fa-798d-4438-9225-24c4a07ea999"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.387138 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "f268e2fa-798d-4438-9225-24c4a07ea999" (UID: "f268e2fa-798d-4438-9225-24c4a07ea999"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.387338 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "f268e2fa-798d-4438-9225-24c4a07ea999" (UID: "f268e2fa-798d-4438-9225-24c4a07ea999"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.389679 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "f268e2fa-798d-4438-9225-24c4a07ea999" (UID: "f268e2fa-798d-4438-9225-24c4a07ea999"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.478505 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.478563 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.478578 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.478589 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.478599 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.478609 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.478619 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.478628 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.478647 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9q9qk\" (UniqueName: \"kubernetes.io/projected/f268e2fa-798d-4438-9225-24c4a07ea999-kube-api-access-9q9qk\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.478659 4971 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f268e2fa-798d-4438-9225-24c4a07ea999-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.478670 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.478681 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.478696 4971 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f268e2fa-798d-4438-9225-24c4a07ea999-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.599809 4971 generic.go:334] "Generic (PLEG): container finished" podID="f268e2fa-798d-4438-9225-24c4a07ea999" containerID="38490efdf6206f990845a6d39d721e3a5d0d5bd0f79f3e9993915aa041f8faaf" exitCode=0 Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.599875 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.599917 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" event={"ID":"f268e2fa-798d-4438-9225-24c4a07ea999","Type":"ContainerDied","Data":"38490efdf6206f990845a6d39d721e3a5d0d5bd0f79f3e9993915aa041f8faaf"} Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.599960 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-h6p78" event={"ID":"f268e2fa-798d-4438-9225-24c4a07ea999","Type":"ContainerDied","Data":"f3c7a695152dd73ff1f1887a8e142968a5fcdc5d0c0573b8a5c5473994381203"} Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.599985 4971 scope.go:117] "RemoveContainer" containerID="38490efdf6206f990845a6d39d721e3a5d0d5bd0f79f3e9993915aa041f8faaf" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.623227 4971 scope.go:117] "RemoveContainer" containerID="38490efdf6206f990845a6d39d721e3a5d0d5bd0f79f3e9993915aa041f8faaf" Nov 27 06:56:37 crc kubenswrapper[4971]: E1127 06:56:37.623737 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38490efdf6206f990845a6d39d721e3a5d0d5bd0f79f3e9993915aa041f8faaf\": container with ID starting with 38490efdf6206f990845a6d39d721e3a5d0d5bd0f79f3e9993915aa041f8faaf not found: ID does not exist" containerID="38490efdf6206f990845a6d39d721e3a5d0d5bd0f79f3e9993915aa041f8faaf" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.623787 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38490efdf6206f990845a6d39d721e3a5d0d5bd0f79f3e9993915aa041f8faaf"} err="failed to get container status \"38490efdf6206f990845a6d39d721e3a5d0d5bd0f79f3e9993915aa041f8faaf\": rpc error: code = NotFound desc = could not find container \"38490efdf6206f990845a6d39d721e3a5d0d5bd0f79f3e9993915aa041f8faaf\": container with ID starting with 38490efdf6206f990845a6d39d721e3a5d0d5bd0f79f3e9993915aa041f8faaf not found: ID does not exist" Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.640823 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-h6p78"] Nov 27 06:56:37 crc kubenswrapper[4971]: I1127 06:56:37.644671 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-h6p78"] Nov 27 06:56:38 crc kubenswrapper[4971]: I1127 06:56:38.558090 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f268e2fa-798d-4438-9225-24c4a07ea999" 
path="/var/lib/kubelet/pods/f268e2fa-798d-4438-9225-24c4a07ea999/volumes" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.651432 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw"] Nov 27 06:56:40 crc kubenswrapper[4971]: E1127 06:56:40.651751 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f268e2fa-798d-4438-9225-24c4a07ea999" containerName="oauth-openshift" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.651775 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f268e2fa-798d-4438-9225-24c4a07ea999" containerName="oauth-openshift" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.651973 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f268e2fa-798d-4438-9225-24c4a07ea999" containerName="oauth-openshift" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.652513 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.656791 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.656823 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.656850 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.657267 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.657562 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.657715 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.658308 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.658855 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.659599 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.659652 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.660627 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.661276 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.670286 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Nov 27 06:56:40 crc kubenswrapper[4971]: 
I1127 06:56:40.670489 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw"] Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.674651 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.685015 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.821557 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.821615 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-user-template-login\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.821641 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9txz6\" (UniqueName: \"kubernetes.io/projected/6df51c47-dfe6-4996-874e-0766a1bd4232-kube-api-access-9txz6\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.821671 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-cliconfig\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.821691 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6df51c47-dfe6-4996-874e-0766a1bd4232-audit-policies\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.821714 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.821731 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-user-template-error\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.821766 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6df51c47-dfe6-4996-874e-0766a1bd4232-audit-dir\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.821794 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-router-certs\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.821816 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.821835 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-service-ca\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.821851 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-session\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.821872 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.821896 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-serving-cert\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.923077 4971 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-service-ca\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.923112 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-session\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.923138 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.923160 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-serving-cert\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.923180 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.923204 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-user-template-login\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.923225 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9txz6\" (UniqueName: \"kubernetes.io/projected/6df51c47-dfe6-4996-874e-0766a1bd4232-kube-api-access-9txz6\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.923247 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-cliconfig\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.923829 4971 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6df51c47-dfe6-4996-874e-0766a1bd4232-audit-policies\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.923914 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.923968 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-user-template-error\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.923982 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-cliconfig\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.924008 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6df51c47-dfe6-4996-874e-0766a1bd4232-audit-dir\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.924013 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-service-ca\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.924091 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-router-certs\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.924117 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6df51c47-dfe6-4996-874e-0766a1bd4232-audit-dir\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.924125 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.924453 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6df51c47-dfe6-4996-874e-0766a1bd4232-audit-policies\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.924847 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.928606 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-user-template-error\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.928828 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.929054 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.929391 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-serving-cert\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.929797 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-user-template-login\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.930044 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-router-certs\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.930240 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.934257 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6df51c47-dfe6-4996-874e-0766a1bd4232-v4-0-config-system-session\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.944647 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9txz6\" (UniqueName: \"kubernetes.io/projected/6df51c47-dfe6-4996-874e-0766a1bd4232-kube-api-access-9txz6\") pod \"oauth-openshift-f94d7b7c5-z7ttw\" (UID: \"6df51c47-dfe6-4996-874e-0766a1bd4232\") " pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:40 crc kubenswrapper[4971]: I1127 06:56:40.987499 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:41 crc kubenswrapper[4971]: I1127 06:56:41.376125 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw"] Nov 27 06:56:41 crc kubenswrapper[4971]: W1127 06:56:41.381604 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6df51c47_dfe6_4996_874e_0766a1bd4232.slice/crio-bd240793b326a4df91c7f54e79fe60a79c6ba7cd4180308d8b7361b669ba89b9 WatchSource:0}: Error finding container bd240793b326a4df91c7f54e79fe60a79c6ba7cd4180308d8b7361b669ba89b9: Status 404 returned error can't find the container with id bd240793b326a4df91c7f54e79fe60a79c6ba7cd4180308d8b7361b669ba89b9 Nov 27 06:56:41 crc kubenswrapper[4971]: I1127 06:56:41.623923 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" event={"ID":"6df51c47-dfe6-4996-874e-0766a1bd4232","Type":"ContainerStarted","Data":"bd240793b326a4df91c7f54e79fe60a79c6ba7cd4180308d8b7361b669ba89b9"} Nov 27 06:56:42 crc kubenswrapper[4971]: I1127 06:56:42.629442 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" event={"ID":"6df51c47-dfe6-4996-874e-0766a1bd4232","Type":"ContainerStarted","Data":"29e5af3cdca7c971d87d02a2e2173594456470dbbad756d98938bc9eb74e7f46"} Nov 27 06:56:42 crc kubenswrapper[4971]: I1127 06:56:42.629788 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:42 crc kubenswrapper[4971]: I1127 06:56:42.634946 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" Nov 27 06:56:42 crc kubenswrapper[4971]: I1127 
06:56:42.651440 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" podStartSLOduration=31.651420081 podStartE2EDuration="31.651420081s" podCreationTimestamp="2025-11-27 06:56:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:56:42.647724913 +0000 UTC m=+240.839768841" watchObservedRunningTime="2025-11-27 06:56:42.651420081 +0000 UTC m=+240.843464019" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.789429 4971 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.791012 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.791522 4971 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.792254 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3" gracePeriod=15 Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.792331 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4" gracePeriod=15 Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.792277 4971 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.792349 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1" gracePeriod=15 Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.792421 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c" gracePeriod=15 Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.792335 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af" gracePeriod=15 Nov 27 06:56:43 crc kubenswrapper[4971]: E1127 06:56:43.794950 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.794983 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-cert-syncer" Nov 27 06:56:43 crc kubenswrapper[4971]: E1127 06:56:43.795007 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.795020 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 27 06:56:43 crc kubenswrapper[4971]: E1127 06:56:43.795045 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.795056 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Nov 27 06:56:43 crc kubenswrapper[4971]: E1127 06:56:43.795079 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.795091 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Nov 27 06:56:43 crc kubenswrapper[4971]: E1127 06:56:43.795115 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.795127 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Nov 27 06:56:43 crc kubenswrapper[4971]: E1127 06:56:43.795154 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.795173 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Nov 27 06:56:43 crc kubenswrapper[4971]: E1127 06:56:43.795279 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.795291 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.797250 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.797298 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.797327 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.797344 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.797355 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-check-endpoints" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.797377 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Nov 27 06:56:43 crc kubenswrapper[4971]: E1127 06:56:43.846595 4971 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.50:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.960720 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.960765 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.960790 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.960850 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.960889 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.960924 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.960949 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:56:43 crc kubenswrapper[4971]: I1127 06:56:43.961041 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.062752 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.062828 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.062932 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.063034 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.063053 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.063083 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.063126 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.063170 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.063244 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.063328 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.063373 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.063416 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.063420 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.063387 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.063487 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.063520 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.147728 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:56:44 crc kubenswrapper[4971]: W1127 06:56:44.173955 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-73048943136b1e2359d356380d49995ceeef4c4732c6ee6a942a68da608d536a WatchSource:0}: Error finding container 73048943136b1e2359d356380d49995ceeef4c4732c6ee6a942a68da608d536a: Status 404 returned error can't find the container with id 73048943136b1e2359d356380d49995ceeef4c4732c6ee6a942a68da608d536a Nov 27 06:56:44 crc kubenswrapper[4971]: E1127 06:56:44.178591 4971 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.50:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187bcabfcd52466c openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-27 06:56:44.1779667 +0000 UTC m=+242.370010658,LastTimestamp:2025-11-27 06:56:44.1779667 +0000 UTC m=+242.370010658,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.642272 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"a7aa3dfad75690eeaeae812838ab898bfecae62227727f5bb58a697f249a2c66"} Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.642618 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"73048943136b1e2359d356380d49995ceeef4c4732c6ee6a942a68da608d536a"} Nov 27 06:56:44 crc kubenswrapper[4971]: E1127 06:56:44.643585 4971 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.50:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.646511 4971 generic.go:334] "Generic (PLEG): container finished" podID="91ba58ad-1ec1-4990-a641-0e8a70e8c8c5" containerID="59a1b9085bf8e6a260c4229fe13d634c01f84f622007b0e5fde8634639e62e97" exitCode=0 Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.646594 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"91ba58ad-1ec1-4990-a641-0e8a70e8c8c5","Type":"ContainerDied","Data":"59a1b9085bf8e6a260c4229fe13d634c01f84f622007b0e5fde8634639e62e97"} Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.647233 4971 status_manager.go:851] "Failed to get status for pod" 
podUID="91ba58ad-1ec1-4990-a641-0e8a70e8c8c5" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.648855 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.650338 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.651229 4971 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af" exitCode=0 Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.651251 4971 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c" exitCode=0 Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.651259 4971 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4" exitCode=0 Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.651267 4971 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1" exitCode=2 Nov 27 06:56:44 crc kubenswrapper[4971]: I1127 06:56:44.651336 4971 scope.go:117] "RemoveContainer" containerID="fd3521e171647cb95d139f2863cc1c533eae6f53bd5ea9d1940118b70cac0f54" Nov 27 06:56:45 crc kubenswrapper[4971]: E1127 06:56:45.592173 4971 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.50:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" volumeName="registry-storage" Nov 27 06:56:45 crc kubenswrapper[4971]: I1127 06:56:45.658430 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.103509 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.104622 4971 status_manager.go:851] "Failed to get status for pod" podUID="91ba58ad-1ec1-4990-a641-0e8a70e8c8c5" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.109908 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.110666 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.111199 4971 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.111516 4971 status_manager.go:851] "Failed to get status for pod" podUID="91ba58ad-1ec1-4990-a641-0e8a70e8c8c5" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.290400 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/91ba58ad-1ec1-4990-a641-0e8a70e8c8c5-kube-api-access\") pod \"91ba58ad-1ec1-4990-a641-0e8a70e8c8c5\" (UID: \"91ba58ad-1ec1-4990-a641-0e8a70e8c8c5\") " Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.290453 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.290485 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.290553 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.290592 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/91ba58ad-1ec1-4990-a641-0e8a70e8c8c5-var-lock\") pod \"91ba58ad-1ec1-4990-a641-0e8a70e8c8c5\" (UID: \"91ba58ad-1ec1-4990-a641-0e8a70e8c8c5\") " Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.290610 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: 
\"kubernetes.io/host-path/91ba58ad-1ec1-4990-a641-0e8a70e8c8c5-kubelet-dir\") pod \"91ba58ad-1ec1-4990-a641-0e8a70e8c8c5\" (UID: \"91ba58ad-1ec1-4990-a641-0e8a70e8c8c5\") " Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.290638 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.290685 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.290734 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.290754 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/91ba58ad-1ec1-4990-a641-0e8a70e8c8c5-var-lock" (OuterVolumeSpecName: "var-lock") pod "91ba58ad-1ec1-4990-a641-0e8a70e8c8c5" (UID: "91ba58ad-1ec1-4990-a641-0e8a70e8c8c5"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.290781 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/91ba58ad-1ec1-4990-a641-0e8a70e8c8c5-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "91ba58ad-1ec1-4990-a641-0e8a70e8c8c5" (UID: "91ba58ad-1ec1-4990-a641-0e8a70e8c8c5"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.291065 4971 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.291088 4971 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.291100 4971 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.291111 4971 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/91ba58ad-1ec1-4990-a641-0e8a70e8c8c5-var-lock\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.291123 4971 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/91ba58ad-1ec1-4990-a641-0e8a70e8c8c5-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.297338 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91ba58ad-1ec1-4990-a641-0e8a70e8c8c5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "91ba58ad-1ec1-4990-a641-0e8a70e8c8c5" (UID: "91ba58ad-1ec1-4990-a641-0e8a70e8c8c5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.392490 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/91ba58ad-1ec1-4990-a641-0e8a70e8c8c5-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.560762 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.667216 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"91ba58ad-1ec1-4990-a641-0e8a70e8c8c5","Type":"ContainerDied","Data":"8fbdb45b8db07e3715be5614d409c82d275da7d64ff19e11040475194ce619fe"} Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.667338 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8fbdb45b8db07e3715be5614d409c82d275da7d64ff19e11040475194ce619fe" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.667398 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.669720 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.670598 4971 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3" exitCode=0 Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.670637 4971 scope.go:117] "RemoveContainer" containerID="eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.670743 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.671272 4971 status_manager.go:851] "Failed to get status for pod" podUID="91ba58ad-1ec1-4990-a641-0e8a70e8c8c5" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.671582 4971 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.672109 4971 status_manager.go:851] "Failed to get status for pod" podUID="91ba58ad-1ec1-4990-a641-0e8a70e8c8c5" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.672987 4971 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.674223 4971 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.674410 4971 status_manager.go:851] "Failed to get status for pod" podUID="91ba58ad-1ec1-4990-a641-0e8a70e8c8c5" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.684081 4971 scope.go:117] "RemoveContainer" containerID="dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.695412 4971 scope.go:117] "RemoveContainer" 
containerID="08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.707440 4971 scope.go:117] "RemoveContainer" containerID="d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.719846 4971 scope.go:117] "RemoveContainer" containerID="d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.737281 4971 scope.go:117] "RemoveContainer" containerID="09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.752698 4971 scope.go:117] "RemoveContainer" containerID="eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af" Nov 27 06:56:46 crc kubenswrapper[4971]: E1127 06:56:46.753037 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\": container with ID starting with eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af not found: ID does not exist" containerID="eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.753093 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af"} err="failed to get container status \"eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\": rpc error: code = NotFound desc = could not find container \"eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af\": container with ID starting with eb427d616fd3eb9fd6ab93a81ae6b7921f2871e40402aa4e6cca432940e624af not found: ID does not exist" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.753113 4971 scope.go:117] "RemoveContainer" containerID="dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c" Nov 27 06:56:46 crc kubenswrapper[4971]: E1127 06:56:46.753768 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\": container with ID starting with dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c not found: ID does not exist" containerID="dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.753800 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c"} err="failed to get container status \"dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\": rpc error: code = NotFound desc = could not find container \"dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c\": container with ID starting with dd1496a116b3a4708d40994658750525af6ce9b7aa317b8a0d9309e35850c30c not found: ID does not exist" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.753819 4971 scope.go:117] "RemoveContainer" containerID="08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4" Nov 27 06:56:46 crc kubenswrapper[4971]: E1127 06:56:46.754130 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\": container with ID starting 
with 08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4 not found: ID does not exist" containerID="08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.754159 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4"} err="failed to get container status \"08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\": rpc error: code = NotFound desc = could not find container \"08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4\": container with ID starting with 08bbb79b38a46b0730b712e7a20649b1471d1b2bed23a814da49343cc69799b4 not found: ID does not exist" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.754172 4971 scope.go:117] "RemoveContainer" containerID="d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1" Nov 27 06:56:46 crc kubenswrapper[4971]: E1127 06:56:46.754512 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\": container with ID starting with d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1 not found: ID does not exist" containerID="d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.754560 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1"} err="failed to get container status \"d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\": rpc error: code = NotFound desc = could not find container \"d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1\": container with ID starting with d4f1aa71a3309b32f9798e4be1f9400a21a7e3a71807158c1ecef5bf867af9d1 not found: ID does not exist" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.754579 4971 scope.go:117] "RemoveContainer" containerID="d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3" Nov 27 06:56:46 crc kubenswrapper[4971]: E1127 06:56:46.754889 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\": container with ID starting with d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3 not found: ID does not exist" containerID="d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.754922 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3"} err="failed to get container status \"d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\": rpc error: code = NotFound desc = could not find container \"d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3\": container with ID starting with d06f4f5426c5008eac7ae43fa8198aac47d8816c560e4819a258b3235c425cd3 not found: ID does not exist" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.754943 4971 scope.go:117] "RemoveContainer" containerID="09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435" Nov 27 06:56:46 crc kubenswrapper[4971]: E1127 06:56:46.755233 4971 log.go:32] "ContainerStatus from runtime service failed" 
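Each "RemoveContainer" above is immediately followed by a NotFound error from the runtime and a "DeleteContainer returned error" line, yet the kubelet simply proceeds to the next ID: for cleanup, "already gone" is as good as "deleted". A sketch of that idempotent treatment; isNotFound is a stand-in for a proper gRPC status-code check, and removeIfPresent is an invented name.

    package sketch

    import "strings"

    // removeIfPresent deletes a container but treats "not found" as success,
    // matching the RemoveContainer / NotFound pairs in the log above.
    func removeIfPresent(remove func(id string) error, id string) error {
    	err := remove(id)
    	if err == nil || isNotFound(err) {
    		return nil // container already absent: the cleanup goal is met
    	}
    	return err
    }

    // isNotFound is illustrative only; real code would inspect the gRPC
    // status code rather than the error string.
    func isNotFound(err error) bool {
    	return strings.Contains(err.Error(), "NotFound")
    }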
err="rpc error: code = NotFound desc = could not find container \"09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\": container with ID starting with 09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435 not found: ID does not exist" containerID="09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435" Nov 27 06:56:46 crc kubenswrapper[4971]: I1127 06:56:46.755261 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435"} err="failed to get container status \"09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\": rpc error: code = NotFound desc = could not find container \"09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435\": container with ID starting with 09dec89a9077168c0a23666618b6195d0aa6f03a0c8dc706e0f0d83da1ac1435 not found: ID does not exist" Nov 27 06:56:46 crc kubenswrapper[4971]: E1127 06:56:46.774265 4971 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.50:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187bcabfcd52466c openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-27 06:56:44.1779667 +0000 UTC m=+242.370010658,LastTimestamp:2025-11-27 06:56:44.1779667 +0000 UTC m=+242.370010658,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 27 06:56:51 crc kubenswrapper[4971]: E1127 06:56:51.496150 4971 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:51 crc kubenswrapper[4971]: E1127 06:56:51.496918 4971 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:51 crc kubenswrapper[4971]: E1127 06:56:51.497395 4971 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:51 crc kubenswrapper[4971]: E1127 06:56:51.498650 4971 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:51 crc kubenswrapper[4971]: E1127 06:56:51.499121 4971 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": 
dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:51 crc kubenswrapper[4971]: I1127 06:56:51.499158 4971 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Nov 27 06:56:51 crc kubenswrapper[4971]: E1127 06:56:51.499568 4971 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.50:6443: connect: connection refused" interval="200ms" Nov 27 06:56:51 crc kubenswrapper[4971]: E1127 06:56:51.700781 4971 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.50:6443: connect: connection refused" interval="400ms" Nov 27 06:56:52 crc kubenswrapper[4971]: E1127 06:56:52.101965 4971 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.50:6443: connect: connection refused" interval="800ms" Nov 27 06:56:52 crc kubenswrapper[4971]: I1127 06:56:52.552344 4971 status_manager.go:851] "Failed to get status for pod" podUID="91ba58ad-1ec1-4990-a641-0e8a70e8c8c5" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:52 crc kubenswrapper[4971]: E1127 06:56:52.903603 4971 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.50:6443: connect: connection refused" interval="1.6s" Nov 27 06:56:54 crc kubenswrapper[4971]: E1127 06:56:54.505354 4971 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.50:6443: connect: connection refused" interval="3.2s" Nov 27 06:56:56 crc kubenswrapper[4971]: I1127 06:56:56.549811 4971 util.go:30] "No sandbox for pod can be found. 
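The lease controller lines above trace a textbook doubling backoff: after "failed 5 attempts to update lease" the controller falls back to ensuring the lease exists and retries at 200ms, 400ms, 800ms, 1.6s, 3.2s and (just below) 6.4s intervals. A sketch of that schedule; the cap value is an assumption, since this log window shows nothing beyond 6.4s.

    package sketch

    import "time"

    // nextInterval doubles the retry interval after each failed lease update,
    // reproducing the 200ms -> 400ms -> 800ms -> 1.6s -> 3.2s -> 6.4s series
    // in the log, bounded by an assumed cap.
    func nextInterval(cur, limit time.Duration) time.Duration {
    	next := 2 * cur
    	if next > limit {
    		return limit
    	}
    	return next
    }

    // Starting from 200ms, five consecutive failures yield the logged series:
    //   d := 200 * time.Millisecond
    //   for i := 0; i < 5; i++ { d = nextInterval(d, 10*time.Second) }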
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:56:56 crc kubenswrapper[4971]: I1127 06:56:56.551270 4971 status_manager.go:851] "Failed to get status for pod" podUID="91ba58ad-1ec1-4990-a641-0e8a70e8c8c5" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:56 crc kubenswrapper[4971]: I1127 06:56:56.572421 4971 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="52694deb-5a3b-4131-be17-d81560fac783" Nov 27 06:56:56 crc kubenswrapper[4971]: I1127 06:56:56.572673 4971 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="52694deb-5a3b-4131-be17-d81560fac783" Nov 27 06:56:56 crc kubenswrapper[4971]: E1127 06:56:56.573274 4971 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.50:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:56:56 crc kubenswrapper[4971]: I1127 06:56:56.574137 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:56:56 crc kubenswrapper[4971]: W1127 06:56:56.601732 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-2c2dd39a4384abcb4c1dd425780bdd81825dc91975258160b177cf0c3af7ac91 WatchSource:0}: Error finding container 2c2dd39a4384abcb4c1dd425780bdd81825dc91975258160b177cf0c3af7ac91: Status 404 returned error can't find the container with id 2c2dd39a4384abcb4c1dd425780bdd81825dc91975258160b177cf0c3af7ac91 Nov 27 06:56:56 crc kubenswrapper[4971]: I1127 06:56:56.724808 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"2c2dd39a4384abcb4c1dd425780bdd81825dc91975258160b177cf0c3af7ac91"} Nov 27 06:56:56 crc kubenswrapper[4971]: E1127 06:56:56.775297 4971 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.50:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187bcabfcd52466c openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-27 06:56:44.1779667 +0000 UTC m=+242.370010658,LastTimestamp:2025-11-27 06:56:44.1779667 +0000 UTC m=+242.370010658,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 27 06:56:57 crc kubenswrapper[4971]: E1127 06:56:57.706902 4971 controller.go:145] 
"Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.50:6443: connect: connection refused" interval="6.4s" Nov 27 06:56:57 crc kubenswrapper[4971]: I1127 06:56:57.731239 4971 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="9212c7973c73fa2626767e4a8924711f3c262bfe3ba0c13e0f1d90d3ce6cb2fe" exitCode=0 Nov 27 06:56:57 crc kubenswrapper[4971]: I1127 06:56:57.731314 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"9212c7973c73fa2626767e4a8924711f3c262bfe3ba0c13e0f1d90d3ce6cb2fe"} Nov 27 06:56:57 crc kubenswrapper[4971]: I1127 06:56:57.731720 4971 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="52694deb-5a3b-4131-be17-d81560fac783" Nov 27 06:56:57 crc kubenswrapper[4971]: I1127 06:56:57.731775 4971 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="52694deb-5a3b-4131-be17-d81560fac783" Nov 27 06:56:57 crc kubenswrapper[4971]: I1127 06:56:57.732214 4971 status_manager.go:851] "Failed to get status for pod" podUID="91ba58ad-1ec1-4990-a641-0e8a70e8c8c5" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:57 crc kubenswrapper[4971]: E1127 06:56:57.732408 4971 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.50:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:56:57 crc kubenswrapper[4971]: I1127 06:56:57.735133 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Nov 27 06:56:57 crc kubenswrapper[4971]: I1127 06:56:57.735176 4971 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849" exitCode=1 Nov 27 06:56:57 crc kubenswrapper[4971]: I1127 06:56:57.735200 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849"} Nov 27 06:56:57 crc kubenswrapper[4971]: I1127 06:56:57.735568 4971 scope.go:117] "RemoveContainer" containerID="e3d9c8e0cdffee787e3d7101d62ba994c344f839dfdc5615465ec5e7a84a9849" Nov 27 06:56:57 crc kubenswrapper[4971]: I1127 06:56:57.736037 4971 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:57 crc kubenswrapper[4971]: I1127 06:56:57.736405 4971 status_manager.go:851] "Failed to get status for pod" podUID="91ba58ad-1ec1-4990-a641-0e8a70e8c8c5" 
pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.50:6443: connect: connection refused" Nov 27 06:56:58 crc kubenswrapper[4971]: I1127 06:56:58.753374 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ace98787b612e254cc26eee9d0cc25780f14e4a323476854d8606dafff617052"} Nov 27 06:56:58 crc kubenswrapper[4971]: I1127 06:56:58.753419 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"94a8ca1c6e482014319addc7c42ad19e03e8b8937babf4c4febb626c599c8751"} Nov 27 06:56:58 crc kubenswrapper[4971]: I1127 06:56:58.753431 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"968e32a7178e58979bf5ed17f880fbf1678b31a5bac6d0ca7eaeaefa917c892f"} Nov 27 06:56:58 crc kubenswrapper[4971]: I1127 06:56:58.776776 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Nov 27 06:56:58 crc kubenswrapper[4971]: I1127 06:56:58.776832 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"66219eb9fc26c23dd9c44707223afc39d5f501313b55094e404fb46e5111458b"} Nov 27 06:56:59 crc kubenswrapper[4971]: I1127 06:56:59.785913 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d956fb5f98908728f8a5daa361ed65c47e51dcaf222a59b625d98efe48f90ad3"} Nov 27 06:56:59 crc kubenswrapper[4971]: I1127 06:56:59.786302 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"027f05392bbc5ae5e39825540b680e83f03865792642b885e909c03630afaa3c"} Nov 27 06:56:59 crc kubenswrapper[4971]: I1127 06:56:59.786183 4971 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="52694deb-5a3b-4131-be17-d81560fac783" Nov 27 06:56:59 crc kubenswrapper[4971]: I1127 06:56:59.786368 4971 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="52694deb-5a3b-4131-be17-d81560fac783" Nov 27 06:56:59 crc kubenswrapper[4971]: I1127 06:56:59.786352 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:57:01 crc kubenswrapper[4971]: I1127 06:57:01.574963 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:57:01 crc kubenswrapper[4971]: I1127 06:57:01.575321 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:57:01 crc kubenswrapper[4971]: I1127 06:57:01.585475 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:57:02 
Nov 27 06:57:02 crc kubenswrapper[4971]: I1127 06:57:02.069818 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 27 06:57:04 crc kubenswrapper[4971]: I1127 06:57:04.793314 4971 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 27 06:57:04 crc kubenswrapper[4971]: I1127 06:57:04.814269 4971 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="52694deb-5a3b-4131-be17-d81560fac783"
Nov 27 06:57:04 crc kubenswrapper[4971]: I1127 06:57:04.814302 4971 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="52694deb-5a3b-4131-be17-d81560fac783"
Nov 27 06:57:04 crc kubenswrapper[4971]: I1127 06:57:04.820264 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 27 06:57:04 crc kubenswrapper[4971]: I1127 06:57:04.828447 4971 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="602f94c8-353f-41f4-a43f-12bc76e5786c"
Nov 27 06:57:05 crc kubenswrapper[4971]: I1127 06:57:05.819049 4971 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="52694deb-5a3b-4131-be17-d81560fac783"
Nov 27 06:57:05 crc kubenswrapper[4971]: I1127 06:57:05.820185 4971 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="52694deb-5a3b-4131-be17-d81560fac783"
Nov 27 06:57:06 crc kubenswrapper[4971]: I1127 06:57:06.701700 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 27 06:57:06 crc kubenswrapper[4971]: I1127 06:57:06.706256 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 27 06:57:12 crc kubenswrapper[4971]: I1127 06:57:12.073932 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 27 06:57:12 crc kubenswrapper[4971]: I1127 06:57:12.561262 4971 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="602f94c8-353f-41f4-a43f-12bc76e5786c"
Nov 27 06:57:14 crc kubenswrapper[4971]: I1127 06:57:14.962320 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Nov 27 06:57:15 crc kubenswrapper[4971]: I1127 06:57:15.063317 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Nov 27 06:57:15 crc kubenswrapper[4971]: I1127 06:57:15.116661 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Nov 27 06:57:15 crc kubenswrapper[4971]: I1127 06:57:15.356339 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Nov 27 06:57:16 crc kubenswrapper[4971]: I1127 06:57:16.567702 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
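The "Trying to delete pod" / "Deleting a mirror pod" / "Deleted mirror pod because it is outdated" sequence, followed by "Pod was deleted and then recreated, skipping status update" with a new podUID, is the static-pod takeover: once the apiserver answers again, the stale mirror pod (still describing the old kube-apiserver-crc revision) is deleted so a fresh mirror of the new static pod can be created. A sketch of the staleness check, assuming the hash is carried in the kubernetes.io/config.mirror and kubernetes.io/config.hash annotations (kubelet's convention); the types and function are otherwise invented.

    package sketch

    // podView is an illustrative stand-in for the pod metadata involved.
    type podView struct {
    	UID         string
    	Annotations map[string]string
    }

    // mirrorIsOutdated reports whether the API-side mirror pod no longer
    // matches the static pod on disk; if so, the kubelet deletes the mirror
    // so it can be recreated, as the log above shows.
    func mirrorIsOutdated(static, mirror podView) bool {
    	const mirrorKey = "kubernetes.io/config.mirror" // assumed annotation keys
    	const hashKey = "kubernetes.io/config.hash"
    	return mirror.Annotations[mirrorKey] != static.Annotations[hashKey]
    }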
object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Nov 27 06:57:16 crc kubenswrapper[4971]: I1127 06:57:16.802060 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Nov 27 06:57:16 crc kubenswrapper[4971]: I1127 06:57:16.853625 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Nov 27 06:57:16 crc kubenswrapper[4971]: I1127 06:57:16.910762 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Nov 27 06:57:17 crc kubenswrapper[4971]: I1127 06:57:17.128468 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Nov 27 06:57:17 crc kubenswrapper[4971]: I1127 06:57:17.190679 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Nov 27 06:57:17 crc kubenswrapper[4971]: I1127 06:57:17.350614 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Nov 27 06:57:17 crc kubenswrapper[4971]: I1127 06:57:17.391251 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Nov 27 06:57:17 crc kubenswrapper[4971]: I1127 06:57:17.409985 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 27 06:57:17 crc kubenswrapper[4971]: I1127 06:57:17.501880 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Nov 27 06:57:17 crc kubenswrapper[4971]: I1127 06:57:17.593780 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Nov 27 06:57:17 crc kubenswrapper[4971]: I1127 06:57:17.649297 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Nov 27 06:57:17 crc kubenswrapper[4971]: I1127 06:57:17.728617 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Nov 27 06:57:17 crc kubenswrapper[4971]: I1127 06:57:17.778722 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.070423 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.080449 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.101369 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.178273 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.203484 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.242502 4971 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.273293 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.438792 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.453242 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.481302 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.544496 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.592032 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.592168 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.788593 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.802762 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.824003 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.873997 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.912547 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.964018 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Nov 27 06:57:18 crc kubenswrapper[4971]: I1127 06:57:18.995457 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.074137 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.170155 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.174820 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.214391 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.321603 4971 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-oauth-apiserver"/"etcd-client" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.342456 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.370564 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.443263 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.459703 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.510377 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.531772 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.549004 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.639824 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.703479 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.759481 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.769699 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.774246 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.802064 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.845118 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.909033 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.914496 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.923126 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.928043 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Nov 27 06:57:19 crc kubenswrapper[4971]: I1127 06:57:19.976103 4971 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Nov 27 06:57:20 crc kubenswrapper[4971]: I1127 06:57:20.008444 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Nov 27 06:57:20 crc kubenswrapper[4971]: I1127 06:57:20.016419 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Nov 27 06:57:20 crc kubenswrapper[4971]: I1127 06:57:20.019134 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Nov 27 06:57:20 crc kubenswrapper[4971]: I1127 06:57:20.020352 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Nov 27 06:57:20 crc kubenswrapper[4971]: I1127 06:57:20.026267 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Nov 27 06:57:20 crc kubenswrapper[4971]: I1127 06:57:20.038475 4971 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Nov 27 06:57:20 crc kubenswrapper[4971]: I1127 06:57:20.088589 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Nov 27 06:57:20 crc kubenswrapper[4971]: I1127 06:57:20.127814 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Nov 27 06:57:20 crc kubenswrapper[4971]: I1127 06:57:20.128043 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Nov 27 06:57:20 crc kubenswrapper[4971]: I1127 06:57:20.153252 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 27 06:57:20 crc kubenswrapper[4971]: I1127 06:57:20.162515 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Nov 27 06:57:20 crc kubenswrapper[4971]: I1127 06:57:20.200434 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Nov 27 06:57:20 crc kubenswrapper[4971]: I1127 06:57:20.233397 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Nov 27 06:57:20 crc kubenswrapper[4971]: I1127 06:57:20.548167 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Nov 27 06:57:20 crc kubenswrapper[4971]: I1127 06:57:20.589320 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 27 06:57:20 crc kubenswrapper[4971]: I1127 06:57:20.591592 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Nov 27 06:57:20 crc kubenswrapper[4971]: I1127 06:57:20.684953 4971 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Nov 27 06:57:20 crc kubenswrapper[4971]: I1127 06:57:20.840764 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Nov 27 06:57:20 crc 
kubenswrapper[4971]: I1127 06:57:20.883154 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Nov 27 06:57:20 crc kubenswrapper[4971]: I1127 06:57:20.945631 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.020611 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.063039 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.083516 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.139480 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.162265 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.240978 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.285689 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.353126 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.500397 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.574855 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.621899 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.674853 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.675621 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.703347 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.765005 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.821403 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.846307 4971 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.880411 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.909513 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Nov 27 06:57:21 crc kubenswrapper[4971]: I1127 06:57:21.971475 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.014794 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.017984 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.022290 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.049259 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.184157 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.296655 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.404291 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.466384 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.526388 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.532999 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.575155 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.575607 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.605286 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.635459 4971 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.678979 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.729767 4971 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.781636 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.852518 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.866816 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.889650 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Nov 27 06:57:22 crc kubenswrapper[4971]: I1127 06:57:22.919663 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Nov 27 06:57:23 crc kubenswrapper[4971]: I1127 06:57:23.018774 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Nov 27 06:57:23 crc kubenswrapper[4971]: I1127 06:57:23.041406 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Nov 27 06:57:23 crc kubenswrapper[4971]: I1127 06:57:23.200643 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Nov 27 06:57:23 crc kubenswrapper[4971]: I1127 06:57:23.241043 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Nov 27 06:57:23 crc kubenswrapper[4971]: I1127 06:57:23.282867 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Nov 27 06:57:23 crc kubenswrapper[4971]: I1127 06:57:23.287930 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Nov 27 06:57:23 crc kubenswrapper[4971]: I1127 06:57:23.370428 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Nov 27 06:57:23 crc kubenswrapper[4971]: I1127 06:57:23.401546 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Nov 27 06:57:23 crc kubenswrapper[4971]: I1127 06:57:23.416667 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Nov 27 06:57:23 crc kubenswrapper[4971]: I1127 06:57:23.473076 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Nov 27 06:57:23 crc kubenswrapper[4971]: I1127 06:57:23.488687 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Nov 27 06:57:23 crc kubenswrapper[4971]: I1127 06:57:23.494167 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Nov 27 06:57:23 crc kubenswrapper[4971]: I1127 06:57:23.585559 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 27 06:57:23 crc kubenswrapper[4971]: I1127 06:57:23.591865 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" 
Nov 27 06:57:23 crc kubenswrapper[4971]: I1127 06:57:23.776826 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Nov 27 06:57:23 crc kubenswrapper[4971]: I1127 06:57:23.816329 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Nov 27 06:57:23 crc kubenswrapper[4971]: I1127 06:57:23.857448 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 27 06:57:23 crc kubenswrapper[4971]: I1127 06:57:23.973687 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Nov 27 06:57:23 crc kubenswrapper[4971]: I1127 06:57:23.989127 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Nov 27 06:57:24 crc kubenswrapper[4971]: I1127 06:57:24.023975 4971 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Nov 27 06:57:24 crc kubenswrapper[4971]: I1127 06:57:24.041393 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Nov 27 06:57:24 crc kubenswrapper[4971]: I1127 06:57:24.074585 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Nov 27 06:57:24 crc kubenswrapper[4971]: I1127 06:57:24.084018 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Nov 27 06:57:24 crc kubenswrapper[4971]: I1127 06:57:24.109557 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Nov 27 06:57:24 crc kubenswrapper[4971]: I1127 06:57:24.259358 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Nov 27 06:57:24 crc kubenswrapper[4971]: I1127 06:57:24.324744 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Nov 27 06:57:24 crc kubenswrapper[4971]: I1127 06:57:24.362054 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Nov 27 06:57:24 crc kubenswrapper[4971]: I1127 06:57:24.502472 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Nov 27 06:57:24 crc kubenswrapper[4971]: I1127 06:57:24.555770 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Nov 27 06:57:24 crc kubenswrapper[4971]: I1127 06:57:24.565108 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Nov 27 06:57:24 crc kubenswrapper[4971]: I1127 06:57:24.600944 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Nov 27 06:57:24 crc kubenswrapper[4971]: I1127 06:57:24.645190 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Nov 27 06:57:24 crc kubenswrapper[4971]: I1127 06:57:24.712334 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Nov 27 06:57:24 crc kubenswrapper[4971]: I1127 
06:57:24.797378 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Nov 27 06:57:24 crc kubenswrapper[4971]: I1127 06:57:24.923553 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Nov 27 06:57:24 crc kubenswrapper[4971]: I1127 06:57:24.954770 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.011412 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.105590 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.122579 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.168982 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.200732 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.246072 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.361463 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.429891 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.429915 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.489013 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.503388 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.535172 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.545609 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.550215 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.611308 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.633721 4971 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"openshift-service-ca.crt" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.758724 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.762723 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.778243 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.804132 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.837195 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.858448 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.858480 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.861823 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.892911 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.928412 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Nov 27 06:57:25 crc kubenswrapper[4971]: I1127 06:57:25.990290 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Nov 27 06:57:26 crc kubenswrapper[4971]: I1127 06:57:26.145423 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Nov 27 06:57:26 crc kubenswrapper[4971]: I1127 06:57:26.213116 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Nov 27 06:57:26 crc kubenswrapper[4971]: I1127 06:57:26.229552 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Nov 27 06:57:26 crc kubenswrapper[4971]: I1127 06:57:26.244385 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Nov 27 06:57:26 crc kubenswrapper[4971]: I1127 06:57:26.250748 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Nov 27 06:57:26 crc kubenswrapper[4971]: I1127 06:57:26.331962 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Nov 27 06:57:26 crc kubenswrapper[4971]: I1127 06:57:26.378152 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Nov 27 06:57:26 crc kubenswrapper[4971]: I1127 06:57:26.475937 
4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Nov 27 06:57:26 crc kubenswrapper[4971]: I1127 06:57:26.480833 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Nov 27 06:57:26 crc kubenswrapper[4971]: I1127 06:57:26.859221 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Nov 27 06:57:26 crc kubenswrapper[4971]: I1127 06:57:26.893147 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Nov 27 06:57:26 crc kubenswrapper[4971]: I1127 06:57:26.941167 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Nov 27 06:57:26 crc kubenswrapper[4971]: I1127 06:57:26.979612 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Nov 27 06:57:27 crc kubenswrapper[4971]: I1127 06:57:27.013116 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Nov 27 06:57:27 crc kubenswrapper[4971]: I1127 06:57:27.052387 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Nov 27 06:57:27 crc kubenswrapper[4971]: I1127 06:57:27.060316 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Nov 27 06:57:27 crc kubenswrapper[4971]: I1127 06:57:27.114160 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Nov 27 06:57:27 crc kubenswrapper[4971]: I1127 06:57:27.151193 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Nov 27 06:57:27 crc kubenswrapper[4971]: I1127 06:57:27.263108 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Nov 27 06:57:27 crc kubenswrapper[4971]: I1127 06:57:27.310603 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 27 06:57:27 crc kubenswrapper[4971]: I1127 06:57:27.401997 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Nov 27 06:57:27 crc kubenswrapper[4971]: I1127 06:57:27.466132 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Nov 27 06:57:27 crc kubenswrapper[4971]: I1127 06:57:27.507691 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Nov 27 06:57:27 crc kubenswrapper[4971]: I1127 06:57:27.538321 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Nov 27 06:57:27 crc kubenswrapper[4971]: I1127 06:57:27.571303 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Nov 27 06:57:27 crc kubenswrapper[4971]: I1127 06:57:27.674228 4971 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Nov 27 06:57:27 crc kubenswrapper[4971]: I1127 06:57:27.694906 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Nov 27 06:57:27 crc kubenswrapper[4971]: I1127 06:57:27.825528 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Nov 27 06:57:27 crc kubenswrapper[4971]: I1127 06:57:27.905047 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Nov 27 06:57:27 crc kubenswrapper[4971]: I1127 06:57:27.991055 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 27 06:57:28 crc kubenswrapper[4971]: I1127 06:57:28.028812 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Nov 27 06:57:28 crc kubenswrapper[4971]: I1127 06:57:28.085050 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Nov 27 06:57:28 crc kubenswrapper[4971]: I1127 06:57:28.121674 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Nov 27 06:57:28 crc kubenswrapper[4971]: I1127 06:57:28.298868 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Nov 27 06:57:28 crc kubenswrapper[4971]: I1127 06:57:28.306224 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Nov 27 06:57:28 crc kubenswrapper[4971]: I1127 06:57:28.440762 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Nov 27 06:57:28 crc kubenswrapper[4971]: I1127 06:57:28.772759 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Nov 27 06:57:28 crc kubenswrapper[4971]: I1127 06:57:28.809776 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Nov 27 06:57:28 crc kubenswrapper[4971]: I1127 06:57:28.907148 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Nov 27 06:57:29 crc kubenswrapper[4971]: I1127 06:57:29.012453 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Nov 27 06:57:29 crc kubenswrapper[4971]: I1127 06:57:29.086336 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Nov 27 06:57:29 crc kubenswrapper[4971]: I1127 06:57:29.125789 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 27 06:57:29 crc kubenswrapper[4971]: I1127 06:57:29.128338 4971 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Nov 27 06:57:29 crc kubenswrapper[4971]: I1127 06:57:29.133744 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 27 06:57:29 crc kubenswrapper[4971]: I1127 06:57:29.133809 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 
27 06:57:29 crc kubenswrapper[4971]: I1127 06:57:29.138076 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 27 06:57:29 crc kubenswrapper[4971]: I1127 06:57:29.151656 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=25.151640043 podStartE2EDuration="25.151640043s" podCreationTimestamp="2025-11-27 06:57:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:57:29.149978166 +0000 UTC m=+287.342022104" watchObservedRunningTime="2025-11-27 06:57:29.151640043 +0000 UTC m=+287.343683961" Nov 27 06:57:29 crc kubenswrapper[4971]: I1127 06:57:29.269485 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Nov 27 06:57:29 crc kubenswrapper[4971]: I1127 06:57:29.318852 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Nov 27 06:57:29 crc kubenswrapper[4971]: I1127 06:57:29.438903 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Nov 27 06:57:29 crc kubenswrapper[4971]: I1127 06:57:29.727198 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Nov 27 06:57:29 crc kubenswrapper[4971]: I1127 06:57:29.727263 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Nov 27 06:57:29 crc kubenswrapper[4971]: I1127 06:57:29.806566 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Nov 27 06:57:29 crc kubenswrapper[4971]: I1127 06:57:29.816948 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Nov 27 06:57:29 crc kubenswrapper[4971]: I1127 06:57:29.829523 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 27 06:57:29 crc kubenswrapper[4971]: I1127 06:57:29.838612 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Nov 27 06:57:30 crc kubenswrapper[4971]: I1127 06:57:30.104934 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Nov 27 06:57:30 crc kubenswrapper[4971]: I1127 06:57:30.371997 4971 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Nov 27 06:57:30 crc kubenswrapper[4971]: I1127 06:57:30.518223 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Nov 27 06:57:30 crc kubenswrapper[4971]: I1127 06:57:30.976449 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Nov 27 06:57:31 crc kubenswrapper[4971]: I1127 06:57:31.190947 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Nov 27 06:57:31 crc kubenswrapper[4971]: I1127 06:57:31.364126 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Nov 27 06:57:31 crc kubenswrapper[4971]: I1127 06:57:31.799838 4971 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ingress"/"router-stats-default" Nov 27 06:57:32 crc kubenswrapper[4971]: I1127 06:57:32.001297 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Nov 27 06:57:32 crc kubenswrapper[4971]: I1127 06:57:32.137479 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Nov 27 06:57:32 crc kubenswrapper[4971]: I1127 06:57:32.334488 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Nov 27 06:57:32 crc kubenswrapper[4971]: I1127 06:57:32.489592 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Nov 27 06:57:32 crc kubenswrapper[4971]: I1127 06:57:32.652525 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Nov 27 06:57:32 crc kubenswrapper[4971]: I1127 06:57:32.670911 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Nov 27 06:57:33 crc kubenswrapper[4971]: I1127 06:57:33.621089 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Nov 27 06:57:38 crc kubenswrapper[4971]: I1127 06:57:38.633244 4971 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Nov 27 06:57:38 crc kubenswrapper[4971]: I1127 06:57:38.634005 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://a7aa3dfad75690eeaeae812838ab898bfecae62227727f5bb58a697f249a2c66" gracePeriod=5 Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.018422 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.019120 4971 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="a7aa3dfad75690eeaeae812838ab898bfecae62227727f5bb58a697f249a2c66" exitCode=137 Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.198189 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.198264 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.243582 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.243686 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.243708 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.243697 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.243763 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.243827 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.243879 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.243904 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.244042 4971 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.244055 4971 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.244063 4971 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.244078 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.252311 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.345511 4971 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.345587 4971 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Nov 27 06:57:44 crc kubenswrapper[4971]: I1127 06:57:44.560127 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Nov 27 06:57:45 crc kubenswrapper[4971]: I1127 06:57:45.024338 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Nov 27 06:57:45 crc kubenswrapper[4971]: I1127 06:57:45.024402 4971 scope.go:117] "RemoveContainer" containerID="a7aa3dfad75690eeaeae812838ab898bfecae62227727f5bb58a697f249a2c66" Nov 27 06:57:45 crc kubenswrapper[4971]: I1127 06:57:45.024460 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 27 06:58:39 crc kubenswrapper[4971]: I1127 06:58:39.807591 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qznqv"] Nov 27 06:58:39 crc kubenswrapper[4971]: E1127 06:58:39.808266 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91ba58ad-1ec1-4990-a641-0e8a70e8c8c5" containerName="installer" Nov 27 06:58:39 crc kubenswrapper[4971]: I1127 06:58:39.808278 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="91ba58ad-1ec1-4990-a641-0e8a70e8c8c5" containerName="installer" Nov 27 06:58:39 crc kubenswrapper[4971]: E1127 06:58:39.808291 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Nov 27 06:58:39 crc kubenswrapper[4971]: I1127 06:58:39.808297 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Nov 27 06:58:39 crc kubenswrapper[4971]: I1127 06:58:39.808398 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Nov 27 06:58:39 crc kubenswrapper[4971]: I1127 06:58:39.808410 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="91ba58ad-1ec1-4990-a641-0e8a70e8c8c5" containerName="installer" Nov 27 06:58:39 crc kubenswrapper[4971]: I1127 06:58:39.808825 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:39 crc kubenswrapper[4971]: I1127 06:58:39.818709 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qznqv"] Nov 27 06:58:39 crc kubenswrapper[4971]: I1127 06:58:39.990493 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f2603bf9-9436-470a-affe-f88d8d6c9514-registry-certificates\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:39 crc kubenswrapper[4971]: I1127 06:58:39.990549 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f2603bf9-9436-470a-affe-f88d8d6c9514-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:39 crc kubenswrapper[4971]: I1127 06:58:39.990602 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:39 crc kubenswrapper[4971]: I1127 06:58:39.990619 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f2603bf9-9436-470a-affe-f88d8d6c9514-registry-tls\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:39 crc kubenswrapper[4971]: I1127 06:58:39.990650 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nm52l\" (UniqueName: \"kubernetes.io/projected/f2603bf9-9436-470a-affe-f88d8d6c9514-kube-api-access-nm52l\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:39 crc kubenswrapper[4971]: I1127 06:58:39.990757 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f2603bf9-9436-470a-affe-f88d8d6c9514-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:39 crc kubenswrapper[4971]: I1127 06:58:39.990862 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f2603bf9-9436-470a-affe-f88d8d6c9514-trusted-ca\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:39 crc kubenswrapper[4971]: I1127 06:58:39.990913 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f2603bf9-9436-470a-affe-f88d8d6c9514-bound-sa-token\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:40 crc kubenswrapper[4971]: I1127 06:58:40.018345 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:40 crc kubenswrapper[4971]: I1127 06:58:40.092524 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f2603bf9-9436-470a-affe-f88d8d6c9514-registry-certificates\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:40 crc kubenswrapper[4971]: I1127 06:58:40.092609 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f2603bf9-9436-470a-affe-f88d8d6c9514-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:40 crc kubenswrapper[4971]: I1127 06:58:40.092662 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f2603bf9-9436-470a-affe-f88d8d6c9514-registry-tls\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:40 crc kubenswrapper[4971]: I1127 
06:58:40.092702 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nm52l\" (UniqueName: \"kubernetes.io/projected/f2603bf9-9436-470a-affe-f88d8d6c9514-kube-api-access-nm52l\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:40 crc kubenswrapper[4971]: I1127 06:58:40.092729 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f2603bf9-9436-470a-affe-f88d8d6c9514-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:40 crc kubenswrapper[4971]: I1127 06:58:40.092759 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f2603bf9-9436-470a-affe-f88d8d6c9514-trusted-ca\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:40 crc kubenswrapper[4971]: I1127 06:58:40.092790 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f2603bf9-9436-470a-affe-f88d8d6c9514-bound-sa-token\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:40 crc kubenswrapper[4971]: I1127 06:58:40.093657 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f2603bf9-9436-470a-affe-f88d8d6c9514-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:40 crc kubenswrapper[4971]: I1127 06:58:40.093935 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f2603bf9-9436-470a-affe-f88d8d6c9514-trusted-ca\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:40 crc kubenswrapper[4971]: I1127 06:58:40.094586 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f2603bf9-9436-470a-affe-f88d8d6c9514-registry-certificates\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:40 crc kubenswrapper[4971]: I1127 06:58:40.097774 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f2603bf9-9436-470a-affe-f88d8d6c9514-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:40 crc kubenswrapper[4971]: I1127 06:58:40.097974 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f2603bf9-9436-470a-affe-f88d8d6c9514-registry-tls\") pod \"image-registry-66df7c8f76-qznqv\" (UID: 
\"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:40 crc kubenswrapper[4971]: I1127 06:58:40.112006 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f2603bf9-9436-470a-affe-f88d8d6c9514-bound-sa-token\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:40 crc kubenswrapper[4971]: I1127 06:58:40.115250 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nm52l\" (UniqueName: \"kubernetes.io/projected/f2603bf9-9436-470a-affe-f88d8d6c9514-kube-api-access-nm52l\") pod \"image-registry-66df7c8f76-qznqv\" (UID: \"f2603bf9-9436-470a-affe-f88d8d6c9514\") " pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:40 crc kubenswrapper[4971]: I1127 06:58:40.124918 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:40 crc kubenswrapper[4971]: I1127 06:58:40.548949 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qznqv"] Nov 27 06:58:41 crc kubenswrapper[4971]: I1127 06:58:41.362573 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" event={"ID":"f2603bf9-9436-470a-affe-f88d8d6c9514","Type":"ContainerStarted","Data":"d038273f99ea18a1b191cb52fe83b27e2e2f5e5b7337978a99e10366fe513b91"} Nov 27 06:58:41 crc kubenswrapper[4971]: I1127 06:58:41.362615 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" event={"ID":"f2603bf9-9436-470a-affe-f88d8d6c9514","Type":"ContainerStarted","Data":"bc5b91c93783899874f913dd6ef0102241c1853b05d8b69b8e4c1be434784a91"} Nov 27 06:58:41 crc kubenswrapper[4971]: I1127 06:58:41.362737 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:58:41 crc kubenswrapper[4971]: I1127 06:58:41.387307 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" podStartSLOduration=2.387292843 podStartE2EDuration="2.387292843s" podCreationTimestamp="2025-11-27 06:58:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:58:41.383996407 +0000 UTC m=+359.576040365" watchObservedRunningTime="2025-11-27 06:58:41.387292843 +0000 UTC m=+359.579336761" Nov 27 06:58:56 crc kubenswrapper[4971]: I1127 06:58:56.413418 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 06:58:56 crc kubenswrapper[4971]: I1127 06:58:56.414129 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 06:59:00 crc kubenswrapper[4971]: I1127 
06:59:00.130116 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-qznqv" Nov 27 06:59:00 crc kubenswrapper[4971]: I1127 06:59:00.187797 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rn8q9"] Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.073692 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fhgrm"] Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.075131 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fhgrm" podUID="8978d4d3-664f-40f4-b33c-83cd92577bc3" containerName="registry-server" containerID="cri-o://b7a1cde51bbf318cdf838480249f3afd0bb260ed774d61fdfe43c67c9b42a1c2" gracePeriod=30 Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.086087 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rnpmr"] Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.087867 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rnpmr" podUID="2bec9dd3-b528-4b67-b949-8e9eb074e222" containerName="registry-server" containerID="cri-o://951993ff140652fc2efa39e6a84fef2b79d0a9d8777b3ea466d9979e8e9e7ae3" gracePeriod=30 Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.099829 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-m7fh9"] Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.100089 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" podUID="6df59c92-6515-4bf1-aad4-57daf0d8fc76" containerName="marketplace-operator" containerID="cri-o://96b79214b2981b78df80e586577ef173ce2597f65a1f86b3440300d2f27dd60e" gracePeriod=30 Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.123696 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qfvr4"] Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.123955 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qfvr4" podUID="12325c6c-a817-46ae-b776-01213f064dbf" containerName="registry-server" containerID="cri-o://ba3227ee0632cff4ea50b0ac425afb773f8b72eba30aa9fd27b8bd9d07d2e1d0" gracePeriod=30 Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.133898 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7pr9t"] Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.134371 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7pr9t" podUID="6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c" containerName="registry-server" containerID="cri-o://79fa646f743f7f4059a17ac081237d0b2fff084f67c728c80a9864f67667ed79" gracePeriod=30 Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.140755 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ctzhf"] Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.141658 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-ctzhf" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.158937 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ctzhf"] Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.261300 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75jc4\" (UniqueName: \"kubernetes.io/projected/9304f55c-4ec0-4828-bfe6-1024a5ec719e-kube-api-access-75jc4\") pod \"marketplace-operator-79b997595-ctzhf\" (UID: \"9304f55c-4ec0-4828-bfe6-1024a5ec719e\") " pod="openshift-marketplace/marketplace-operator-79b997595-ctzhf" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.261401 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9304f55c-4ec0-4828-bfe6-1024a5ec719e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-ctzhf\" (UID: \"9304f55c-4ec0-4828-bfe6-1024a5ec719e\") " pod="openshift-marketplace/marketplace-operator-79b997595-ctzhf" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.261449 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9304f55c-4ec0-4828-bfe6-1024a5ec719e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-ctzhf\" (UID: \"9304f55c-4ec0-4828-bfe6-1024a5ec719e\") " pod="openshift-marketplace/marketplace-operator-79b997595-ctzhf" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.362293 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75jc4\" (UniqueName: \"kubernetes.io/projected/9304f55c-4ec0-4828-bfe6-1024a5ec719e-kube-api-access-75jc4\") pod \"marketplace-operator-79b997595-ctzhf\" (UID: \"9304f55c-4ec0-4828-bfe6-1024a5ec719e\") " pod="openshift-marketplace/marketplace-operator-79b997595-ctzhf" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.362407 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9304f55c-4ec0-4828-bfe6-1024a5ec719e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-ctzhf\" (UID: \"9304f55c-4ec0-4828-bfe6-1024a5ec719e\") " pod="openshift-marketplace/marketplace-operator-79b997595-ctzhf" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.362456 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9304f55c-4ec0-4828-bfe6-1024a5ec719e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-ctzhf\" (UID: \"9304f55c-4ec0-4828-bfe6-1024a5ec719e\") " pod="openshift-marketplace/marketplace-operator-79b997595-ctzhf" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.364205 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9304f55c-4ec0-4828-bfe6-1024a5ec719e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-ctzhf\" (UID: \"9304f55c-4ec0-4828-bfe6-1024a5ec719e\") " pod="openshift-marketplace/marketplace-operator-79b997595-ctzhf" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.377881 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/9304f55c-4ec0-4828-bfe6-1024a5ec719e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-ctzhf\" (UID: \"9304f55c-4ec0-4828-bfe6-1024a5ec719e\") " pod="openshift-marketplace/marketplace-operator-79b997595-ctzhf" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.382391 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75jc4\" (UniqueName: \"kubernetes.io/projected/9304f55c-4ec0-4828-bfe6-1024a5ec719e-kube-api-access-75jc4\") pod \"marketplace-operator-79b997595-ctzhf\" (UID: \"9304f55c-4ec0-4828-bfe6-1024a5ec719e\") " pod="openshift-marketplace/marketplace-operator-79b997595-ctzhf" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.553204 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-ctzhf" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.563154 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rnpmr" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.575221 4971 generic.go:334] "Generic (PLEG): container finished" podID="6df59c92-6515-4bf1-aad4-57daf0d8fc76" containerID="96b79214b2981b78df80e586577ef173ce2597f65a1f86b3440300d2f27dd60e" exitCode=0 Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.575286 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" event={"ID":"6df59c92-6515-4bf1-aad4-57daf0d8fc76","Type":"ContainerDied","Data":"96b79214b2981b78df80e586577ef173ce2597f65a1f86b3440300d2f27dd60e"} Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.577187 4971 generic.go:334] "Generic (PLEG): container finished" podID="12325c6c-a817-46ae-b776-01213f064dbf" containerID="ba3227ee0632cff4ea50b0ac425afb773f8b72eba30aa9fd27b8bd9d07d2e1d0" exitCode=0 Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.577239 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qfvr4" event={"ID":"12325c6c-a817-46ae-b776-01213f064dbf","Type":"ContainerDied","Data":"ba3227ee0632cff4ea50b0ac425afb773f8b72eba30aa9fd27b8bd9d07d2e1d0"} Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.579220 4971 generic.go:334] "Generic (PLEG): container finished" podID="6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c" containerID="79fa646f743f7f4059a17ac081237d0b2fff084f67c728c80a9864f67667ed79" exitCode=0 Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.579274 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7pr9t" event={"ID":"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c","Type":"ContainerDied","Data":"79fa646f743f7f4059a17ac081237d0b2fff084f67c728c80a9864f67667ed79"} Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.589680 4971 generic.go:334] "Generic (PLEG): container finished" podID="2bec9dd3-b528-4b67-b949-8e9eb074e222" containerID="951993ff140652fc2efa39e6a84fef2b79d0a9d8777b3ea466d9979e8e9e7ae3" exitCode=0 Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.589741 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rnpmr" event={"ID":"2bec9dd3-b528-4b67-b949-8e9eb074e222","Type":"ContainerDied","Data":"951993ff140652fc2efa39e6a84fef2b79d0a9d8777b3ea466d9979e8e9e7ae3"} Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.589767 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-rnpmr" event={"ID":"2bec9dd3-b528-4b67-b949-8e9eb074e222","Type":"ContainerDied","Data":"a67bf21a6930c219142d4109f1a4c16f549c8dce2190a58c46bc0aef7c26cd8b"} Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.589784 4971 scope.go:117] "RemoveContainer" containerID="951993ff140652fc2efa39e6a84fef2b79d0a9d8777b3ea466d9979e8e9e7ae3" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.589896 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rnpmr" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.592586 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fhgrm" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.597894 4971 generic.go:334] "Generic (PLEG): container finished" podID="8978d4d3-664f-40f4-b33c-83cd92577bc3" containerID="b7a1cde51bbf318cdf838480249f3afd0bb260ed774d61fdfe43c67c9b42a1c2" exitCode=0 Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.598034 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhgrm" event={"ID":"8978d4d3-664f-40f4-b33c-83cd92577bc3","Type":"ContainerDied","Data":"b7a1cde51bbf318cdf838480249f3afd0bb260ed774d61fdfe43c67c9b42a1c2"} Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.609721 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qfvr4" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.610272 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.631775 4971 scope.go:117] "RemoveContainer" containerID="6269307f566369ece89a79544f896d99d6641ebbea8f82b35145c0eb6cbaceb6" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.665907 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thx9h\" (UniqueName: \"kubernetes.io/projected/2bec9dd3-b528-4b67-b949-8e9eb074e222-kube-api-access-thx9h\") pod \"2bec9dd3-b528-4b67-b949-8e9eb074e222\" (UID: \"2bec9dd3-b528-4b67-b949-8e9eb074e222\") " Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.666048 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bec9dd3-b528-4b67-b949-8e9eb074e222-utilities\") pod \"2bec9dd3-b528-4b67-b949-8e9eb074e222\" (UID: \"2bec9dd3-b528-4b67-b949-8e9eb074e222\") " Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.666133 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bec9dd3-b528-4b67-b949-8e9eb074e222-catalog-content\") pod \"2bec9dd3-b528-4b67-b949-8e9eb074e222\" (UID: \"2bec9dd3-b528-4b67-b949-8e9eb074e222\") " Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.667294 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2bec9dd3-b528-4b67-b949-8e9eb074e222-utilities" (OuterVolumeSpecName: "utilities") pod "2bec9dd3-b528-4b67-b949-8e9eb074e222" (UID: "2bec9dd3-b528-4b67-b949-8e9eb074e222"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.671346 4971 scope.go:117] "RemoveContainer" containerID="8fcb05713671593dd4936adedcd6c2b5af92a4b9f923ebbccb85c1684fedd4d1" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.677376 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bec9dd3-b528-4b67-b949-8e9eb074e222-kube-api-access-thx9h" (OuterVolumeSpecName: "kube-api-access-thx9h") pod "2bec9dd3-b528-4b67-b949-8e9eb074e222" (UID: "2bec9dd3-b528-4b67-b949-8e9eb074e222"). InnerVolumeSpecName "kube-api-access-thx9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.677940 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bec9dd3-b528-4b67-b949-8e9eb074e222-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.693370 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7pr9t" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.700839 4971 scope.go:117] "RemoveContainer" containerID="951993ff140652fc2efa39e6a84fef2b79d0a9d8777b3ea466d9979e8e9e7ae3" Nov 27 06:59:18 crc kubenswrapper[4971]: E1127 06:59:18.701302 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"951993ff140652fc2efa39e6a84fef2b79d0a9d8777b3ea466d9979e8e9e7ae3\": container with ID starting with 951993ff140652fc2efa39e6a84fef2b79d0a9d8777b3ea466d9979e8e9e7ae3 not found: ID does not exist" containerID="951993ff140652fc2efa39e6a84fef2b79d0a9d8777b3ea466d9979e8e9e7ae3" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.701344 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"951993ff140652fc2efa39e6a84fef2b79d0a9d8777b3ea466d9979e8e9e7ae3"} err="failed to get container status \"951993ff140652fc2efa39e6a84fef2b79d0a9d8777b3ea466d9979e8e9e7ae3\": rpc error: code = NotFound desc = could not find container \"951993ff140652fc2efa39e6a84fef2b79d0a9d8777b3ea466d9979e8e9e7ae3\": container with ID starting with 951993ff140652fc2efa39e6a84fef2b79d0a9d8777b3ea466d9979e8e9e7ae3 not found: ID does not exist" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.701375 4971 scope.go:117] "RemoveContainer" containerID="6269307f566369ece89a79544f896d99d6641ebbea8f82b35145c0eb6cbaceb6" Nov 27 06:59:18 crc kubenswrapper[4971]: E1127 06:59:18.701783 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6269307f566369ece89a79544f896d99d6641ebbea8f82b35145c0eb6cbaceb6\": container with ID starting with 6269307f566369ece89a79544f896d99d6641ebbea8f82b35145c0eb6cbaceb6 not found: ID does not exist" containerID="6269307f566369ece89a79544f896d99d6641ebbea8f82b35145c0eb6cbaceb6" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.701823 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6269307f566369ece89a79544f896d99d6641ebbea8f82b35145c0eb6cbaceb6"} err="failed to get container status \"6269307f566369ece89a79544f896d99d6641ebbea8f82b35145c0eb6cbaceb6\": rpc error: code = NotFound desc = could not find container \"6269307f566369ece89a79544f896d99d6641ebbea8f82b35145c0eb6cbaceb6\": container with ID starting with 
6269307f566369ece89a79544f896d99d6641ebbea8f82b35145c0eb6cbaceb6 not found: ID does not exist" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.701845 4971 scope.go:117] "RemoveContainer" containerID="8fcb05713671593dd4936adedcd6c2b5af92a4b9f923ebbccb85c1684fedd4d1" Nov 27 06:59:18 crc kubenswrapper[4971]: E1127 06:59:18.708504 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fcb05713671593dd4936adedcd6c2b5af92a4b9f923ebbccb85c1684fedd4d1\": container with ID starting with 8fcb05713671593dd4936adedcd6c2b5af92a4b9f923ebbccb85c1684fedd4d1 not found: ID does not exist" containerID="8fcb05713671593dd4936adedcd6c2b5af92a4b9f923ebbccb85c1684fedd4d1" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.708566 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fcb05713671593dd4936adedcd6c2b5af92a4b9f923ebbccb85c1684fedd4d1"} err="failed to get container status \"8fcb05713671593dd4936adedcd6c2b5af92a4b9f923ebbccb85c1684fedd4d1\": rpc error: code = NotFound desc = could not find container \"8fcb05713671593dd4936adedcd6c2b5af92a4b9f923ebbccb85c1684fedd4d1\": container with ID starting with 8fcb05713671593dd4936adedcd6c2b5af92a4b9f923ebbccb85c1684fedd4d1 not found: ID does not exist" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.708597 4971 scope.go:117] "RemoveContainer" containerID="b7a1cde51bbf318cdf838480249f3afd0bb260ed774d61fdfe43c67c9b42a1c2" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.728265 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2bec9dd3-b528-4b67-b949-8e9eb074e222-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2bec9dd3-b528-4b67-b949-8e9eb074e222" (UID: "2bec9dd3-b528-4b67-b949-8e9eb074e222"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.731084 4971 scope.go:117] "RemoveContainer" containerID="b462bf549ccf023ad9cda607d873082c532d9d373f7361bae301c46ce448bedb" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.752950 4971 scope.go:117] "RemoveContainer" containerID="71b67f7b32a915365aca80dfd8a4110bac67dd4de2065ef88791d3241b053b61" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.779108 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8978d4d3-664f-40f4-b33c-83cd92577bc3-catalog-content\") pod \"8978d4d3-664f-40f4-b33c-83cd92577bc3\" (UID: \"8978d4d3-664f-40f4-b33c-83cd92577bc3\") " Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.779176 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8978d4d3-664f-40f4-b33c-83cd92577bc3-utilities\") pod \"8978d4d3-664f-40f4-b33c-83cd92577bc3\" (UID: \"8978d4d3-664f-40f4-b33c-83cd92577bc3\") " Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.779200 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12325c6c-a817-46ae-b776-01213f064dbf-utilities\") pod \"12325c6c-a817-46ae-b776-01213f064dbf\" (UID: \"12325c6c-a817-46ae-b776-01213f064dbf\") " Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.779240 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qpcx6\" (UniqueName: \"kubernetes.io/projected/12325c6c-a817-46ae-b776-01213f064dbf-kube-api-access-qpcx6\") pod \"12325c6c-a817-46ae-b776-01213f064dbf\" (UID: \"12325c6c-a817-46ae-b776-01213f064dbf\") " Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.779260 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c-utilities\") pod \"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c\" (UID: \"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c\") " Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.779288 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12325c6c-a817-46ae-b776-01213f064dbf-catalog-content\") pod \"12325c6c-a817-46ae-b776-01213f064dbf\" (UID: \"12325c6c-a817-46ae-b776-01213f064dbf\") " Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.779359 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c-catalog-content\") pod \"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c\" (UID: \"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c\") " Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.779376 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gftcn\" (UniqueName: \"kubernetes.io/projected/8978d4d3-664f-40f4-b33c-83cd92577bc3-kube-api-access-gftcn\") pod \"8978d4d3-664f-40f4-b33c-83cd92577bc3\" (UID: \"8978d4d3-664f-40f4-b33c-83cd92577bc3\") " Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.779420 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9q5c\" (UniqueName: \"kubernetes.io/projected/6df59c92-6515-4bf1-aad4-57daf0d8fc76-kube-api-access-t9q5c\") pod 
\"6df59c92-6515-4bf1-aad4-57daf0d8fc76\" (UID: \"6df59c92-6515-4bf1-aad4-57daf0d8fc76\") " Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.779445 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6df59c92-6515-4bf1-aad4-57daf0d8fc76-marketplace-trusted-ca\") pod \"6df59c92-6515-4bf1-aad4-57daf0d8fc76\" (UID: \"6df59c92-6515-4bf1-aad4-57daf0d8fc76\") " Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.779489 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6df59c92-6515-4bf1-aad4-57daf0d8fc76-marketplace-operator-metrics\") pod \"6df59c92-6515-4bf1-aad4-57daf0d8fc76\" (UID: \"6df59c92-6515-4bf1-aad4-57daf0d8fc76\") " Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.779561 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbz5t\" (UniqueName: \"kubernetes.io/projected/6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c-kube-api-access-lbz5t\") pod \"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c\" (UID: \"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c\") " Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.779974 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thx9h\" (UniqueName: \"kubernetes.io/projected/2bec9dd3-b528-4b67-b949-8e9eb074e222-kube-api-access-thx9h\") on node \"crc\" DevicePath \"\"" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.780001 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bec9dd3-b528-4b67-b949-8e9eb074e222-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.780489 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12325c6c-a817-46ae-b776-01213f064dbf-utilities" (OuterVolumeSpecName: "utilities") pod "12325c6c-a817-46ae-b776-01213f064dbf" (UID: "12325c6c-a817-46ae-b776-01213f064dbf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.780903 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c-utilities" (OuterVolumeSpecName: "utilities") pod "6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c" (UID: "6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.781743 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8978d4d3-664f-40f4-b33c-83cd92577bc3-utilities" (OuterVolumeSpecName: "utilities") pod "8978d4d3-664f-40f4-b33c-83cd92577bc3" (UID: "8978d4d3-664f-40f4-b33c-83cd92577bc3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.781854 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6df59c92-6515-4bf1-aad4-57daf0d8fc76-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "6df59c92-6515-4bf1-aad4-57daf0d8fc76" (UID: "6df59c92-6515-4bf1-aad4-57daf0d8fc76"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.783732 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12325c6c-a817-46ae-b776-01213f064dbf-kube-api-access-qpcx6" (OuterVolumeSpecName: "kube-api-access-qpcx6") pod "12325c6c-a817-46ae-b776-01213f064dbf" (UID: "12325c6c-a817-46ae-b776-01213f064dbf"). InnerVolumeSpecName "kube-api-access-qpcx6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.785466 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c-kube-api-access-lbz5t" (OuterVolumeSpecName: "kube-api-access-lbz5t") pod "6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c" (UID: "6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c"). InnerVolumeSpecName "kube-api-access-lbz5t". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.785606 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6df59c92-6515-4bf1-aad4-57daf0d8fc76-kube-api-access-t9q5c" (OuterVolumeSpecName: "kube-api-access-t9q5c") pod "6df59c92-6515-4bf1-aad4-57daf0d8fc76" (UID: "6df59c92-6515-4bf1-aad4-57daf0d8fc76"). InnerVolumeSpecName "kube-api-access-t9q5c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.786140 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8978d4d3-664f-40f4-b33c-83cd92577bc3-kube-api-access-gftcn" (OuterVolumeSpecName: "kube-api-access-gftcn") pod "8978d4d3-664f-40f4-b33c-83cd92577bc3" (UID: "8978d4d3-664f-40f4-b33c-83cd92577bc3"). InnerVolumeSpecName "kube-api-access-gftcn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.788544 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6df59c92-6515-4bf1-aad4-57daf0d8fc76-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "6df59c92-6515-4bf1-aad4-57daf0d8fc76" (UID: "6df59c92-6515-4bf1-aad4-57daf0d8fc76"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.805824 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12325c6c-a817-46ae-b776-01213f064dbf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "12325c6c-a817-46ae-b776-01213f064dbf" (UID: "12325c6c-a817-46ae-b776-01213f064dbf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.839144 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8978d4d3-664f-40f4-b33c-83cd92577bc3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8978d4d3-664f-40f4-b33c-83cd92577bc3" (UID: "8978d4d3-664f-40f4-b33c-83cd92577bc3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.881291 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c" (UID: "6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.881860 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbz5t\" (UniqueName: \"kubernetes.io/projected/6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c-kube-api-access-lbz5t\") on node \"crc\" DevicePath \"\"" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.881885 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8978d4d3-664f-40f4-b33c-83cd92577bc3-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.881900 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8978d4d3-664f-40f4-b33c-83cd92577bc3-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.881916 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12325c6c-a817-46ae-b776-01213f064dbf-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.881929 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qpcx6\" (UniqueName: \"kubernetes.io/projected/12325c6c-a817-46ae-b776-01213f064dbf-kube-api-access-qpcx6\") on node \"crc\" DevicePath \"\"" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.881941 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.881953 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12325c6c-a817-46ae-b776-01213f064dbf-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.881965 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.881977 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gftcn\" (UniqueName: \"kubernetes.io/projected/8978d4d3-664f-40f4-b33c-83cd92577bc3-kube-api-access-gftcn\") on node \"crc\" DevicePath \"\"" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.881990 4971 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6df59c92-6515-4bf1-aad4-57daf0d8fc76-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.882002 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9q5c\" (UniqueName: \"kubernetes.io/projected/6df59c92-6515-4bf1-aad4-57daf0d8fc76-kube-api-access-t9q5c\") on node \"crc\" DevicePath \"\"" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 
06:59:18.882015 4971 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6df59c92-6515-4bf1-aad4-57daf0d8fc76-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.918776 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rnpmr"] Nov 27 06:59:18 crc kubenswrapper[4971]: I1127 06:59:18.921778 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rnpmr"] Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.090006 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-ctzhf"] Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.617734 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7pr9t" event={"ID":"6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c","Type":"ContainerDied","Data":"cf136719246fb2fbbd7867638b088e6c65e7ef06384a5ed4274749e24574e691"} Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.618193 4971 scope.go:117] "RemoveContainer" containerID="79fa646f743f7f4059a17ac081237d0b2fff084f67c728c80a9864f67667ed79" Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.617785 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7pr9t" Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.620300 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qfvr4" event={"ID":"12325c6c-a817-46ae-b776-01213f064dbf","Type":"ContainerDied","Data":"2bd668bed8eb62deed3238a0d76995fb7e9ce11337c4dcfe5d15fcf3bf99cc87"} Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.620420 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qfvr4" Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.623000 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fhgrm" event={"ID":"8978d4d3-664f-40f4-b33c-83cd92577bc3","Type":"ContainerDied","Data":"017b5f39748f3450e81a862768bae7d3e1c0528e4668861949318bdd353cf4ae"} Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.623095 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fhgrm" Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.627561 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-ctzhf" event={"ID":"9304f55c-4ec0-4828-bfe6-1024a5ec719e","Type":"ContainerStarted","Data":"4ec31e0143b35e9ad9623c1bddfc385e9b5feee18fb0da0e86b064deb0093c6e"} Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.627598 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-ctzhf" event={"ID":"9304f55c-4ec0-4828-bfe6-1024a5ec719e","Type":"ContainerStarted","Data":"4d60012f09e1c2b036aa9890b4c4fcef677839ef62ca847f20014e0c232d5ae9"} Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.627777 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-ctzhf" Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.628991 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" event={"ID":"6df59c92-6515-4bf1-aad4-57daf0d8fc76","Type":"ContainerDied","Data":"c71f053172ce11236b4467cdc2d146fba4751dc7c438d7e279921e05375653f9"} Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.629054 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-m7fh9" Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.631450 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-ctzhf" Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.636285 4971 scope.go:117] "RemoveContainer" containerID="771d9ad12566e5a483ac39cf336bbf56d8b49d98d00750fd6eee3f29f7ff9670" Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.658563 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-ctzhf" podStartSLOduration=1.658525301 podStartE2EDuration="1.658525301s" podCreationTimestamp="2025-11-27 06:59:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 06:59:19.653414409 +0000 UTC m=+397.845458337" watchObservedRunningTime="2025-11-27 06:59:19.658525301 +0000 UTC m=+397.850569209" Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.665496 4971 scope.go:117] "RemoveContainer" containerID="091514dc329b7c2121a5cd29126223055b6006787bd87b45acf23c5c6fa04c58" Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.681920 4971 scope.go:117] "RemoveContainer" containerID="ba3227ee0632cff4ea50b0ac425afb773f8b72eba30aa9fd27b8bd9d07d2e1d0" Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.709177 4971 scope.go:117] "RemoveContainer" containerID="353330776d82599c62b540162c29b1c3a1197f997124e798b0ea069a6553b707" Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.728973 4971 scope.go:117] "RemoveContainer" containerID="2889e4ac031d4ab036308eb77bb034fc39693c92bbc0872f14e4b6ef7b9a2e8e" Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.732363 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qfvr4"] Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.742136 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qfvr4"] Nov 27 06:59:19 crc 
kubenswrapper[4971]: I1127 06:59:19.748246 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7pr9t"] Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.751553 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7pr9t"] Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.753209 4971 scope.go:117] "RemoveContainer" containerID="96b79214b2981b78df80e586577ef173ce2597f65a1f86b3440300d2f27dd60e" Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.754906 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-m7fh9"] Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.759502 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-m7fh9"] Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.764754 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fhgrm"] Nov 27 06:59:19 crc kubenswrapper[4971]: I1127 06:59:19.779044 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fhgrm"] Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.291794 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-s6lp8"] Nov 27 06:59:20 crc kubenswrapper[4971]: E1127 06:59:20.292290 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12325c6c-a817-46ae-b776-01213f064dbf" containerName="registry-server" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.292303 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="12325c6c-a817-46ae-b776-01213f064dbf" containerName="registry-server" Nov 27 06:59:20 crc kubenswrapper[4971]: E1127 06:59:20.292313 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bec9dd3-b528-4b67-b949-8e9eb074e222" containerName="registry-server" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.292319 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bec9dd3-b528-4b67-b949-8e9eb074e222" containerName="registry-server" Nov 27 06:59:20 crc kubenswrapper[4971]: E1127 06:59:20.292326 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c" containerName="extract-utilities" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.292333 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c" containerName="extract-utilities" Nov 27 06:59:20 crc kubenswrapper[4971]: E1127 06:59:20.292340 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8978d4d3-664f-40f4-b33c-83cd92577bc3" containerName="extract-content" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.292347 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="8978d4d3-664f-40f4-b33c-83cd92577bc3" containerName="extract-content" Nov 27 06:59:20 crc kubenswrapper[4971]: E1127 06:59:20.292360 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c" containerName="extract-content" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.292368 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c" containerName="extract-content" Nov 27 06:59:20 crc kubenswrapper[4971]: E1127 06:59:20.292379 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8978d4d3-664f-40f4-b33c-83cd92577bc3" 
containerName="registry-server" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.292385 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="8978d4d3-664f-40f4-b33c-83cd92577bc3" containerName="registry-server" Nov 27 06:59:20 crc kubenswrapper[4971]: E1127 06:59:20.292393 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12325c6c-a817-46ae-b776-01213f064dbf" containerName="extract-utilities" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.292400 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="12325c6c-a817-46ae-b776-01213f064dbf" containerName="extract-utilities" Nov 27 06:59:20 crc kubenswrapper[4971]: E1127 06:59:20.292411 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6df59c92-6515-4bf1-aad4-57daf0d8fc76" containerName="marketplace-operator" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.292419 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6df59c92-6515-4bf1-aad4-57daf0d8fc76" containerName="marketplace-operator" Nov 27 06:59:20 crc kubenswrapper[4971]: E1127 06:59:20.292430 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bec9dd3-b528-4b67-b949-8e9eb074e222" containerName="extract-content" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.292436 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bec9dd3-b528-4b67-b949-8e9eb074e222" containerName="extract-content" Nov 27 06:59:20 crc kubenswrapper[4971]: E1127 06:59:20.292444 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c" containerName="registry-server" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.292450 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c" containerName="registry-server" Nov 27 06:59:20 crc kubenswrapper[4971]: E1127 06:59:20.292461 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8978d4d3-664f-40f4-b33c-83cd92577bc3" containerName="extract-utilities" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.292468 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="8978d4d3-664f-40f4-b33c-83cd92577bc3" containerName="extract-utilities" Nov 27 06:59:20 crc kubenswrapper[4971]: E1127 06:59:20.292479 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12325c6c-a817-46ae-b776-01213f064dbf" containerName="extract-content" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.292486 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="12325c6c-a817-46ae-b776-01213f064dbf" containerName="extract-content" Nov 27 06:59:20 crc kubenswrapper[4971]: E1127 06:59:20.292499 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bec9dd3-b528-4b67-b949-8e9eb074e222" containerName="extract-utilities" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.292507 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bec9dd3-b528-4b67-b949-8e9eb074e222" containerName="extract-utilities" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.292654 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="12325c6c-a817-46ae-b776-01213f064dbf" containerName="registry-server" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.292667 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="8978d4d3-664f-40f4-b33c-83cd92577bc3" containerName="registry-server" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.292677 4971 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c" containerName="registry-server" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.292687 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="6df59c92-6515-4bf1-aad4-57daf0d8fc76" containerName="marketplace-operator" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.292699 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bec9dd3-b528-4b67-b949-8e9eb074e222" containerName="registry-server" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.293351 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s6lp8" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.296465 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.299033 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s6lp8"] Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.302564 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8gdv\" (UniqueName: \"kubernetes.io/projected/8c727031-a0df-4bca-be8b-e8211a220302-kube-api-access-t8gdv\") pod \"redhat-marketplace-s6lp8\" (UID: \"8c727031-a0df-4bca-be8b-e8211a220302\") " pod="openshift-marketplace/redhat-marketplace-s6lp8" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.302611 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c727031-a0df-4bca-be8b-e8211a220302-catalog-content\") pod \"redhat-marketplace-s6lp8\" (UID: \"8c727031-a0df-4bca-be8b-e8211a220302\") " pod="openshift-marketplace/redhat-marketplace-s6lp8" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.302665 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c727031-a0df-4bca-be8b-e8211a220302-utilities\") pod \"redhat-marketplace-s6lp8\" (UID: \"8c727031-a0df-4bca-be8b-e8211a220302\") " pod="openshift-marketplace/redhat-marketplace-s6lp8" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.403687 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8gdv\" (UniqueName: \"kubernetes.io/projected/8c727031-a0df-4bca-be8b-e8211a220302-kube-api-access-t8gdv\") pod \"redhat-marketplace-s6lp8\" (UID: \"8c727031-a0df-4bca-be8b-e8211a220302\") " pod="openshift-marketplace/redhat-marketplace-s6lp8" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.403757 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c727031-a0df-4bca-be8b-e8211a220302-catalog-content\") pod \"redhat-marketplace-s6lp8\" (UID: \"8c727031-a0df-4bca-be8b-e8211a220302\") " pod="openshift-marketplace/redhat-marketplace-s6lp8" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.403814 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c727031-a0df-4bca-be8b-e8211a220302-utilities\") pod \"redhat-marketplace-s6lp8\" (UID: \"8c727031-a0df-4bca-be8b-e8211a220302\") " pod="openshift-marketplace/redhat-marketplace-s6lp8" Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.404370 4971 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c727031-a0df-4bca-be8b-e8211a220302-utilities\") pod \"redhat-marketplace-s6lp8\" (UID: \"8c727031-a0df-4bca-be8b-e8211a220302\") " pod="openshift-marketplace/redhat-marketplace-s6lp8"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.404385 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c727031-a0df-4bca-be8b-e8211a220302-catalog-content\") pod \"redhat-marketplace-s6lp8\" (UID: \"8c727031-a0df-4bca-be8b-e8211a220302\") " pod="openshift-marketplace/redhat-marketplace-s6lp8"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.424316 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8gdv\" (UniqueName: \"kubernetes.io/projected/8c727031-a0df-4bca-be8b-e8211a220302-kube-api-access-t8gdv\") pod \"redhat-marketplace-s6lp8\" (UID: \"8c727031-a0df-4bca-be8b-e8211a220302\") " pod="openshift-marketplace/redhat-marketplace-s6lp8"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.487525 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-j7mvd"]
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.488721 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j7mvd"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.500511 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.513816 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j7mvd"]
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.523074 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhx7m\" (UniqueName: \"kubernetes.io/projected/6f79433c-d33b-4b19-9582-2900de4137a1-kube-api-access-bhx7m\") pod \"redhat-operators-j7mvd\" (UID: \"6f79433c-d33b-4b19-9582-2900de4137a1\") " pod="openshift-marketplace/redhat-operators-j7mvd"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.524214 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f79433c-d33b-4b19-9582-2900de4137a1-catalog-content\") pod \"redhat-operators-j7mvd\" (UID: \"6f79433c-d33b-4b19-9582-2900de4137a1\") " pod="openshift-marketplace/redhat-operators-j7mvd"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.524304 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f79433c-d33b-4b19-9582-2900de4137a1-utilities\") pod \"redhat-operators-j7mvd\" (UID: \"6f79433c-d33b-4b19-9582-2900de4137a1\") " pod="openshift-marketplace/redhat-operators-j7mvd"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.563038 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12325c6c-a817-46ae-b776-01213f064dbf" path="/var/lib/kubelet/pods/12325c6c-a817-46ae-b776-01213f064dbf/volumes"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.564231 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bec9dd3-b528-4b67-b949-8e9eb074e222" path="/var/lib/kubelet/pods/2bec9dd3-b528-4b67-b949-8e9eb074e222/volumes"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.565048 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c" path="/var/lib/kubelet/pods/6dd9ee5f-ff22-401e-86c2-eb7f809a3f8c/volumes"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.566523 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6df59c92-6515-4bf1-aad4-57daf0d8fc76" path="/var/lib/kubelet/pods/6df59c92-6515-4bf1-aad4-57daf0d8fc76/volumes"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.567181 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8978d4d3-664f-40f4-b33c-83cd92577bc3" path="/var/lib/kubelet/pods/8978d4d3-664f-40f4-b33c-83cd92577bc3/volumes"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.612039 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s6lp8"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.625845 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhx7m\" (UniqueName: \"kubernetes.io/projected/6f79433c-d33b-4b19-9582-2900de4137a1-kube-api-access-bhx7m\") pod \"redhat-operators-j7mvd\" (UID: \"6f79433c-d33b-4b19-9582-2900de4137a1\") " pod="openshift-marketplace/redhat-operators-j7mvd"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.625924 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f79433c-d33b-4b19-9582-2900de4137a1-catalog-content\") pod \"redhat-operators-j7mvd\" (UID: \"6f79433c-d33b-4b19-9582-2900de4137a1\") " pod="openshift-marketplace/redhat-operators-j7mvd"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.625972 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f79433c-d33b-4b19-9582-2900de4137a1-utilities\") pod \"redhat-operators-j7mvd\" (UID: \"6f79433c-d33b-4b19-9582-2900de4137a1\") " pod="openshift-marketplace/redhat-operators-j7mvd"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.626563 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f79433c-d33b-4b19-9582-2900de4137a1-utilities\") pod \"redhat-operators-j7mvd\" (UID: \"6f79433c-d33b-4b19-9582-2900de4137a1\") " pod="openshift-marketplace/redhat-operators-j7mvd"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.626869 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f79433c-d33b-4b19-9582-2900de4137a1-catalog-content\") pod \"redhat-operators-j7mvd\" (UID: \"6f79433c-d33b-4b19-9582-2900de4137a1\") " pod="openshift-marketplace/redhat-operators-j7mvd"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.650830 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhx7m\" (UniqueName: \"kubernetes.io/projected/6f79433c-d33b-4b19-9582-2900de4137a1-kube-api-access-bhx7m\") pod \"redhat-operators-j7mvd\" (UID: \"6f79433c-d33b-4b19-9582-2900de4137a1\") " pod="openshift-marketplace/redhat-operators-j7mvd"
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.813007 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s6lp8"]
Nov 27 06:59:20 crc kubenswrapper[4971]: W1127 06:59:20.818937 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c727031_a0df_4bca_be8b_e8211a220302.slice/crio-3cd64908ae06f6dfdf66faec9c3bae0bbab1db15edb6eae4116256f9478887da WatchSource:0}: Error finding container 3cd64908ae06f6dfdf66faec9c3bae0bbab1db15edb6eae4116256f9478887da: Status 404 returned error can't find the container with id 3cd64908ae06f6dfdf66faec9c3bae0bbab1db15edb6eae4116256f9478887da
Nov 27 06:59:20 crc kubenswrapper[4971]: I1127 06:59:20.838161 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j7mvd"
Nov 27 06:59:21 crc kubenswrapper[4971]: I1127 06:59:21.025101 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j7mvd"]
Nov 27 06:59:21 crc kubenswrapper[4971]: W1127 06:59:21.069605 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f79433c_d33b_4b19_9582_2900de4137a1.slice/crio-a2d17524fca1da37d0c32483389affa756f91f7e1e123153cf6404bff9e65540 WatchSource:0}: Error finding container a2d17524fca1da37d0c32483389affa756f91f7e1e123153cf6404bff9e65540: Status 404 returned error can't find the container with id a2d17524fca1da37d0c32483389affa756f91f7e1e123153cf6404bff9e65540
Nov 27 06:59:21 crc kubenswrapper[4971]: I1127 06:59:21.645173 4971 generic.go:334] "Generic (PLEG): container finished" podID="8c727031-a0df-4bca-be8b-e8211a220302" containerID="4a2714b317ae43f7256cc6a3dcfcf2013fac4d91afca0ae59b163cdaf1c2feb3" exitCode=0
Nov 27 06:59:21 crc kubenswrapper[4971]: I1127 06:59:21.645219 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s6lp8" event={"ID":"8c727031-a0df-4bca-be8b-e8211a220302","Type":"ContainerDied","Data":"4a2714b317ae43f7256cc6a3dcfcf2013fac4d91afca0ae59b163cdaf1c2feb3"}
Nov 27 06:59:21 crc kubenswrapper[4971]: I1127 06:59:21.645249 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s6lp8" event={"ID":"8c727031-a0df-4bca-be8b-e8211a220302","Type":"ContainerStarted","Data":"3cd64908ae06f6dfdf66faec9c3bae0bbab1db15edb6eae4116256f9478887da"}
Nov 27 06:59:21 crc kubenswrapper[4971]: I1127 06:59:21.646866 4971 generic.go:334] "Generic (PLEG): container finished" podID="6f79433c-d33b-4b19-9582-2900de4137a1" containerID="bd5f0fb8c49fa9083a8d084dc94b8db25dcc99e7024fc57080d53abf2eb4563c" exitCode=0
Nov 27 06:59:21 crc kubenswrapper[4971]: I1127 06:59:21.646901 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j7mvd" event={"ID":"6f79433c-d33b-4b19-9582-2900de4137a1","Type":"ContainerDied","Data":"bd5f0fb8c49fa9083a8d084dc94b8db25dcc99e7024fc57080d53abf2eb4563c"}
Nov 27 06:59:21 crc kubenswrapper[4971]: I1127 06:59:21.646929 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j7mvd" event={"ID":"6f79433c-d33b-4b19-9582-2900de4137a1","Type":"ContainerStarted","Data":"a2d17524fca1da37d0c32483389affa756f91f7e1e123153cf6404bff9e65540"}
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.652991 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j7mvd" event={"ID":"6f79433c-d33b-4b19-9582-2900de4137a1","Type":"ContainerStarted","Data":"56e6ce4353c87a28a1b7c4339ebb216d08ef4578f29782f3e4d21a3463dcc5f2"}
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.656427 4971 generic.go:334] "Generic (PLEG): container finished" podID="8c727031-a0df-4bca-be8b-e8211a220302" containerID="be8d93af58ab5db8a4277bd17b6be6f5ac8b4fed91b5641ff5af5c7e65f2335a" exitCode=0
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.656478 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s6lp8" event={"ID":"8c727031-a0df-4bca-be8b-e8211a220302","Type":"ContainerDied","Data":"be8d93af58ab5db8a4277bd17b6be6f5ac8b4fed91b5641ff5af5c7e65f2335a"}
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.707270 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jp5gl"]
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.708261 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jp5gl"
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.713154 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.719225 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jp5gl"]
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.756149 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9300284e-8f0c-42eb-a339-820379f2b91c-utilities\") pod \"community-operators-jp5gl\" (UID: \"9300284e-8f0c-42eb-a339-820379f2b91c\") " pod="openshift-marketplace/community-operators-jp5gl"
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.756219 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9300284e-8f0c-42eb-a339-820379f2b91c-catalog-content\") pod \"community-operators-jp5gl\" (UID: \"9300284e-8f0c-42eb-a339-820379f2b91c\") " pod="openshift-marketplace/community-operators-jp5gl"
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.756330 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86l6c\" (UniqueName: \"kubernetes.io/projected/9300284e-8f0c-42eb-a339-820379f2b91c-kube-api-access-86l6c\") pod \"community-operators-jp5gl\" (UID: \"9300284e-8f0c-42eb-a339-820379f2b91c\") " pod="openshift-marketplace/community-operators-jp5gl"
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.857289 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9300284e-8f0c-42eb-a339-820379f2b91c-catalog-content\") pod \"community-operators-jp5gl\" (UID: \"9300284e-8f0c-42eb-a339-820379f2b91c\") " pod="openshift-marketplace/community-operators-jp5gl"
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.857365 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86l6c\" (UniqueName: \"kubernetes.io/projected/9300284e-8f0c-42eb-a339-820379f2b91c-kube-api-access-86l6c\") pod \"community-operators-jp5gl\" (UID: \"9300284e-8f0c-42eb-a339-820379f2b91c\") " pod="openshift-marketplace/community-operators-jp5gl"
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.857434 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9300284e-8f0c-42eb-a339-820379f2b91c-utilities\") pod \"community-operators-jp5gl\" (UID: \"9300284e-8f0c-42eb-a339-820379f2b91c\") " pod="openshift-marketplace/community-operators-jp5gl"
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.857874 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9300284e-8f0c-42eb-a339-820379f2b91c-utilities\") pod \"community-operators-jp5gl\" (UID: \"9300284e-8f0c-42eb-a339-820379f2b91c\") " pod="openshift-marketplace/community-operators-jp5gl"
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.858145 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9300284e-8f0c-42eb-a339-820379f2b91c-catalog-content\") pod \"community-operators-jp5gl\" (UID: \"9300284e-8f0c-42eb-a339-820379f2b91c\") " pod="openshift-marketplace/community-operators-jp5gl"
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.884099 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86l6c\" (UniqueName: \"kubernetes.io/projected/9300284e-8f0c-42eb-a339-820379f2b91c-kube-api-access-86l6c\") pod \"community-operators-jp5gl\" (UID: \"9300284e-8f0c-42eb-a339-820379f2b91c\") " pod="openshift-marketplace/community-operators-jp5gl"
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.893786 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pthvj"]
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.894794 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pthvj"
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.902107 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.909761 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pthvj"]
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.958466 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rn994\" (UniqueName: \"kubernetes.io/projected/e22bb0db-bf56-410d-8f07-3b6e9c99b15e-kube-api-access-rn994\") pod \"certified-operators-pthvj\" (UID: \"e22bb0db-bf56-410d-8f07-3b6e9c99b15e\") " pod="openshift-marketplace/certified-operators-pthvj"
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.958546 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e22bb0db-bf56-410d-8f07-3b6e9c99b15e-catalog-content\") pod \"certified-operators-pthvj\" (UID: \"e22bb0db-bf56-410d-8f07-3b6e9c99b15e\") " pod="openshift-marketplace/certified-operators-pthvj"
Nov 27 06:59:22 crc kubenswrapper[4971]: I1127 06:59:22.958578 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e22bb0db-bf56-410d-8f07-3b6e9c99b15e-utilities\") pod \"certified-operators-pthvj\" (UID: \"e22bb0db-bf56-410d-8f07-3b6e9c99b15e\") " pod="openshift-marketplace/certified-operators-pthvj"
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.026260 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jp5gl"
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.059426 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e22bb0db-bf56-410d-8f07-3b6e9c99b15e-catalog-content\") pod \"certified-operators-pthvj\" (UID: \"e22bb0db-bf56-410d-8f07-3b6e9c99b15e\") " pod="openshift-marketplace/certified-operators-pthvj"
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.059499 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e22bb0db-bf56-410d-8f07-3b6e9c99b15e-utilities\") pod \"certified-operators-pthvj\" (UID: \"e22bb0db-bf56-410d-8f07-3b6e9c99b15e\") " pod="openshift-marketplace/certified-operators-pthvj"
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.059587 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rn994\" (UniqueName: \"kubernetes.io/projected/e22bb0db-bf56-410d-8f07-3b6e9c99b15e-kube-api-access-rn994\") pod \"certified-operators-pthvj\" (UID: \"e22bb0db-bf56-410d-8f07-3b6e9c99b15e\") " pod="openshift-marketplace/certified-operators-pthvj"
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.060567 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e22bb0db-bf56-410d-8f07-3b6e9c99b15e-catalog-content\") pod \"certified-operators-pthvj\" (UID: \"e22bb0db-bf56-410d-8f07-3b6e9c99b15e\") " pod="openshift-marketplace/certified-operators-pthvj"
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.060690 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e22bb0db-bf56-410d-8f07-3b6e9c99b15e-utilities\") pod \"certified-operators-pthvj\" (UID: \"e22bb0db-bf56-410d-8f07-3b6e9c99b15e\") " pod="openshift-marketplace/certified-operators-pthvj"
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.080409 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rn994\" (UniqueName: \"kubernetes.io/projected/e22bb0db-bf56-410d-8f07-3b6e9c99b15e-kube-api-access-rn994\") pod \"certified-operators-pthvj\" (UID: \"e22bb0db-bf56-410d-8f07-3b6e9c99b15e\") " pod="openshift-marketplace/certified-operators-pthvj"
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.242353 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jp5gl"]
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.261263 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pthvj"
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.441969 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pthvj"]
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.668966 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s6lp8" event={"ID":"8c727031-a0df-4bca-be8b-e8211a220302","Type":"ContainerStarted","Data":"57165d87a809cb8d14f9eef207d1c97f8e8f8a7701699215a1e0b24f3db46bbf"}
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.672275 4971 generic.go:334] "Generic (PLEG): container finished" podID="6f79433c-d33b-4b19-9582-2900de4137a1" containerID="56e6ce4353c87a28a1b7c4339ebb216d08ef4578f29782f3e4d21a3463dcc5f2" exitCode=0
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.672396 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j7mvd" event={"ID":"6f79433c-d33b-4b19-9582-2900de4137a1","Type":"ContainerDied","Data":"56e6ce4353c87a28a1b7c4339ebb216d08ef4578f29782f3e4d21a3463dcc5f2"}
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.676368 4971 generic.go:334] "Generic (PLEG): container finished" podID="9300284e-8f0c-42eb-a339-820379f2b91c" containerID="3e909acb4be6176018a798548e63330e73faced5e499823208284ab2f5bf1d4f" exitCode=0
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.676586 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jp5gl" event={"ID":"9300284e-8f0c-42eb-a339-820379f2b91c","Type":"ContainerDied","Data":"3e909acb4be6176018a798548e63330e73faced5e499823208284ab2f5bf1d4f"}
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.676669 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jp5gl" event={"ID":"9300284e-8f0c-42eb-a339-820379f2b91c","Type":"ContainerStarted","Data":"454214d93e5dc9f6eefaa52bd8479dad87d2ba51a0d868b250951dbf4539a9d8"}
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.680039 4971 generic.go:334] "Generic (PLEG): container finished" podID="e22bb0db-bf56-410d-8f07-3b6e9c99b15e" containerID="2496911d29c8054af3d8e9b965cb68546683749ae2af2735cd6711b2e6cefccd" exitCode=0
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.680081 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pthvj" event={"ID":"e22bb0db-bf56-410d-8f07-3b6e9c99b15e","Type":"ContainerDied","Data":"2496911d29c8054af3d8e9b965cb68546683749ae2af2735cd6711b2e6cefccd"}
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.680106 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pthvj" event={"ID":"e22bb0db-bf56-410d-8f07-3b6e9c99b15e","Type":"ContainerStarted","Data":"a1bc42b44ed7bd109c17e4b16371cccb2a92c1d237cb388086718eeff9b90134"}
Nov 27 06:59:23 crc kubenswrapper[4971]: I1127 06:59:23.697128 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-s6lp8" podStartSLOduration=1.959619378 podStartE2EDuration="3.697107989s" podCreationTimestamp="2025-11-27 06:59:20 +0000 UTC" firstStartedPulling="2025-11-27 06:59:21.646267816 +0000 UTC m=+399.838311734" lastFinishedPulling="2025-11-27 06:59:23.383756427 +0000 UTC m=+401.575800345" observedRunningTime="2025-11-27 06:59:23.693266734 +0000 UTC m=+401.885310662" watchObservedRunningTime="2025-11-27 06:59:23.697107989 +0000 UTC m=+401.889151917"
Nov 27 06:59:24 crc kubenswrapper[4971]: I1127 06:59:24.690934 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jp5gl" event={"ID":"9300284e-8f0c-42eb-a339-820379f2b91c","Type":"ContainerStarted","Data":"7dbd9cd76c6ada3cd6803b994ee52c820a52bd00519d02406f81974e6158a947"}
Nov 27 06:59:24 crc kubenswrapper[4971]: I1127 06:59:24.693184 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pthvj" event={"ID":"e22bb0db-bf56-410d-8f07-3b6e9c99b15e","Type":"ContainerStarted","Data":"def3a620538f01b98414b7aea4df42df57e943427ac253a0bd6918141b133841"}
Nov 27 06:59:24 crc kubenswrapper[4971]: I1127 06:59:24.703145 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j7mvd" event={"ID":"6f79433c-d33b-4b19-9582-2900de4137a1","Type":"ContainerStarted","Data":"c53c597849c67141f067e5c7ba517cde9180d7b10d302dffdc87ed6c67b50c10"}
Nov 27 06:59:24 crc kubenswrapper[4971]: I1127 06:59:24.778590 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-j7mvd" podStartSLOduration=2.177645441 podStartE2EDuration="4.778571855s" podCreationTimestamp="2025-11-27 06:59:20 +0000 UTC" firstStartedPulling="2025-11-27 06:59:21.647932066 +0000 UTC m=+399.839975984" lastFinishedPulling="2025-11-27 06:59:24.24885848 +0000 UTC m=+402.440902398" observedRunningTime="2025-11-27 06:59:24.754021793 +0000 UTC m=+402.946065711" watchObservedRunningTime="2025-11-27 06:59:24.778571855 +0000 UTC m=+402.970615773"
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.242236 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" podUID="227860ca-baf3-4b07-b0b3-9a29a9eedab6" containerName="registry" containerID="cri-o://f7afc7a3453babc87b8684c325843cb9da949e0d4fb663c480b5d3207488ba10" gracePeriod=30
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.552835 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9"
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.598752 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/227860ca-baf3-4b07-b0b3-9a29a9eedab6-registry-certificates\") pod \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") "
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.598796 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/227860ca-baf3-4b07-b0b3-9a29a9eedab6-ca-trust-extracted\") pod \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") "
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.598828 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xfdp8\" (UniqueName: \"kubernetes.io/projected/227860ca-baf3-4b07-b0b3-9a29a9eedab6-kube-api-access-xfdp8\") pod \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") "
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.598855 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/227860ca-baf3-4b07-b0b3-9a29a9eedab6-trusted-ca\") pod \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") "
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.598875 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/227860ca-baf3-4b07-b0b3-9a29a9eedab6-registry-tls\") pod \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") "
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.598911 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/227860ca-baf3-4b07-b0b3-9a29a9eedab6-bound-sa-token\") pod \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") "
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.599883 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/227860ca-baf3-4b07-b0b3-9a29a9eedab6-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "227860ca-baf3-4b07-b0b3-9a29a9eedab6" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.599955 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/227860ca-baf3-4b07-b0b3-9a29a9eedab6-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "227860ca-baf3-4b07-b0b3-9a29a9eedab6" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.600100 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") "
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.600159 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/227860ca-baf3-4b07-b0b3-9a29a9eedab6-installation-pull-secrets\") pod \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\" (UID: \"227860ca-baf3-4b07-b0b3-9a29a9eedab6\") "
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.600370 4971 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/227860ca-baf3-4b07-b0b3-9a29a9eedab6-registry-certificates\") on node \"crc\" DevicePath \"\""
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.600386 4971 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/227860ca-baf3-4b07-b0b3-9a29a9eedab6-trusted-ca\") on node \"crc\" DevicePath \"\""
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.608368 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/227860ca-baf3-4b07-b0b3-9a29a9eedab6-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "227860ca-baf3-4b07-b0b3-9a29a9eedab6" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.608977 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/227860ca-baf3-4b07-b0b3-9a29a9eedab6-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "227860ca-baf3-4b07-b0b3-9a29a9eedab6" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.611235 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "227860ca-baf3-4b07-b0b3-9a29a9eedab6" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.612940 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/227860ca-baf3-4b07-b0b3-9a29a9eedab6-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "227860ca-baf3-4b07-b0b3-9a29a9eedab6" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.620896 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/227860ca-baf3-4b07-b0b3-9a29a9eedab6-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "227860ca-baf3-4b07-b0b3-9a29a9eedab6" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.621903 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/227860ca-baf3-4b07-b0b3-9a29a9eedab6-kube-api-access-xfdp8" (OuterVolumeSpecName: "kube-api-access-xfdp8") pod "227860ca-baf3-4b07-b0b3-9a29a9eedab6" (UID: "227860ca-baf3-4b07-b0b3-9a29a9eedab6"). InnerVolumeSpecName "kube-api-access-xfdp8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.702236 4971 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/227860ca-baf3-4b07-b0b3-9a29a9eedab6-registry-tls\") on node \"crc\" DevicePath \"\""
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.702285 4971 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/227860ca-baf3-4b07-b0b3-9a29a9eedab6-bound-sa-token\") on node \"crc\" DevicePath \"\""
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.702301 4971 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/227860ca-baf3-4b07-b0b3-9a29a9eedab6-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.702318 4971 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/227860ca-baf3-4b07-b0b3-9a29a9eedab6-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.702329 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfdp8\" (UniqueName: \"kubernetes.io/projected/227860ca-baf3-4b07-b0b3-9a29a9eedab6-kube-api-access-xfdp8\") on node \"crc\" DevicePath \"\""
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.713397 4971 generic.go:334] "Generic (PLEG): container finished" podID="9300284e-8f0c-42eb-a339-820379f2b91c" containerID="7dbd9cd76c6ada3cd6803b994ee52c820a52bd00519d02406f81974e6158a947" exitCode=0
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.713508 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jp5gl" event={"ID":"9300284e-8f0c-42eb-a339-820379f2b91c","Type":"ContainerDied","Data":"7dbd9cd76c6ada3cd6803b994ee52c820a52bd00519d02406f81974e6158a947"}
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.722967 4971 generic.go:334] "Generic (PLEG): container finished" podID="e22bb0db-bf56-410d-8f07-3b6e9c99b15e" containerID="def3a620538f01b98414b7aea4df42df57e943427ac253a0bd6918141b133841" exitCode=0
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.723081 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pthvj" event={"ID":"e22bb0db-bf56-410d-8f07-3b6e9c99b15e","Type":"ContainerDied","Data":"def3a620538f01b98414b7aea4df42df57e943427ac253a0bd6918141b133841"}
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.726226 4971 generic.go:334] "Generic (PLEG): container finished" podID="227860ca-baf3-4b07-b0b3-9a29a9eedab6" containerID="f7afc7a3453babc87b8684c325843cb9da949e0d4fb663c480b5d3207488ba10" exitCode=0
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.726341 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" event={"ID":"227860ca-baf3-4b07-b0b3-9a29a9eedab6","Type":"ContainerDied","Data":"f7afc7a3453babc87b8684c325843cb9da949e0d4fb663c480b5d3207488ba10"}
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.726380 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9" event={"ID":"227860ca-baf3-4b07-b0b3-9a29a9eedab6","Type":"ContainerDied","Data":"c1b19a9c8726abf46ebd5db73bb73ebd0e9d3dc15eed1e04c5ba43cd36ba4cf9"}
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.726400 4971 scope.go:117] "RemoveContainer" containerID="f7afc7a3453babc87b8684c325843cb9da949e0d4fb663c480b5d3207488ba10"
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.727657 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-rn8q9"
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.750542 4971 scope.go:117] "RemoveContainer" containerID="f7afc7a3453babc87b8684c325843cb9da949e0d4fb663c480b5d3207488ba10"
Nov 27 06:59:25 crc kubenswrapper[4971]: E1127 06:59:25.750967 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7afc7a3453babc87b8684c325843cb9da949e0d4fb663c480b5d3207488ba10\": container with ID starting with f7afc7a3453babc87b8684c325843cb9da949e0d4fb663c480b5d3207488ba10 not found: ID does not exist" containerID="f7afc7a3453babc87b8684c325843cb9da949e0d4fb663c480b5d3207488ba10"
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.750989 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7afc7a3453babc87b8684c325843cb9da949e0d4fb663c480b5d3207488ba10"} err="failed to get container status \"f7afc7a3453babc87b8684c325843cb9da949e0d4fb663c480b5d3207488ba10\": rpc error: code = NotFound desc = could not find container \"f7afc7a3453babc87b8684c325843cb9da949e0d4fb663c480b5d3207488ba10\": container with ID starting with f7afc7a3453babc87b8684c325843cb9da949e0d4fb663c480b5d3207488ba10 not found: ID does not exist"
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.797708 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rn8q9"]
Nov 27 06:59:25 crc kubenswrapper[4971]: I1127 06:59:25.806102 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rn8q9"]
Nov 27 06:59:26 crc kubenswrapper[4971]: I1127 06:59:26.413553 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 06:59:26 crc kubenswrapper[4971]: I1127 06:59:26.413999 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 06:59:26 crc kubenswrapper[4971]: I1127 06:59:26.557223 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="227860ca-baf3-4b07-b0b3-9a29a9eedab6" path="/var/lib/kubelet/pods/227860ca-baf3-4b07-b0b3-9a29a9eedab6/volumes"
Nov 27 06:59:26 crc kubenswrapper[4971]: I1127 06:59:26.735659 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jp5gl" event={"ID":"9300284e-8f0c-42eb-a339-820379f2b91c","Type":"ContainerStarted","Data":"8a15d15b5be2dd90274ada67dbb42d451b41a458531a59ada5a7e8ccd26f99f4"}
Nov 27 06:59:26 crc kubenswrapper[4971]: I1127 06:59:26.738542 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pthvj" event={"ID":"e22bb0db-bf56-410d-8f07-3b6e9c99b15e","Type":"ContainerStarted","Data":"dbf8a69731d3c9ea03293cdc4b2ef56d962ea08885ac7327c8b40e0993323a59"}
Nov 27 06:59:26 crc kubenswrapper[4971]: I1127 06:59:26.759276 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jp5gl" podStartSLOduration=2.2813420669999998 podStartE2EDuration="4.759250308s" podCreationTimestamp="2025-11-27 06:59:22 +0000 UTC" firstStartedPulling="2025-11-27 06:59:23.678510205 +0000 UTC m=+401.870554123" lastFinishedPulling="2025-11-27 06:59:26.156418446 +0000 UTC m=+404.348462364" observedRunningTime="2025-11-27 06:59:26.753805626 +0000 UTC m=+404.945849554" watchObservedRunningTime="2025-11-27 06:59:26.759250308 +0000 UTC m=+404.951294226"
Nov 27 06:59:30 crc kubenswrapper[4971]: I1127 06:59:30.612280 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-s6lp8"
Nov 27 06:59:30 crc kubenswrapper[4971]: I1127 06:59:30.612611 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-s6lp8"
Nov 27 06:59:30 crc kubenswrapper[4971]: I1127 06:59:30.656783 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-s6lp8"
Nov 27 06:59:30 crc kubenswrapper[4971]: I1127 06:59:30.674873 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pthvj" podStartSLOduration=5.91074633 podStartE2EDuration="8.674855514s" podCreationTimestamp="2025-11-27 06:59:22 +0000 UTC" firstStartedPulling="2025-11-27 06:59:23.683447372 +0000 UTC m=+401.875491290" lastFinishedPulling="2025-11-27 06:59:26.447556556 +0000 UTC m=+404.639600474" observedRunningTime="2025-11-27 06:59:26.782943454 +0000 UTC m=+404.974987392" watchObservedRunningTime="2025-11-27 06:59:30.674855514 +0000 UTC m=+408.866899452"
Nov 27 06:59:30 crc kubenswrapper[4971]: I1127 06:59:30.800978 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-s6lp8"
Nov 27 06:59:30 crc kubenswrapper[4971]: I1127 06:59:30.838912 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-j7mvd"
Nov 27 06:59:30 crc kubenswrapper[4971]: I1127 06:59:30.839230 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-j7mvd"
Nov 27 06:59:30 crc kubenswrapper[4971]: I1127 06:59:30.881966 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-j7mvd"
Nov 27 06:59:31 crc kubenswrapper[4971]: I1127 06:59:31.797359 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-j7mvd"
Nov 27 06:59:33 crc kubenswrapper[4971]: I1127 06:59:33.026789 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jp5gl"
Nov 27 06:59:33 crc kubenswrapper[4971]: I1127 06:59:33.026849 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jp5gl"
Nov 27 06:59:33 crc kubenswrapper[4971]: I1127 06:59:33.087118 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jp5gl"
Nov 27 06:59:33 crc kubenswrapper[4971]: I1127 06:59:33.262344 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pthvj"
Nov 27 06:59:33 crc kubenswrapper[4971]: I1127 06:59:33.264118 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pthvj"
Nov 27 06:59:33 crc kubenswrapper[4971]: I1127 06:59:33.306063 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pthvj"
Nov 27 06:59:33 crc kubenswrapper[4971]: I1127 06:59:33.814998 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jp5gl"
Nov 27 06:59:33 crc kubenswrapper[4971]: I1127 06:59:33.837608 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pthvj"
Nov 27 06:59:56 crc kubenswrapper[4971]: I1127 06:59:56.413327 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 06:59:56 crc kubenswrapper[4971]: I1127 06:59:56.413970 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 06:59:56 crc kubenswrapper[4971]: I1127 06:59:56.414030 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h"
Nov 27 06:59:56 crc kubenswrapper[4971]: I1127 06:59:56.415938 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"40313d1ab55e3e184ca9e9b1dba415d9d63c5d44213d0c1e944e6167764f0f6a"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 27 06:59:56 crc kubenswrapper[4971]: I1127 06:59:56.417477 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://40313d1ab55e3e184ca9e9b1dba415d9d63c5d44213d0c1e944e6167764f0f6a" gracePeriod=600
Nov 27 06:59:56 crc kubenswrapper[4971]: I1127 06:59:56.888364 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="40313d1ab55e3e184ca9e9b1dba415d9d63c5d44213d0c1e944e6167764f0f6a" exitCode=0
Nov 27 06:59:56 crc kubenswrapper[4971]: I1127 06:59:56.888426 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"40313d1ab55e3e184ca9e9b1dba415d9d63c5d44213d0c1e944e6167764f0f6a"}
Nov 27 06:59:56 crc kubenswrapper[4971]: I1127 06:59:56.888829 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"889a6364d4b69a2da40304e10c9f2e2768f2d166cd940fb1adc52ad22d73e737"}
Nov 27 06:59:56 crc kubenswrapper[4971]: I1127 06:59:56.888863 4971 scope.go:117] "RemoveContainer" containerID="0ab84d3988bbf8f7ccadd3784180cba2bc1d82199d091137a29d6dc4abf9ed20"
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.164331 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr"]
Nov 27 07:00:00 crc kubenswrapper[4971]: E1127 07:00:00.164981 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="227860ca-baf3-4b07-b0b3-9a29a9eedab6" containerName="registry"
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.164993 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="227860ca-baf3-4b07-b0b3-9a29a9eedab6" containerName="registry"
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.165079 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="227860ca-baf3-4b07-b0b3-9a29a9eedab6" containerName="registry"
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.165449 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr"
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.168192 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.168258 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.184760 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr"]
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.299465 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq2zr\" (UniqueName: \"kubernetes.io/projected/1b8e584a-c179-4555-a366-1be20fe8ceff-kube-api-access-rq2zr\") pod \"collect-profiles-29403780-v8rfr\" (UID: \"1b8e584a-c179-4555-a366-1be20fe8ceff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr"
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.299786 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1b8e584a-c179-4555-a366-1be20fe8ceff-secret-volume\") pod \"collect-profiles-29403780-v8rfr\" (UID: \"1b8e584a-c179-4555-a366-1be20fe8ceff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr"
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.299879 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1b8e584a-c179-4555-a366-1be20fe8ceff-config-volume\") pod \"collect-profiles-29403780-v8rfr\" (UID: \"1b8e584a-c179-4555-a366-1be20fe8ceff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr"
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.401345 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1b8e584a-c179-4555-a366-1be20fe8ceff-secret-volume\") pod \"collect-profiles-29403780-v8rfr\" (UID: \"1b8e584a-c179-4555-a366-1be20fe8ceff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr"
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.401419 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1b8e584a-c179-4555-a366-1be20fe8ceff-config-volume\") pod \"collect-profiles-29403780-v8rfr\" (UID: \"1b8e584a-c179-4555-a366-1be20fe8ceff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr"
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.401465 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq2zr\" (UniqueName: \"kubernetes.io/projected/1b8e584a-c179-4555-a366-1be20fe8ceff-kube-api-access-rq2zr\") pod \"collect-profiles-29403780-v8rfr\" (UID: \"1b8e584a-c179-4555-a366-1be20fe8ceff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr"
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.402584 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1b8e584a-c179-4555-a366-1be20fe8ceff-config-volume\") pod \"collect-profiles-29403780-v8rfr\" (UID: \"1b8e584a-c179-4555-a366-1be20fe8ceff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr"
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.407305 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1b8e584a-c179-4555-a366-1be20fe8ceff-secret-volume\") pod \"collect-profiles-29403780-v8rfr\" (UID: \"1b8e584a-c179-4555-a366-1be20fe8ceff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr"
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.418885 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq2zr\" (UniqueName: \"kubernetes.io/projected/1b8e584a-c179-4555-a366-1be20fe8ceff-kube-api-access-rq2zr\") pod \"collect-profiles-29403780-v8rfr\" (UID: \"1b8e584a-c179-4555-a366-1be20fe8ceff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr"
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.481260 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr"
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.642971 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr"]
Nov 27 07:00:00 crc kubenswrapper[4971]: W1127 07:00:00.651729 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b8e584a_c179_4555_a366_1be20fe8ceff.slice/crio-94a7460df1ee2711bbb3dde69a55f9e45567b04677aa94774991edbfc59ad404 WatchSource:0}: Error finding container 94a7460df1ee2711bbb3dde69a55f9e45567b04677aa94774991edbfc59ad404: Status 404 returned error can't find the container with id 94a7460df1ee2711bbb3dde69a55f9e45567b04677aa94774991edbfc59ad404
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.911199 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr" event={"ID":"1b8e584a-c179-4555-a366-1be20fe8ceff","Type":"ContainerStarted","Data":"73d80397be3db23d8723608eb13ba4e74e54d2bf24b60748b7c9caebfff69f29"}
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.911531 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr" event={"ID":"1b8e584a-c179-4555-a366-1be20fe8ceff","Type":"ContainerStarted","Data":"94a7460df1ee2711bbb3dde69a55f9e45567b04677aa94774991edbfc59ad404"}
Nov 27 07:00:00 crc kubenswrapper[4971]: I1127 07:00:00.923891 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr" podStartSLOduration=0.923870475 podStartE2EDuration="923.870475ms" podCreationTimestamp="2025-11-27 07:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:00:00.922529115 +0000 UTC m=+439.114573033" watchObservedRunningTime="2025-11-27 07:00:00.923870475 +0000 UTC m=+439.115914413"
Nov 27 07:00:01 crc kubenswrapper[4971]: I1127 07:00:01.921857 4971 generic.go:334] "Generic (PLEG): container finished" podID="1b8e584a-c179-4555-a366-1be20fe8ceff" containerID="73d80397be3db23d8723608eb13ba4e74e54d2bf24b60748b7c9caebfff69f29" exitCode=0
Nov 27 07:00:01 crc kubenswrapper[4971]: I1127 07:00:01.921913 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr" event={"ID":"1b8e584a-c179-4555-a366-1be20fe8ceff","Type":"ContainerDied","Data":"73d80397be3db23d8723608eb13ba4e74e54d2bf24b60748b7c9caebfff69f29"}
Nov 27 07:00:03 crc kubenswrapper[4971]: I1127 07:00:03.093843 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr"
Nov 27 07:00:03 crc kubenswrapper[4971]: I1127 07:00:03.235901 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1b8e584a-c179-4555-a366-1be20fe8ceff-secret-volume\") pod \"1b8e584a-c179-4555-a366-1be20fe8ceff\" (UID: \"1b8e584a-c179-4555-a366-1be20fe8ceff\") "
Nov 27 07:00:03 crc kubenswrapper[4971]: I1127 07:00:03.235983 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rq2zr\" (UniqueName: \"kubernetes.io/projected/1b8e584a-c179-4555-a366-1be20fe8ceff-kube-api-access-rq2zr\") pod \"1b8e584a-c179-4555-a366-1be20fe8ceff\" (UID: \"1b8e584a-c179-4555-a366-1be20fe8ceff\") "
Nov 27 07:00:03 crc kubenswrapper[4971]: I1127 07:00:03.236094 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1b8e584a-c179-4555-a366-1be20fe8ceff-config-volume\") pod \"1b8e584a-c179-4555-a366-1be20fe8ceff\" (UID: \"1b8e584a-c179-4555-a366-1be20fe8ceff\") "
Nov 27 07:00:03 crc kubenswrapper[4971]: I1127 07:00:03.236776 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b8e584a-c179-4555-a366-1be20fe8ceff-config-volume" (OuterVolumeSpecName: "config-volume") pod "1b8e584a-c179-4555-a366-1be20fe8ceff" (UID: "1b8e584a-c179-4555-a366-1be20fe8ceff"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 07:00:03 crc kubenswrapper[4971]: I1127 07:00:03.241701 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b8e584a-c179-4555-a366-1be20fe8ceff-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1b8e584a-c179-4555-a366-1be20fe8ceff" (UID: "1b8e584a-c179-4555-a366-1be20fe8ceff"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:00:03 crc kubenswrapper[4971]: I1127 07:00:03.241822 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b8e584a-c179-4555-a366-1be20fe8ceff-kube-api-access-rq2zr" (OuterVolumeSpecName: "kube-api-access-rq2zr") pod "1b8e584a-c179-4555-a366-1be20fe8ceff" (UID: "1b8e584a-c179-4555-a366-1be20fe8ceff"). InnerVolumeSpecName "kube-api-access-rq2zr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 07:00:03 crc kubenswrapper[4971]: I1127 07:00:03.337277 4971 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1b8e584a-c179-4555-a366-1be20fe8ceff-config-volume\") on node \"crc\" DevicePath \"\""
Nov 27 07:00:03 crc kubenswrapper[4971]: I1127 07:00:03.337313 4971 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1b8e584a-c179-4555-a366-1be20fe8ceff-secret-volume\") on node \"crc\" DevicePath \"\""
Nov 27 07:00:03 crc kubenswrapper[4971]: I1127 07:00:03.337322 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rq2zr\" (UniqueName: \"kubernetes.io/projected/1b8e584a-c179-4555-a366-1be20fe8ceff-kube-api-access-rq2zr\") on node \"crc\" DevicePath \"\""
Nov 27 07:00:03 crc kubenswrapper[4971]: I1127 07:00:03.934848 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr" event={"ID":"1b8e584a-c179-4555-a366-1be20fe8ceff","Type":"ContainerDied","Data":"94a7460df1ee2711bbb3dde69a55f9e45567b04677aa94774991edbfc59ad404"}
Nov 27 07:00:03 crc kubenswrapper[4971]: I1127 07:00:03.934914 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94a7460df1ee2711bbb3dde69a55f9e45567b04677aa94774991edbfc59ad404"
Nov 27 07:00:03 crc kubenswrapper[4971]: I1127 07:00:03.934999 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr"
Nov 27 07:01:56 crc kubenswrapper[4971]: I1127 07:01:56.413688 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 07:01:56 crc kubenswrapper[4971]: I1127 07:01:56.414316 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 07:02:26 crc kubenswrapper[4971]: I1127 07:02:26.413728 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 07:02:26 crc kubenswrapper[4971]: I1127 07:02:26.414373 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 07:02:56 crc kubenswrapper[4971]: I1127 07:02:56.413984 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 07:02:56 crc kubenswrapper[4971]: I1127 07:02:56.414957 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 07:02:56 crc kubenswrapper[4971]: I1127 07:02:56.415034 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h"
Nov 27 07:02:56 crc kubenswrapper[4971]: I1127 07:02:56.415978 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"889a6364d4b69a2da40304e10c9f2e2768f2d166cd940fb1adc52ad22d73e737"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 27 07:02:56 crc kubenswrapper[4971]: I1127 07:02:56.416103 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://889a6364d4b69a2da40304e10c9f2e2768f2d166cd940fb1adc52ad22d73e737" gracePeriod=600
Nov 27 07:02:57 crc kubenswrapper[4971]: I1127 07:02:57.362578 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="889a6364d4b69a2da40304e10c9f2e2768f2d166cd940fb1adc52ad22d73e737" exitCode=0
Nov 27 07:02:57 crc kubenswrapper[4971]: I1127 07:02:57.362754 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"889a6364d4b69a2da40304e10c9f2e2768f2d166cd940fb1adc52ad22d73e737"}
Nov 27 07:02:57 crc kubenswrapper[4971]: I1127 07:02:57.363155 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"74798b3e4b4c86a92a8e5dce55abcc5cc8f9c222cd75d6d3af831d281c1bb219"}
Nov 27 07:02:57 crc kubenswrapper[4971]: I1127 07:02:57.363301 4971 scope.go:117] "RemoveContainer" containerID="40313d1ab55e3e184ca9e9b1dba415d9d63c5d44213d0c1e944e6167764f0f6a"
Nov 27 07:03:19 crc kubenswrapper[4971]: E1127 07:03:19.485761 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/system.slice/NetworkManager-dispatcher.service\": RecentStats: unable to find data in memory cache]"
Nov 27 07:04:56 crc kubenswrapper[4971]: I1127 07:04:56.412826 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 07:04:56 crc kubenswrapper[4971]: I1127 07:04:56.413444 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 07:05:18 crc kubenswrapper[4971]:
I1127 07:05:18.348616 4971 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 27 07:05:26 crc kubenswrapper[4971]: I1127 07:05:26.413389 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:05:26 crc kubenswrapper[4971]: I1127 07:05:26.413903 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:05:56 crc kubenswrapper[4971]: I1127 07:05:56.413195 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:05:56 crc kubenswrapper[4971]: I1127 07:05:56.413914 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:05:56 crc kubenswrapper[4971]: I1127 07:05:56.413961 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 07:05:56 crc kubenswrapper[4971]: I1127 07:05:56.414474 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"74798b3e4b4c86a92a8e5dce55abcc5cc8f9c222cd75d6d3af831d281c1bb219"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 07:05:56 crc kubenswrapper[4971]: I1127 07:05:56.414550 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://74798b3e4b4c86a92a8e5dce55abcc5cc8f9c222cd75d6d3af831d281c1bb219" gracePeriod=600 Nov 27 07:05:57 crc kubenswrapper[4971]: I1127 07:05:57.428170 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="74798b3e4b4c86a92a8e5dce55abcc5cc8f9c222cd75d6d3af831d281c1bb219" exitCode=0 Nov 27 07:05:57 crc kubenswrapper[4971]: I1127 07:05:57.428243 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"74798b3e4b4c86a92a8e5dce55abcc5cc8f9c222cd75d6d3af831d281c1bb219"} Nov 27 07:05:57 crc kubenswrapper[4971]: I1127 07:05:57.428550 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" 
event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"4b10f4e9a06fcbdd9ab8460b0527e15bbc485600c51c9ce5ae3fa173ac3281f7"} Nov 27 07:05:57 crc kubenswrapper[4971]: I1127 07:05:57.428581 4971 scope.go:117] "RemoveContainer" containerID="889a6364d4b69a2da40304e10c9f2e2768f2d166cd940fb1adc52ad22d73e737" Nov 27 07:06:47 crc kubenswrapper[4971]: I1127 07:06:47.822965 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-d8ddn"] Nov 27 07:06:47 crc kubenswrapper[4971]: E1127 07:06:47.824017 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b8e584a-c179-4555-a366-1be20fe8ceff" containerName="collect-profiles" Nov 27 07:06:47 crc kubenswrapper[4971]: I1127 07:06:47.824048 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b8e584a-c179-4555-a366-1be20fe8ceff" containerName="collect-profiles" Nov 27 07:06:47 crc kubenswrapper[4971]: I1127 07:06:47.824286 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b8e584a-c179-4555-a366-1be20fe8ceff" containerName="collect-profiles" Nov 27 07:06:47 crc kubenswrapper[4971]: I1127 07:06:47.826336 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d8ddn" Nov 27 07:06:47 crc kubenswrapper[4971]: I1127 07:06:47.831583 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d8ddn"] Nov 27 07:06:47 crc kubenswrapper[4971]: I1127 07:06:47.971146 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0182f9a1-f6e3-492f-a607-d9813e5fb543-utilities\") pod \"community-operators-d8ddn\" (UID: \"0182f9a1-f6e3-492f-a607-d9813e5fb543\") " pod="openshift-marketplace/community-operators-d8ddn" Nov 27 07:06:47 crc kubenswrapper[4971]: I1127 07:06:47.971448 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0182f9a1-f6e3-492f-a607-d9813e5fb543-catalog-content\") pod \"community-operators-d8ddn\" (UID: \"0182f9a1-f6e3-492f-a607-d9813e5fb543\") " pod="openshift-marketplace/community-operators-d8ddn" Nov 27 07:06:47 crc kubenswrapper[4971]: I1127 07:06:47.971512 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmbcx\" (UniqueName: \"kubernetes.io/projected/0182f9a1-f6e3-492f-a607-d9813e5fb543-kube-api-access-gmbcx\") pod \"community-operators-d8ddn\" (UID: \"0182f9a1-f6e3-492f-a607-d9813e5fb543\") " pod="openshift-marketplace/community-operators-d8ddn" Nov 27 07:06:48 crc kubenswrapper[4971]: I1127 07:06:48.072375 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmbcx\" (UniqueName: \"kubernetes.io/projected/0182f9a1-f6e3-492f-a607-d9813e5fb543-kube-api-access-gmbcx\") pod \"community-operators-d8ddn\" (UID: \"0182f9a1-f6e3-492f-a607-d9813e5fb543\") " pod="openshift-marketplace/community-operators-d8ddn" Nov 27 07:06:48 crc kubenswrapper[4971]: I1127 07:06:48.072454 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0182f9a1-f6e3-492f-a607-d9813e5fb543-utilities\") pod \"community-operators-d8ddn\" (UID: \"0182f9a1-f6e3-492f-a607-d9813e5fb543\") " pod="openshift-marketplace/community-operators-d8ddn" Nov 27 07:06:48 crc kubenswrapper[4971]: 
I1127 07:06:48.072470 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0182f9a1-f6e3-492f-a607-d9813e5fb543-catalog-content\") pod \"community-operators-d8ddn\" (UID: \"0182f9a1-f6e3-492f-a607-d9813e5fb543\") " pod="openshift-marketplace/community-operators-d8ddn" Nov 27 07:06:48 crc kubenswrapper[4971]: I1127 07:06:48.072986 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0182f9a1-f6e3-492f-a607-d9813e5fb543-utilities\") pod \"community-operators-d8ddn\" (UID: \"0182f9a1-f6e3-492f-a607-d9813e5fb543\") " pod="openshift-marketplace/community-operators-d8ddn" Nov 27 07:06:48 crc kubenswrapper[4971]: I1127 07:06:48.073239 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0182f9a1-f6e3-492f-a607-d9813e5fb543-catalog-content\") pod \"community-operators-d8ddn\" (UID: \"0182f9a1-f6e3-492f-a607-d9813e5fb543\") " pod="openshift-marketplace/community-operators-d8ddn" Nov 27 07:06:48 crc kubenswrapper[4971]: I1127 07:06:48.092044 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmbcx\" (UniqueName: \"kubernetes.io/projected/0182f9a1-f6e3-492f-a607-d9813e5fb543-kube-api-access-gmbcx\") pod \"community-operators-d8ddn\" (UID: \"0182f9a1-f6e3-492f-a607-d9813e5fb543\") " pod="openshift-marketplace/community-operators-d8ddn" Nov 27 07:06:48 crc kubenswrapper[4971]: I1127 07:06:48.181044 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d8ddn" Nov 27 07:06:48 crc kubenswrapper[4971]: I1127 07:06:48.426488 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d8ddn"] Nov 27 07:06:48 crc kubenswrapper[4971]: I1127 07:06:48.677108 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8ddn" event={"ID":"0182f9a1-f6e3-492f-a607-d9813e5fb543","Type":"ContainerStarted","Data":"d308f9a78185e9385ae271079d81258a424817f5cf3a2c572e364cbe8a236954"} Nov 27 07:06:49 crc kubenswrapper[4971]: I1127 07:06:49.684043 4971 generic.go:334] "Generic (PLEG): container finished" podID="0182f9a1-f6e3-492f-a607-d9813e5fb543" containerID="823182930fc1dab2b7a0c4eb2d3887d1af69485b77ecc0070bbb2c1b160f9133" exitCode=0 Nov 27 07:06:49 crc kubenswrapper[4971]: I1127 07:06:49.684133 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8ddn" event={"ID":"0182f9a1-f6e3-492f-a607-d9813e5fb543","Type":"ContainerDied","Data":"823182930fc1dab2b7a0c4eb2d3887d1af69485b77ecc0070bbb2c1b160f9133"} Nov 27 07:06:49 crc kubenswrapper[4971]: I1127 07:06:49.685814 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 27 07:06:51 crc kubenswrapper[4971]: I1127 07:06:51.700031 4971 generic.go:334] "Generic (PLEG): container finished" podID="0182f9a1-f6e3-492f-a607-d9813e5fb543" containerID="eafab568cd4a61901778a14fa3a45d3dadbf8b69d594c793aaada486347f0178" exitCode=0 Nov 27 07:06:51 crc kubenswrapper[4971]: I1127 07:06:51.700122 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8ddn" event={"ID":"0182f9a1-f6e3-492f-a607-d9813e5fb543","Type":"ContainerDied","Data":"eafab568cd4a61901778a14fa3a45d3dadbf8b69d594c793aaada486347f0178"} Nov 27 07:06:52 crc 
kubenswrapper[4971]: I1127 07:06:52.711167 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8ddn" event={"ID":"0182f9a1-f6e3-492f-a607-d9813e5fb543","Type":"ContainerStarted","Data":"e0731ed656bb493bf1f0c593e5fb461dc712a42f829c2f401855822941ee7ca2"} Nov 27 07:06:58 crc kubenswrapper[4971]: I1127 07:06:58.181754 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-d8ddn" Nov 27 07:06:58 crc kubenswrapper[4971]: I1127 07:06:58.182121 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-d8ddn" Nov 27 07:06:58 crc kubenswrapper[4971]: I1127 07:06:58.220111 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-d8ddn" Nov 27 07:06:58 crc kubenswrapper[4971]: I1127 07:06:58.239725 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-d8ddn" podStartSLOduration=8.689215413 podStartE2EDuration="11.239705923s" podCreationTimestamp="2025-11-27 07:06:47 +0000 UTC" firstStartedPulling="2025-11-27 07:06:49.685516169 +0000 UTC m=+847.877560077" lastFinishedPulling="2025-11-27 07:06:52.236006659 +0000 UTC m=+850.428050587" observedRunningTime="2025-11-27 07:06:52.736099793 +0000 UTC m=+850.928143721" watchObservedRunningTime="2025-11-27 07:06:58.239705923 +0000 UTC m=+856.431749841" Nov 27 07:06:58 crc kubenswrapper[4971]: I1127 07:06:58.783657 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-d8ddn" Nov 27 07:06:58 crc kubenswrapper[4971]: I1127 07:06:58.827255 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d8ddn"] Nov 27 07:07:00 crc kubenswrapper[4971]: I1127 07:07:00.752279 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-d8ddn" podUID="0182f9a1-f6e3-492f-a607-d9813e5fb543" containerName="registry-server" containerID="cri-o://e0731ed656bb493bf1f0c593e5fb461dc712a42f829c2f401855822941ee7ca2" gracePeriod=2 Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.112267 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-d8ddn" Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.266048 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0182f9a1-f6e3-492f-a607-d9813e5fb543-utilities\") pod \"0182f9a1-f6e3-492f-a607-d9813e5fb543\" (UID: \"0182f9a1-f6e3-492f-a607-d9813e5fb543\") " Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.266155 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0182f9a1-f6e3-492f-a607-d9813e5fb543-catalog-content\") pod \"0182f9a1-f6e3-492f-a607-d9813e5fb543\" (UID: \"0182f9a1-f6e3-492f-a607-d9813e5fb543\") " Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.266225 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmbcx\" (UniqueName: \"kubernetes.io/projected/0182f9a1-f6e3-492f-a607-d9813e5fb543-kube-api-access-gmbcx\") pod \"0182f9a1-f6e3-492f-a607-d9813e5fb543\" (UID: \"0182f9a1-f6e3-492f-a607-d9813e5fb543\") " Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.268005 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0182f9a1-f6e3-492f-a607-d9813e5fb543-utilities" (OuterVolumeSpecName: "utilities") pod "0182f9a1-f6e3-492f-a607-d9813e5fb543" (UID: "0182f9a1-f6e3-492f-a607-d9813e5fb543"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.272905 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0182f9a1-f6e3-492f-a607-d9813e5fb543-kube-api-access-gmbcx" (OuterVolumeSpecName: "kube-api-access-gmbcx") pod "0182f9a1-f6e3-492f-a607-d9813e5fb543" (UID: "0182f9a1-f6e3-492f-a607-d9813e5fb543"). InnerVolumeSpecName "kube-api-access-gmbcx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.368201 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0182f9a1-f6e3-492f-a607-d9813e5fb543-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.368597 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmbcx\" (UniqueName: \"kubernetes.io/projected/0182f9a1-f6e3-492f-a607-d9813e5fb543-kube-api-access-gmbcx\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.761942 4971 generic.go:334] "Generic (PLEG): container finished" podID="0182f9a1-f6e3-492f-a607-d9813e5fb543" containerID="e0731ed656bb493bf1f0c593e5fb461dc712a42f829c2f401855822941ee7ca2" exitCode=0 Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.761990 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8ddn" event={"ID":"0182f9a1-f6e3-492f-a607-d9813e5fb543","Type":"ContainerDied","Data":"e0731ed656bb493bf1f0c593e5fb461dc712a42f829c2f401855822941ee7ca2"} Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.762029 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8ddn" event={"ID":"0182f9a1-f6e3-492f-a607-d9813e5fb543","Type":"ContainerDied","Data":"d308f9a78185e9385ae271079d81258a424817f5cf3a2c572e364cbe8a236954"} Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.762031 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d8ddn" Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.762050 4971 scope.go:117] "RemoveContainer" containerID="e0731ed656bb493bf1f0c593e5fb461dc712a42f829c2f401855822941ee7ca2" Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.780105 4971 scope.go:117] "RemoveContainer" containerID="eafab568cd4a61901778a14fa3a45d3dadbf8b69d594c793aaada486347f0178" Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.799801 4971 scope.go:117] "RemoveContainer" containerID="823182930fc1dab2b7a0c4eb2d3887d1af69485b77ecc0070bbb2c1b160f9133" Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.814561 4971 scope.go:117] "RemoveContainer" containerID="e0731ed656bb493bf1f0c593e5fb461dc712a42f829c2f401855822941ee7ca2" Nov 27 07:07:01 crc kubenswrapper[4971]: E1127 07:07:01.814919 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0731ed656bb493bf1f0c593e5fb461dc712a42f829c2f401855822941ee7ca2\": container with ID starting with e0731ed656bb493bf1f0c593e5fb461dc712a42f829c2f401855822941ee7ca2 not found: ID does not exist" containerID="e0731ed656bb493bf1f0c593e5fb461dc712a42f829c2f401855822941ee7ca2" Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.814961 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0731ed656bb493bf1f0c593e5fb461dc712a42f829c2f401855822941ee7ca2"} err="failed to get container status \"e0731ed656bb493bf1f0c593e5fb461dc712a42f829c2f401855822941ee7ca2\": rpc error: code = NotFound desc = could not find container \"e0731ed656bb493bf1f0c593e5fb461dc712a42f829c2f401855822941ee7ca2\": container with ID starting with e0731ed656bb493bf1f0c593e5fb461dc712a42f829c2f401855822941ee7ca2 not found: ID does not exist" Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.814989 4971 scope.go:117] 
"RemoveContainer" containerID="eafab568cd4a61901778a14fa3a45d3dadbf8b69d594c793aaada486347f0178" Nov 27 07:07:01 crc kubenswrapper[4971]: E1127 07:07:01.815480 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eafab568cd4a61901778a14fa3a45d3dadbf8b69d594c793aaada486347f0178\": container with ID starting with eafab568cd4a61901778a14fa3a45d3dadbf8b69d594c793aaada486347f0178 not found: ID does not exist" containerID="eafab568cd4a61901778a14fa3a45d3dadbf8b69d594c793aaada486347f0178" Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.815574 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eafab568cd4a61901778a14fa3a45d3dadbf8b69d594c793aaada486347f0178"} err="failed to get container status \"eafab568cd4a61901778a14fa3a45d3dadbf8b69d594c793aaada486347f0178\": rpc error: code = NotFound desc = could not find container \"eafab568cd4a61901778a14fa3a45d3dadbf8b69d594c793aaada486347f0178\": container with ID starting with eafab568cd4a61901778a14fa3a45d3dadbf8b69d594c793aaada486347f0178 not found: ID does not exist" Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.815696 4971 scope.go:117] "RemoveContainer" containerID="823182930fc1dab2b7a0c4eb2d3887d1af69485b77ecc0070bbb2c1b160f9133" Nov 27 07:07:01 crc kubenswrapper[4971]: E1127 07:07:01.815994 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"823182930fc1dab2b7a0c4eb2d3887d1af69485b77ecc0070bbb2c1b160f9133\": container with ID starting with 823182930fc1dab2b7a0c4eb2d3887d1af69485b77ecc0070bbb2c1b160f9133 not found: ID does not exist" containerID="823182930fc1dab2b7a0c4eb2d3887d1af69485b77ecc0070bbb2c1b160f9133" Nov 27 07:07:01 crc kubenswrapper[4971]: I1127 07:07:01.816037 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"823182930fc1dab2b7a0c4eb2d3887d1af69485b77ecc0070bbb2c1b160f9133"} err="failed to get container status \"823182930fc1dab2b7a0c4eb2d3887d1af69485b77ecc0070bbb2c1b160f9133\": rpc error: code = NotFound desc = could not find container \"823182930fc1dab2b7a0c4eb2d3887d1af69485b77ecc0070bbb2c1b160f9133\": container with ID starting with 823182930fc1dab2b7a0c4eb2d3887d1af69485b77ecc0070bbb2c1b160f9133 not found: ID does not exist" Nov 27 07:07:02 crc kubenswrapper[4971]: I1127 07:07:02.364510 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0182f9a1-f6e3-492f-a607-d9813e5fb543-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0182f9a1-f6e3-492f-a607-d9813e5fb543" (UID: "0182f9a1-f6e3-492f-a607-d9813e5fb543"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:07:02 crc kubenswrapper[4971]: I1127 07:07:02.382282 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0182f9a1-f6e3-492f-a607-d9813e5fb543-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:02 crc kubenswrapper[4971]: I1127 07:07:02.681881 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d8ddn"] Nov 27 07:07:02 crc kubenswrapper[4971]: I1127 07:07:02.685455 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-d8ddn"] Nov 27 07:07:04 crc kubenswrapper[4971]: I1127 07:07:04.556954 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0182f9a1-f6e3-492f-a607-d9813e5fb543" path="/var/lib/kubelet/pods/0182f9a1-f6e3-492f-a607-d9813e5fb543/volumes" Nov 27 07:07:20 crc kubenswrapper[4971]: I1127 07:07:20.758290 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-56nwb"] Nov 27 07:07:20 crc kubenswrapper[4971]: I1127 07:07:20.759330 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovn-controller" containerID="cri-o://de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc" gracePeriod=30 Nov 27 07:07:20 crc kubenswrapper[4971]: I1127 07:07:20.759761 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="sbdb" containerID="cri-o://d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349" gracePeriod=30 Nov 27 07:07:20 crc kubenswrapper[4971]: I1127 07:07:20.759813 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="nbdb" containerID="cri-o://ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae" gracePeriod=30 Nov 27 07:07:20 crc kubenswrapper[4971]: I1127 07:07:20.759852 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="northd" containerID="cri-o://cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57" gracePeriod=30 Nov 27 07:07:20 crc kubenswrapper[4971]: I1127 07:07:20.759896 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c" gracePeriod=30 Nov 27 07:07:20 crc kubenswrapper[4971]: I1127 07:07:20.759936 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="kube-rbac-proxy-node" containerID="cri-o://f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d" gracePeriod=30 Nov 27 07:07:20 crc kubenswrapper[4971]: I1127 07:07:20.759974 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovn-acl-logging" 
containerID="cri-o://04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890" gracePeriod=30 Nov 27 07:07:20 crc kubenswrapper[4971]: I1127 07:07:20.798420 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovnkube-controller" containerID="cri-o://312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853" gracePeriod=30 Nov 27 07:07:20 crc kubenswrapper[4971]: I1127 07:07:20.888109 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lr9p6_a2136014-aa8f-48e5-bccf-64cdd3cbc5f9/kube-multus/2.log" Nov 27 07:07:20 crc kubenswrapper[4971]: I1127 07:07:20.890423 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lr9p6_a2136014-aa8f-48e5-bccf-64cdd3cbc5f9/kube-multus/1.log" Nov 27 07:07:20 crc kubenswrapper[4971]: I1127 07:07:20.890480 4971 generic.go:334] "Generic (PLEG): container finished" podID="a2136014-aa8f-48e5-bccf-64cdd3cbc5f9" containerID="54adff5ac26e8635c3ca81209e1d7f16afe8025ee101874e4e650bd5caa3120d" exitCode=2 Nov 27 07:07:20 crc kubenswrapper[4971]: I1127 07:07:20.890519 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lr9p6" event={"ID":"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9","Type":"ContainerDied","Data":"54adff5ac26e8635c3ca81209e1d7f16afe8025ee101874e4e650bd5caa3120d"} Nov 27 07:07:20 crc kubenswrapper[4971]: I1127 07:07:20.890777 4971 scope.go:117] "RemoveContainer" containerID="a2536efee5c523594e02d64a0d1113af4945a84c32d06c78f840ef292143578e" Nov 27 07:07:20 crc kubenswrapper[4971]: I1127 07:07:20.891369 4971 scope.go:117] "RemoveContainer" containerID="54adff5ac26e8635c3ca81209e1d7f16afe8025ee101874e4e650bd5caa3120d" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.141525 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-56nwb_47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/ovnkube-controller/3.log" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.144233 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-56nwb_47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/ovn-acl-logging/0.log" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.144685 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-56nwb_47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/ovn-controller/0.log" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.145120 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202219 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-wvbzj"] Nov 27 07:07:21 crc kubenswrapper[4971]: E1127 07:07:21.202423 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0182f9a1-f6e3-492f-a607-d9813e5fb543" containerName="extract-utilities" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202435 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0182f9a1-f6e3-492f-a607-d9813e5fb543" containerName="extract-utilities" Nov 27 07:07:21 crc kubenswrapper[4971]: E1127 07:07:21.202445 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="nbdb" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202451 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="nbdb" Nov 27 07:07:21 crc kubenswrapper[4971]: E1127 07:07:21.202458 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovnkube-controller" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202464 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovnkube-controller" Nov 27 07:07:21 crc kubenswrapper[4971]: E1127 07:07:21.202472 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="northd" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202477 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="northd" Nov 27 07:07:21 crc kubenswrapper[4971]: E1127 07:07:21.202485 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovnkube-controller" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202493 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovnkube-controller" Nov 27 07:07:21 crc kubenswrapper[4971]: E1127 07:07:21.202500 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovnkube-controller" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202506 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovnkube-controller" Nov 27 07:07:21 crc kubenswrapper[4971]: E1127 07:07:21.202515 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0182f9a1-f6e3-492f-a607-d9813e5fb543" containerName="extract-content" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202520 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0182f9a1-f6e3-492f-a607-d9813e5fb543" containerName="extract-content" Nov 27 07:07:21 crc kubenswrapper[4971]: E1127 07:07:21.202548 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="kube-rbac-proxy-node" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202554 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="kube-rbac-proxy-node" Nov 27 07:07:21 crc kubenswrapper[4971]: E1127 07:07:21.202564 4971 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="kubecfg-setup" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202569 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="kubecfg-setup" Nov 27 07:07:21 crc kubenswrapper[4971]: E1127 07:07:21.202577 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovn-controller" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202582 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovn-controller" Nov 27 07:07:21 crc kubenswrapper[4971]: E1127 07:07:21.202589 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="kube-rbac-proxy-ovn-metrics" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202595 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="kube-rbac-proxy-ovn-metrics" Nov 27 07:07:21 crc kubenswrapper[4971]: E1127 07:07:21.202602 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="sbdb" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202607 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="sbdb" Nov 27 07:07:21 crc kubenswrapper[4971]: E1127 07:07:21.202615 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovn-acl-logging" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202621 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovn-acl-logging" Nov 27 07:07:21 crc kubenswrapper[4971]: E1127 07:07:21.202628 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0182f9a1-f6e3-492f-a607-d9813e5fb543" containerName="registry-server" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202634 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0182f9a1-f6e3-492f-a607-d9813e5fb543" containerName="registry-server" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202720 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovn-controller" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202730 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovnkube-controller" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202737 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovnkube-controller" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202744 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovnkube-controller" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202751 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="northd" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202761 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="nbdb" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202769 4971 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovn-acl-logging" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202775 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="sbdb" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202785 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0182f9a1-f6e3-492f-a607-d9813e5fb543" containerName="registry-server" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202793 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="kube-rbac-proxy-node" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202801 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="kube-rbac-proxy-ovn-metrics" Nov 27 07:07:21 crc kubenswrapper[4971]: E1127 07:07:21.202903 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovnkube-controller" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202917 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovnkube-controller" Nov 27 07:07:21 crc kubenswrapper[4971]: E1127 07:07:21.202924 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovnkube-controller" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.202929 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovnkube-controller" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.203017 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovnkube-controller" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.203024 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerName="ovnkube-controller" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.204510 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253143 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-systemd-units\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253203 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-kubelet\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253222 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-run-netns\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253247 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9lhg\" (UniqueName: \"kubernetes.io/projected/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-kube-api-access-k9lhg\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253279 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-node-log\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253299 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovnkube-script-lib\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253317 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovn-node-metrics-cert\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253331 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-etc-openvswitch\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253316 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253354 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovnkube-config\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253379 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-run-ovn-kubernetes\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253401 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-run-systemd\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253422 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-cni-netd\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253439 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-cni-bin\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253456 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-slash\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253338 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253486 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-run-ovn\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253318 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253504 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-env-overrides\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253526 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-log-socket\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253558 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253590 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-var-lib-openvswitch\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253607 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-run-openvswitch\") pod \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\" (UID: \"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c\") " Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253720 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-cni-bin\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253741 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-node-log\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253757 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-log-socket\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253774 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-kubelet\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253803 4971 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-cni-netd\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253821 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-systemd-units\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253835 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-env-overrides\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253851 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-run-ovn\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253868 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-ovnkube-config\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253890 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-run-openvswitch\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253906 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-run-systemd\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253928 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-run-ovn-kubernetes\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253945 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-ovn-node-metrics-cert\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253959 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qz7c\" (UniqueName: \"kubernetes.io/projected/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-kube-api-access-8qz7c\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253435 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-node-log" (OuterVolumeSpecName: "node-log") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253469 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253521 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253558 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-slash" (OuterVolumeSpecName: "host-slash") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253733 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253761 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253781 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253801 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253818 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253873 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253927 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.253949 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254042 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-log-socket" (OuterVolumeSpecName: "log-socket") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254065 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-var-lib-openvswitch\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254023 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). 
InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254304 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254357 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-run-netns\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254378 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-etc-openvswitch\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254408 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-slash\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254474 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-ovnkube-script-lib\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254709 4971 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254731 4971 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-run-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254748 4971 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-systemd-units\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254760 4971 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-kubelet\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254773 4971 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-run-netns\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 
07:07:21.254785 4971 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-node-log\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254797 4971 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254808 4971 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254820 4971 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254832 4971 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254847 4971 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-cni-netd\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254858 4971 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-cni-bin\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254871 4971 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-slash\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254884 4971 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254895 4971 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254906 4971 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-log-socket\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.254918 4971 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.260732 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: 
"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.260800 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-kube-api-access-k9lhg" (OuterVolumeSpecName: "kube-api-access-k9lhg") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). InnerVolumeSpecName "kube-api-access-k9lhg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.266915 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" (UID: "47c0abbf-6e9f-4bca-b3ca-bd896be15f2c"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.355988 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-cni-bin\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356044 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-node-log\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356062 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-log-socket\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356078 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-kubelet\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356102 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-cni-netd\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356117 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-systemd-units\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356136 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-env-overrides\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356153 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-run-ovn\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356153 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-cni-bin\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356199 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-log-socket\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356169 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-ovnkube-config\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356169 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-kubelet\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356195 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-node-log\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356234 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-systemd-units\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356281 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-run-systemd\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356311 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-run-openvswitch\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356327 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-run-systemd\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356349 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qz7c\" (UniqueName: \"kubernetes.io/projected/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-kube-api-access-8qz7c\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356245 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-cni-netd\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356377 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-run-openvswitch\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356378 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-run-ovn-kubernetes\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356405 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-run-ovn-kubernetes\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356281 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-run-ovn\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356435 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-ovn-node-metrics-cert\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356469 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-var-lib-openvswitch\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc 
kubenswrapper[4971]: I1127 07:07:21.356726 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356771 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-run-netns\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356802 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-etc-openvswitch\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356834 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-slash\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356866 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-ovnkube-script-lib\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356948 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9lhg\" (UniqueName: \"kubernetes.io/projected/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-kube-api-access-k9lhg\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356971 4971 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356983 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-run-netns\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.356988 4971 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c-run-systemd\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.357020 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-var-lib-openvswitch\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 
crc kubenswrapper[4971]: I1127 07:07:21.357052 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.357083 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-etc-openvswitch\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.357104 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-env-overrides\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.357113 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-host-slash\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.357159 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-ovnkube-config\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.357976 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-ovnkube-script-lib\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.362282 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-ovn-node-metrics-cert\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.384951 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qz7c\" (UniqueName: \"kubernetes.io/projected/6af4f2c1-29ff-4c34-a586-5cf3d357c97f-kube-api-access-8qz7c\") pod \"ovnkube-node-wvbzj\" (UID: \"6af4f2c1-29ff-4c34-a586-5cf3d357c97f\") " pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.520773 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:21 crc kubenswrapper[4971]: W1127 07:07:21.538584 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6af4f2c1_29ff_4c34_a586_5cf3d357c97f.slice/crio-d0c9f791ccb82d12d7354d94a9722fbcfb65bd09b76ab38c94961dbddc82994e WatchSource:0}: Error finding container d0c9f791ccb82d12d7354d94a9722fbcfb65bd09b76ab38c94961dbddc82994e: Status 404 returned error can't find the container with id d0c9f791ccb82d12d7354d94a9722fbcfb65bd09b76ab38c94961dbddc82994e Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.897756 4971 generic.go:334] "Generic (PLEG): container finished" podID="6af4f2c1-29ff-4c34-a586-5cf3d357c97f" containerID="d0f9a489feb323d74e0c7e304aac75665cda53895a1622a305f3fd942387f753" exitCode=0 Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.897827 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" event={"ID":"6af4f2c1-29ff-4c34-a586-5cf3d357c97f","Type":"ContainerDied","Data":"d0f9a489feb323d74e0c7e304aac75665cda53895a1622a305f3fd942387f753"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.897945 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" event={"ID":"6af4f2c1-29ff-4c34-a586-5cf3d357c97f","Type":"ContainerStarted","Data":"d0c9f791ccb82d12d7354d94a9722fbcfb65bd09b76ab38c94961dbddc82994e"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.900709 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-lr9p6_a2136014-aa8f-48e5-bccf-64cdd3cbc5f9/kube-multus/2.log" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.900791 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-lr9p6" event={"ID":"a2136014-aa8f-48e5-bccf-64cdd3cbc5f9","Type":"ContainerStarted","Data":"2e5b0ecda7026485dd6b6b08cd0be99b71d34020a07e450811082dd50f3a23b8"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.903431 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-56nwb_47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/ovnkube-controller/3.log" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.909223 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-56nwb_47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/ovn-acl-logging/0.log" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.909819 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-56nwb_47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/ovn-controller/0.log" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910255 4971 generic.go:334] "Generic (PLEG): container finished" podID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerID="312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853" exitCode=0 Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910284 4971 generic.go:334] "Generic (PLEG): container finished" podID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerID="d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349" exitCode=0 Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910291 4971 generic.go:334] "Generic (PLEG): container finished" podID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerID="ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae" exitCode=0 Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910299 
4971 generic.go:334] "Generic (PLEG): container finished" podID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerID="cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57" exitCode=0 Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910333 4971 generic.go:334] "Generic (PLEG): container finished" podID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerID="e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c" exitCode=0 Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910342 4971 generic.go:334] "Generic (PLEG): container finished" podID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerID="f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d" exitCode=0 Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910350 4971 generic.go:334] "Generic (PLEG): container finished" podID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerID="04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890" exitCode=143 Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910358 4971 generic.go:334] "Generic (PLEG): container finished" podID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" containerID="de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc" exitCode=143 Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910406 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerDied","Data":"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910442 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerDied","Data":"d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910455 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerDied","Data":"ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910491 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerDied","Data":"cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910509 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerDied","Data":"e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910521 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerDied","Data":"f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910597 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910609 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910615 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910620 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910627 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910632 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910637 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910642 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910647 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910675 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerDied","Data":"04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910687 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910695 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910700 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910705 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910710 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910716 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910720 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910726 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910751 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910757 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910765 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerDied","Data":"de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910773 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910781 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910787 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910792 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910797 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910802 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910825 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910832 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910837 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910843 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910850 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" event={"ID":"47c0abbf-6e9f-4bca-b3ca-bd896be15f2c","Type":"ContainerDied","Data":"3b0bfc1c99b394f40dc7f888ce48e484b6104167251dbb2005702aac8a60e29b"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910859 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910869 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910874 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910882 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910889 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910896 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910904 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910911 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910917 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910924 4971 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a"} Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.910942 4971 scope.go:117] "RemoveContainer" containerID="312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.911087 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-56nwb" Nov 27 07:07:21 crc kubenswrapper[4971]: I1127 07:07:21.980687 4971 scope.go:117] "RemoveContainer" containerID="91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.008933 4971 scope.go:117] "RemoveContainer" containerID="d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.016280 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-56nwb"] Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.019218 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-56nwb"] Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.042418 4971 scope.go:117] "RemoveContainer" containerID="ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.054163 4971 scope.go:117] "RemoveContainer" containerID="cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.066804 4971 scope.go:117] "RemoveContainer" containerID="e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.094902 4971 scope.go:117] "RemoveContainer" containerID="f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.118881 4971 scope.go:117] "RemoveContainer" containerID="04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.155053 4971 scope.go:117] "RemoveContainer" containerID="de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.168821 4971 scope.go:117] "RemoveContainer" containerID="8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.189560 4971 scope.go:117] "RemoveContainer" containerID="312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853" Nov 27 07:07:22 crc kubenswrapper[4971]: E1127 07:07:22.190109 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853\": container with ID starting with 312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853 not found: ID does not exist" containerID="312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.190156 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853"} err="failed to get container status \"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853\": rpc error: code = NotFound desc = could not find container \"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853\": container with ID starting with 312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.190185 4971 scope.go:117] "RemoveContainer" containerID="91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85" Nov 27 07:07:22 crc kubenswrapper[4971]: E1127 07:07:22.190446 4971 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85\": container with ID starting with 91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85 not found: ID does not exist" containerID="91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.190471 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85"} err="failed to get container status \"91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85\": rpc error: code = NotFound desc = could not find container \"91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85\": container with ID starting with 91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.190484 4971 scope.go:117] "RemoveContainer" containerID="d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349" Nov 27 07:07:22 crc kubenswrapper[4971]: E1127 07:07:22.191259 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\": container with ID starting with d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349 not found: ID does not exist" containerID="d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.191285 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349"} err="failed to get container status \"d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\": rpc error: code = NotFound desc = could not find container \"d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\": container with ID starting with d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.191300 4971 scope.go:117] "RemoveContainer" containerID="ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae" Nov 27 07:07:22 crc kubenswrapper[4971]: E1127 07:07:22.191527 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\": container with ID starting with ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae not found: ID does not exist" containerID="ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.191556 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae"} err="failed to get container status \"ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\": rpc error: code = NotFound desc = could not find container \"ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\": container with ID starting with ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.191568 4971 scope.go:117] "RemoveContainer" 
containerID="cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57" Nov 27 07:07:22 crc kubenswrapper[4971]: E1127 07:07:22.191796 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\": container with ID starting with cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57 not found: ID does not exist" containerID="cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.191815 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57"} err="failed to get container status \"cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\": rpc error: code = NotFound desc = could not find container \"cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\": container with ID starting with cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.191828 4971 scope.go:117] "RemoveContainer" containerID="e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c" Nov 27 07:07:22 crc kubenswrapper[4971]: E1127 07:07:22.192040 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\": container with ID starting with e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c not found: ID does not exist" containerID="e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.192061 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c"} err="failed to get container status \"e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\": rpc error: code = NotFound desc = could not find container \"e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\": container with ID starting with e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.192073 4971 scope.go:117] "RemoveContainer" containerID="f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d" Nov 27 07:07:22 crc kubenswrapper[4971]: E1127 07:07:22.192505 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\": container with ID starting with f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d not found: ID does not exist" containerID="f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.192542 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d"} err="failed to get container status \"f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\": rpc error: code = NotFound desc = could not find container \"f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\": container with ID starting with 
f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.192554 4971 scope.go:117] "RemoveContainer" containerID="04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890" Nov 27 07:07:22 crc kubenswrapper[4971]: E1127 07:07:22.192840 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\": container with ID starting with 04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890 not found: ID does not exist" containerID="04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.192863 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890"} err="failed to get container status \"04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\": rpc error: code = NotFound desc = could not find container \"04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\": container with ID starting with 04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.192878 4971 scope.go:117] "RemoveContainer" containerID="de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc" Nov 27 07:07:22 crc kubenswrapper[4971]: E1127 07:07:22.193191 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\": container with ID starting with de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc not found: ID does not exist" containerID="de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.193213 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc"} err="failed to get container status \"de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\": rpc error: code = NotFound desc = could not find container \"de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\": container with ID starting with de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.193225 4971 scope.go:117] "RemoveContainer" containerID="8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a" Nov 27 07:07:22 crc kubenswrapper[4971]: E1127 07:07:22.193467 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\": container with ID starting with 8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a not found: ID does not exist" containerID="8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.193489 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a"} err="failed to get container status \"8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\": rpc 
error: code = NotFound desc = could not find container \"8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\": container with ID starting with 8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.193508 4971 scope.go:117] "RemoveContainer" containerID="312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.193825 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853"} err="failed to get container status \"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853\": rpc error: code = NotFound desc = could not find container \"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853\": container with ID starting with 312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.193844 4971 scope.go:117] "RemoveContainer" containerID="91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.194040 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85"} err="failed to get container status \"91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85\": rpc error: code = NotFound desc = could not find container \"91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85\": container with ID starting with 91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.194058 4971 scope.go:117] "RemoveContainer" containerID="d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.194236 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349"} err="failed to get container status \"d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\": rpc error: code = NotFound desc = could not find container \"d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\": container with ID starting with d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.194256 4971 scope.go:117] "RemoveContainer" containerID="ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.194446 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae"} err="failed to get container status \"ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\": rpc error: code = NotFound desc = could not find container \"ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\": container with ID starting with ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.194465 4971 scope.go:117] "RemoveContainer" containerID="cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57" Nov 27 07:07:22 crc 
kubenswrapper[4971]: I1127 07:07:22.194665 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57"} err="failed to get container status \"cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\": rpc error: code = NotFound desc = could not find container \"cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\": container with ID starting with cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.194707 4971 scope.go:117] "RemoveContainer" containerID="e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.194913 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c"} err="failed to get container status \"e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\": rpc error: code = NotFound desc = could not find container \"e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\": container with ID starting with e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.194933 4971 scope.go:117] "RemoveContainer" containerID="f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.195122 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d"} err="failed to get container status \"f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\": rpc error: code = NotFound desc = could not find container \"f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\": container with ID starting with f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.195139 4971 scope.go:117] "RemoveContainer" containerID="04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.195323 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890"} err="failed to get container status \"04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\": rpc error: code = NotFound desc = could not find container \"04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\": container with ID starting with 04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.195342 4971 scope.go:117] "RemoveContainer" containerID="de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.195515 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc"} err="failed to get container status \"de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\": rpc error: code = NotFound desc = could not find container \"de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\": container with ID 
starting with de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.195545 4971 scope.go:117] "RemoveContainer" containerID="8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.195722 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a"} err="failed to get container status \"8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\": rpc error: code = NotFound desc = could not find container \"8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\": container with ID starting with 8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.195740 4971 scope.go:117] "RemoveContainer" containerID="312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.195954 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853"} err="failed to get container status \"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853\": rpc error: code = NotFound desc = could not find container \"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853\": container with ID starting with 312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.195973 4971 scope.go:117] "RemoveContainer" containerID="91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.196174 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85"} err="failed to get container status \"91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85\": rpc error: code = NotFound desc = could not find container \"91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85\": container with ID starting with 91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.196192 4971 scope.go:117] "RemoveContainer" containerID="d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.196346 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349"} err="failed to get container status \"d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\": rpc error: code = NotFound desc = could not find container \"d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\": container with ID starting with d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.196364 4971 scope.go:117] "RemoveContainer" containerID="ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.196515 4971 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae"} err="failed to get container status \"ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\": rpc error: code = NotFound desc = could not find container \"ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\": container with ID starting with ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.196565 4971 scope.go:117] "RemoveContainer" containerID="cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.196798 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57"} err="failed to get container status \"cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\": rpc error: code = NotFound desc = could not find container \"cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\": container with ID starting with cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.196820 4971 scope.go:117] "RemoveContainer" containerID="e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.197048 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c"} err="failed to get container status \"e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\": rpc error: code = NotFound desc = could not find container \"e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\": container with ID starting with e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.197141 4971 scope.go:117] "RemoveContainer" containerID="f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.197483 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d"} err="failed to get container status \"f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\": rpc error: code = NotFound desc = could not find container \"f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\": container with ID starting with f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.197507 4971 scope.go:117] "RemoveContainer" containerID="04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.198390 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890"} err="failed to get container status \"04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\": rpc error: code = NotFound desc = could not find container \"04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\": container with ID starting with 04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890 not found: ID does not exist" Nov 
27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.198432 4971 scope.go:117] "RemoveContainer" containerID="de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.198712 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc"} err="failed to get container status \"de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\": rpc error: code = NotFound desc = could not find container \"de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\": container with ID starting with de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.198737 4971 scope.go:117] "RemoveContainer" containerID="8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.198982 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a"} err="failed to get container status \"8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\": rpc error: code = NotFound desc = could not find container \"8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\": container with ID starting with 8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.199010 4971 scope.go:117] "RemoveContainer" containerID="312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.199211 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853"} err="failed to get container status \"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853\": rpc error: code = NotFound desc = could not find container \"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853\": container with ID starting with 312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.199243 4971 scope.go:117] "RemoveContainer" containerID="91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.200753 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85"} err="failed to get container status \"91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85\": rpc error: code = NotFound desc = could not find container \"91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85\": container with ID starting with 91706702b6012457e50ed963d8cb8a1e250a3f09353b150077de02dfc6a1bb85 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.200804 4971 scope.go:117] "RemoveContainer" containerID="d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.201129 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349"} err="failed to get container status 
\"d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\": rpc error: code = NotFound desc = could not find container \"d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349\": container with ID starting with d688c43b6cb414267b0982a0f5ac0368bb01f8b8328b102c79b7ce0e20a5a349 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.201146 4971 scope.go:117] "RemoveContainer" containerID="ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.201336 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae"} err="failed to get container status \"ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\": rpc error: code = NotFound desc = could not find container \"ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae\": container with ID starting with ee9c9ac295f8f53288f0953f77ab12cf4e26ccea49c6b10cd324decc2fb8c1ae not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.201353 4971 scope.go:117] "RemoveContainer" containerID="cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.201552 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57"} err="failed to get container status \"cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\": rpc error: code = NotFound desc = could not find container \"cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57\": container with ID starting with cd3c8004c35d6883c03b69e98e57ef7ed2b9e2a7e385ac6c9c653a1fc4dd4d57 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.201578 4971 scope.go:117] "RemoveContainer" containerID="e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.201737 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c"} err="failed to get container status \"e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\": rpc error: code = NotFound desc = could not find container \"e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c\": container with ID starting with e1afc56aaaa67998ac6a633c438efcff58eb41b60feaec809d1da0e997e8514c not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.201752 4971 scope.go:117] "RemoveContainer" containerID="f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.201932 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d"} err="failed to get container status \"f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\": rpc error: code = NotFound desc = could not find container \"f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d\": container with ID starting with f8747fc57c15546bb16fe182279d2bbd4900dc4566989917c3aaf346640aeb5d not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.201950 4971 scope.go:117] "RemoveContainer" 
containerID="04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.202226 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890"} err="failed to get container status \"04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\": rpc error: code = NotFound desc = could not find container \"04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890\": container with ID starting with 04d239569a1f3a062828e9e3c83f220b221fd7c0f0ea56da2b5af168c8956890 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.202257 4971 scope.go:117] "RemoveContainer" containerID="de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.202767 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc"} err="failed to get container status \"de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\": rpc error: code = NotFound desc = could not find container \"de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc\": container with ID starting with de9d0a6ca7eda44c27e64736f216179155f08ce72e344a36b57cc88bf254accc not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.202798 4971 scope.go:117] "RemoveContainer" containerID="8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.203063 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a"} err="failed to get container status \"8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\": rpc error: code = NotFound desc = could not find container \"8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a\": container with ID starting with 8979a801c7e0de671cbf0372d97385dfb1a8d1037504dee14c825351344b454a not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.203089 4971 scope.go:117] "RemoveContainer" containerID="312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.203456 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853"} err="failed to get container status \"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853\": rpc error: code = NotFound desc = could not find container \"312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853\": container with ID starting with 312b81b8085de1c59a0a2fbd3fc064e8f3764519af180bb36f94404a0b7a2853 not found: ID does not exist" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.561470 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47c0abbf-6e9f-4bca-b3ca-bd896be15f2c" path="/var/lib/kubelet/pods/47c0abbf-6e9f-4bca-b3ca-bd896be15f2c/volumes" Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.924087 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" 
event={"ID":"6af4f2c1-29ff-4c34-a586-5cf3d357c97f","Type":"ContainerStarted","Data":"959bc5ea6e0c0ae30b82e17a48337f97b339bb6f5c9f08243eb1b73be13d010d"} Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.924630 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" event={"ID":"6af4f2c1-29ff-4c34-a586-5cf3d357c97f","Type":"ContainerStarted","Data":"e1097cba07514c5edbfb69cf4e521d3100a87273a906e060609357f7f2832d4c"} Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.924648 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" event={"ID":"6af4f2c1-29ff-4c34-a586-5cf3d357c97f","Type":"ContainerStarted","Data":"406aeeab5d54d4744b6d591aa47d9e399db07e4a7bb628f463ad0ea6410d0f54"} Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.924659 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" event={"ID":"6af4f2c1-29ff-4c34-a586-5cf3d357c97f","Type":"ContainerStarted","Data":"9c55d13261fdfb171174aa2cc65b2a251c579e98bfd022600a7a45d2610d3f92"} Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.924669 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" event={"ID":"6af4f2c1-29ff-4c34-a586-5cf3d357c97f","Type":"ContainerStarted","Data":"8ccfd42a5fe3a814b6493dc9a1df146f5f12f921d923e4ea0e394e8270ceb9dd"} Nov 27 07:07:22 crc kubenswrapper[4971]: I1127 07:07:22.924678 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" event={"ID":"6af4f2c1-29ff-4c34-a586-5cf3d357c97f","Type":"ContainerStarted","Data":"6ce6500d840bd5fbf923765d125d124a912648ef6ac06c6d748b8aeb9ebb4057"} Nov 27 07:07:24 crc kubenswrapper[4971]: I1127 07:07:24.949626 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" event={"ID":"6af4f2c1-29ff-4c34-a586-5cf3d357c97f","Type":"ContainerStarted","Data":"ceeabc683e9d4e2d38529c0c669d41105205cf422cb58267b3d44720eca0a79a"} Nov 27 07:07:27 crc kubenswrapper[4971]: I1127 07:07:27.971416 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" event={"ID":"6af4f2c1-29ff-4c34-a586-5cf3d357c97f","Type":"ContainerStarted","Data":"09eee5b91afe94c888f1fb02a516a910a809c044eaa53d9289618f282793c1a1"} Nov 27 07:07:27 crc kubenswrapper[4971]: I1127 07:07:27.972268 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:28 crc kubenswrapper[4971]: I1127 07:07:27.999710 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:28 crc kubenswrapper[4971]: I1127 07:07:28.002378 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" podStartSLOduration=7.00235198 podStartE2EDuration="7.00235198s" podCreationTimestamp="2025-11-27 07:07:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:07:28.001292259 +0000 UTC m=+886.193336187" watchObservedRunningTime="2025-11-27 07:07:28.00235198 +0000 UTC m=+886.194395908" Nov 27 07:07:28 crc kubenswrapper[4971]: I1127 07:07:28.979217 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:28 crc kubenswrapper[4971]: 
I1127 07:07:28.979639 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:29 crc kubenswrapper[4971]: I1127 07:07:29.018987 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:31 crc kubenswrapper[4971]: I1127 07:07:31.301607 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-9dxfl"] Nov 27 07:07:31 crc kubenswrapper[4971]: I1127 07:07:31.302439 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-9dxfl" Nov 27 07:07:31 crc kubenswrapper[4971]: I1127 07:07:31.305439 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Nov 27 07:07:31 crc kubenswrapper[4971]: I1127 07:07:31.305646 4971 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-887xw" Nov 27 07:07:31 crc kubenswrapper[4971]: I1127 07:07:31.305769 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Nov 27 07:07:31 crc kubenswrapper[4971]: I1127 07:07:31.307325 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Nov 27 07:07:31 crc kubenswrapper[4971]: I1127 07:07:31.314503 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-9dxfl"] Nov 27 07:07:31 crc kubenswrapper[4971]: I1127 07:07:31.402389 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5a1c4271-d673-41e7-a19d-76717f9f9c31-node-mnt\") pod \"crc-storage-crc-9dxfl\" (UID: \"5a1c4271-d673-41e7-a19d-76717f9f9c31\") " pod="crc-storage/crc-storage-crc-9dxfl" Nov 27 07:07:31 crc kubenswrapper[4971]: I1127 07:07:31.402819 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5b4lj\" (UniqueName: \"kubernetes.io/projected/5a1c4271-d673-41e7-a19d-76717f9f9c31-kube-api-access-5b4lj\") pod \"crc-storage-crc-9dxfl\" (UID: \"5a1c4271-d673-41e7-a19d-76717f9f9c31\") " pod="crc-storage/crc-storage-crc-9dxfl" Nov 27 07:07:31 crc kubenswrapper[4971]: I1127 07:07:31.402852 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5a1c4271-d673-41e7-a19d-76717f9f9c31-crc-storage\") pod \"crc-storage-crc-9dxfl\" (UID: \"5a1c4271-d673-41e7-a19d-76717f9f9c31\") " pod="crc-storage/crc-storage-crc-9dxfl" Nov 27 07:07:31 crc kubenswrapper[4971]: I1127 07:07:31.504171 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5a1c4271-d673-41e7-a19d-76717f9f9c31-node-mnt\") pod \"crc-storage-crc-9dxfl\" (UID: \"5a1c4271-d673-41e7-a19d-76717f9f9c31\") " pod="crc-storage/crc-storage-crc-9dxfl" Nov 27 07:07:31 crc kubenswrapper[4971]: I1127 07:07:31.504246 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5b4lj\" (UniqueName: \"kubernetes.io/projected/5a1c4271-d673-41e7-a19d-76717f9f9c31-kube-api-access-5b4lj\") pod \"crc-storage-crc-9dxfl\" (UID: \"5a1c4271-d673-41e7-a19d-76717f9f9c31\") " pod="crc-storage/crc-storage-crc-9dxfl" Nov 27 07:07:31 crc kubenswrapper[4971]: I1127 07:07:31.504271 4971 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5a1c4271-d673-41e7-a19d-76717f9f9c31-crc-storage\") pod \"crc-storage-crc-9dxfl\" (UID: \"5a1c4271-d673-41e7-a19d-76717f9f9c31\") " pod="crc-storage/crc-storage-crc-9dxfl" Nov 27 07:07:31 crc kubenswrapper[4971]: I1127 07:07:31.504664 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5a1c4271-d673-41e7-a19d-76717f9f9c31-node-mnt\") pod \"crc-storage-crc-9dxfl\" (UID: \"5a1c4271-d673-41e7-a19d-76717f9f9c31\") " pod="crc-storage/crc-storage-crc-9dxfl" Nov 27 07:07:31 crc kubenswrapper[4971]: I1127 07:07:31.506513 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5a1c4271-d673-41e7-a19d-76717f9f9c31-crc-storage\") pod \"crc-storage-crc-9dxfl\" (UID: \"5a1c4271-d673-41e7-a19d-76717f9f9c31\") " pod="crc-storage/crc-storage-crc-9dxfl" Nov 27 07:07:31 crc kubenswrapper[4971]: I1127 07:07:31.528871 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5b4lj\" (UniqueName: \"kubernetes.io/projected/5a1c4271-d673-41e7-a19d-76717f9f9c31-kube-api-access-5b4lj\") pod \"crc-storage-crc-9dxfl\" (UID: \"5a1c4271-d673-41e7-a19d-76717f9f9c31\") " pod="crc-storage/crc-storage-crc-9dxfl" Nov 27 07:07:31 crc kubenswrapper[4971]: I1127 07:07:31.624089 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-9dxfl" Nov 27 07:07:31 crc kubenswrapper[4971]: E1127 07:07:31.649046 4971 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-9dxfl_crc-storage_5a1c4271-d673-41e7-a19d-76717f9f9c31_0(db1c3818680646447a27c621585f258423affa872880407c7bafb057d2c297c3): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 27 07:07:31 crc kubenswrapper[4971]: E1127 07:07:31.649220 4971 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-9dxfl_crc-storage_5a1c4271-d673-41e7-a19d-76717f9f9c31_0(db1c3818680646447a27c621585f258423affa872880407c7bafb057d2c297c3): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-9dxfl" Nov 27 07:07:31 crc kubenswrapper[4971]: E1127 07:07:31.649310 4971 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-9dxfl_crc-storage_5a1c4271-d673-41e7-a19d-76717f9f9c31_0(db1c3818680646447a27c621585f258423affa872880407c7bafb057d2c297c3): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="crc-storage/crc-storage-crc-9dxfl" Nov 27 07:07:31 crc kubenswrapper[4971]: E1127 07:07:31.649425 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-9dxfl_crc-storage(5a1c4271-d673-41e7-a19d-76717f9f9c31)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-9dxfl_crc-storage(5a1c4271-d673-41e7-a19d-76717f9f9c31)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-9dxfl_crc-storage_5a1c4271-d673-41e7-a19d-76717f9f9c31_0(db1c3818680646447a27c621585f258423affa872880407c7bafb057d2c297c3): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-9dxfl" podUID="5a1c4271-d673-41e7-a19d-76717f9f9c31" Nov 27 07:07:31 crc kubenswrapper[4971]: I1127 07:07:31.993340 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-9dxfl" Nov 27 07:07:31 crc kubenswrapper[4971]: I1127 07:07:31.994070 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-9dxfl" Nov 27 07:07:32 crc kubenswrapper[4971]: I1127 07:07:32.385992 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-9dxfl"] Nov 27 07:07:32 crc kubenswrapper[4971]: I1127 07:07:32.997833 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-9dxfl" event={"ID":"5a1c4271-d673-41e7-a19d-76717f9f9c31","Type":"ContainerStarted","Data":"10953f20fe2c9a1557b7b59f7d56a632e2db150af94872abaa7d272f488fd1dd"} Nov 27 07:07:34 crc kubenswrapper[4971]: I1127 07:07:34.007747 4971 generic.go:334] "Generic (PLEG): container finished" podID="5a1c4271-d673-41e7-a19d-76717f9f9c31" containerID="6aec07e7dbacd67ae3399b7961202c2d21558e123ae9b8b31a484ba1ea981319" exitCode=0 Nov 27 07:07:34 crc kubenswrapper[4971]: I1127 07:07:34.007827 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-9dxfl" event={"ID":"5a1c4271-d673-41e7-a19d-76717f9f9c31","Type":"ContainerDied","Data":"6aec07e7dbacd67ae3399b7961202c2d21558e123ae9b8b31a484ba1ea981319"} Nov 27 07:07:34 crc kubenswrapper[4971]: I1127 07:07:34.819634 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7t5nq"] Nov 27 07:07:34 crc kubenswrapper[4971]: I1127 07:07:34.822343 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7t5nq" Nov 27 07:07:34 crc kubenswrapper[4971]: I1127 07:07:34.831384 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7t5nq"] Nov 27 07:07:34 crc kubenswrapper[4971]: I1127 07:07:34.860288 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7-catalog-content\") pod \"redhat-marketplace-7t5nq\" (UID: \"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7\") " pod="openshift-marketplace/redhat-marketplace-7t5nq" Nov 27 07:07:34 crc kubenswrapper[4971]: I1127 07:07:34.860367 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhhr9\" (UniqueName: \"kubernetes.io/projected/c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7-kube-api-access-dhhr9\") pod \"redhat-marketplace-7t5nq\" (UID: \"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7\") " pod="openshift-marketplace/redhat-marketplace-7t5nq" Nov 27 07:07:34 crc kubenswrapper[4971]: I1127 07:07:34.860403 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7-utilities\") pod \"redhat-marketplace-7t5nq\" (UID: \"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7\") " pod="openshift-marketplace/redhat-marketplace-7t5nq" Nov 27 07:07:34 crc kubenswrapper[4971]: I1127 07:07:34.961575 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7-catalog-content\") pod \"redhat-marketplace-7t5nq\" (UID: \"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7\") " pod="openshift-marketplace/redhat-marketplace-7t5nq" Nov 27 07:07:34 crc kubenswrapper[4971]: I1127 07:07:34.961635 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhhr9\" (UniqueName: \"kubernetes.io/projected/c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7-kube-api-access-dhhr9\") pod \"redhat-marketplace-7t5nq\" (UID: \"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7\") " pod="openshift-marketplace/redhat-marketplace-7t5nq" Nov 27 07:07:34 crc kubenswrapper[4971]: I1127 07:07:34.961677 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7-utilities\") pod \"redhat-marketplace-7t5nq\" (UID: \"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7\") " pod="openshift-marketplace/redhat-marketplace-7t5nq" Nov 27 07:07:34 crc kubenswrapper[4971]: I1127 07:07:34.962169 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7-utilities\") pod \"redhat-marketplace-7t5nq\" (UID: \"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7\") " pod="openshift-marketplace/redhat-marketplace-7t5nq" Nov 27 07:07:34 crc kubenswrapper[4971]: I1127 07:07:34.962391 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7-catalog-content\") pod \"redhat-marketplace-7t5nq\" (UID: \"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7\") " pod="openshift-marketplace/redhat-marketplace-7t5nq" Nov 27 07:07:34 crc kubenswrapper[4971]: I1127 07:07:34.992009 4971 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-dhhr9\" (UniqueName: \"kubernetes.io/projected/c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7-kube-api-access-dhhr9\") pod \"redhat-marketplace-7t5nq\" (UID: \"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7\") " pod="openshift-marketplace/redhat-marketplace-7t5nq" Nov 27 07:07:35 crc kubenswrapper[4971]: I1127 07:07:35.147409 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7t5nq" Nov 27 07:07:35 crc kubenswrapper[4971]: I1127 07:07:35.308442 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-9dxfl" Nov 27 07:07:35 crc kubenswrapper[4971]: I1127 07:07:35.353903 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7t5nq"] Nov 27 07:07:35 crc kubenswrapper[4971]: W1127 07:07:35.359774 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc43d2f44_c5cb_4fe0_89fb_6e5d32de67e7.slice/crio-1fbfc7001ee197bd5213fa65edfcb7a326fde7e29acf894534ad91672fcd5b91 WatchSource:0}: Error finding container 1fbfc7001ee197bd5213fa65edfcb7a326fde7e29acf894534ad91672fcd5b91: Status 404 returned error can't find the container with id 1fbfc7001ee197bd5213fa65edfcb7a326fde7e29acf894534ad91672fcd5b91 Nov 27 07:07:35 crc kubenswrapper[4971]: I1127 07:07:35.373257 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5a1c4271-d673-41e7-a19d-76717f9f9c31-node-mnt\") pod \"5a1c4271-d673-41e7-a19d-76717f9f9c31\" (UID: \"5a1c4271-d673-41e7-a19d-76717f9f9c31\") " Nov 27 07:07:35 crc kubenswrapper[4971]: I1127 07:07:35.373336 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5a1c4271-d673-41e7-a19d-76717f9f9c31-crc-storage\") pod \"5a1c4271-d673-41e7-a19d-76717f9f9c31\" (UID: \"5a1c4271-d673-41e7-a19d-76717f9f9c31\") " Nov 27 07:07:35 crc kubenswrapper[4971]: I1127 07:07:35.373410 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5b4lj\" (UniqueName: \"kubernetes.io/projected/5a1c4271-d673-41e7-a19d-76717f9f9c31-kube-api-access-5b4lj\") pod \"5a1c4271-d673-41e7-a19d-76717f9f9c31\" (UID: \"5a1c4271-d673-41e7-a19d-76717f9f9c31\") " Nov 27 07:07:35 crc kubenswrapper[4971]: I1127 07:07:35.373733 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5a1c4271-d673-41e7-a19d-76717f9f9c31-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "5a1c4271-d673-41e7-a19d-76717f9f9c31" (UID: "5a1c4271-d673-41e7-a19d-76717f9f9c31"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:07:35 crc kubenswrapper[4971]: I1127 07:07:35.374145 4971 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5a1c4271-d673-41e7-a19d-76717f9f9c31-node-mnt\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:35 crc kubenswrapper[4971]: I1127 07:07:35.379300 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a1c4271-d673-41e7-a19d-76717f9f9c31-kube-api-access-5b4lj" (OuterVolumeSpecName: "kube-api-access-5b4lj") pod "5a1c4271-d673-41e7-a19d-76717f9f9c31" (UID: "5a1c4271-d673-41e7-a19d-76717f9f9c31"). InnerVolumeSpecName "kube-api-access-5b4lj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:07:35 crc kubenswrapper[4971]: I1127 07:07:35.391082 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a1c4271-d673-41e7-a19d-76717f9f9c31-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "5a1c4271-d673-41e7-a19d-76717f9f9c31" (UID: "5a1c4271-d673-41e7-a19d-76717f9f9c31"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:07:35 crc kubenswrapper[4971]: I1127 07:07:35.475392 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5b4lj\" (UniqueName: \"kubernetes.io/projected/5a1c4271-d673-41e7-a19d-76717f9f9c31-kube-api-access-5b4lj\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:35 crc kubenswrapper[4971]: I1127 07:07:35.475432 4971 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5a1c4271-d673-41e7-a19d-76717f9f9c31-crc-storage\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:36 crc kubenswrapper[4971]: I1127 07:07:36.028968 4971 generic.go:334] "Generic (PLEG): container finished" podID="c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7" containerID="14b7a7d62ae60e95be84dd5119817ff69f5d3d77b26c410a8b13f50371c83dd7" exitCode=0 Nov 27 07:07:36 crc kubenswrapper[4971]: I1127 07:07:36.029080 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7t5nq" event={"ID":"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7","Type":"ContainerDied","Data":"14b7a7d62ae60e95be84dd5119817ff69f5d3d77b26c410a8b13f50371c83dd7"} Nov 27 07:07:36 crc kubenswrapper[4971]: I1127 07:07:36.029122 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7t5nq" event={"ID":"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7","Type":"ContainerStarted","Data":"1fbfc7001ee197bd5213fa65edfcb7a326fde7e29acf894534ad91672fcd5b91"} Nov 27 07:07:36 crc kubenswrapper[4971]: I1127 07:07:36.031082 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-9dxfl" event={"ID":"5a1c4271-d673-41e7-a19d-76717f9f9c31","Type":"ContainerDied","Data":"10953f20fe2c9a1557b7b59f7d56a632e2db150af94872abaa7d272f488fd1dd"} Nov 27 07:07:36 crc kubenswrapper[4971]: I1127 07:07:36.031136 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10953f20fe2c9a1557b7b59f7d56a632e2db150af94872abaa7d272f488fd1dd" Nov 27 07:07:36 crc kubenswrapper[4971]: I1127 07:07:36.031249 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-9dxfl" Nov 27 07:07:38 crc kubenswrapper[4971]: I1127 07:07:38.048565 4971 generic.go:334] "Generic (PLEG): container finished" podID="c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7" containerID="903fbbf3158b3ffbea23572f454bc76072aaa6017fede5738ffce8f25fa0208f" exitCode=0 Nov 27 07:07:38 crc kubenswrapper[4971]: I1127 07:07:38.048672 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7t5nq" event={"ID":"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7","Type":"ContainerDied","Data":"903fbbf3158b3ffbea23572f454bc76072aaa6017fede5738ffce8f25fa0208f"} Nov 27 07:07:39 crc kubenswrapper[4971]: I1127 07:07:39.058039 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7t5nq" event={"ID":"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7","Type":"ContainerStarted","Data":"61a1da8f2b34072df23f0ec54f073c4dcd40ec03d01ec593672447673b52b8d9"} Nov 27 07:07:39 crc kubenswrapper[4971]: I1127 07:07:39.088956 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7t5nq" podStartSLOduration=2.560573223 podStartE2EDuration="5.088922016s" podCreationTimestamp="2025-11-27 07:07:34 +0000 UTC" firstStartedPulling="2025-11-27 07:07:36.030983684 +0000 UTC m=+894.223027602" lastFinishedPulling="2025-11-27 07:07:38.559332487 +0000 UTC m=+896.751376395" observedRunningTime="2025-11-27 07:07:39.083379813 +0000 UTC m=+897.275423731" watchObservedRunningTime="2025-11-27 07:07:39.088922016 +0000 UTC m=+897.280965934" Nov 27 07:07:43 crc kubenswrapper[4971]: I1127 07:07:43.123396 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9"] Nov 27 07:07:43 crc kubenswrapper[4971]: E1127 07:07:43.125195 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a1c4271-d673-41e7-a19d-76717f9f9c31" containerName="storage" Nov 27 07:07:43 crc kubenswrapper[4971]: I1127 07:07:43.125265 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a1c4271-d673-41e7-a19d-76717f9f9c31" containerName="storage" Nov 27 07:07:43 crc kubenswrapper[4971]: I1127 07:07:43.125419 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a1c4271-d673-41e7-a19d-76717f9f9c31" containerName="storage" Nov 27 07:07:43 crc kubenswrapper[4971]: I1127 07:07:43.126237 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9" Nov 27 07:07:43 crc kubenswrapper[4971]: I1127 07:07:43.129149 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 27 07:07:43 crc kubenswrapper[4971]: I1127 07:07:43.143557 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9"] Nov 27 07:07:43 crc kubenswrapper[4971]: I1127 07:07:43.191354 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dd894e14-f465-47e2-aaed-d2060077c852-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9\" (UID: \"dd894e14-f465-47e2-aaed-d2060077c852\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9" Nov 27 07:07:43 crc kubenswrapper[4971]: I1127 07:07:43.191852 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5pcq\" (UniqueName: \"kubernetes.io/projected/dd894e14-f465-47e2-aaed-d2060077c852-kube-api-access-p5pcq\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9\" (UID: \"dd894e14-f465-47e2-aaed-d2060077c852\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9" Nov 27 07:07:43 crc kubenswrapper[4971]: I1127 07:07:43.192035 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dd894e14-f465-47e2-aaed-d2060077c852-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9\" (UID: \"dd894e14-f465-47e2-aaed-d2060077c852\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9" Nov 27 07:07:43 crc kubenswrapper[4971]: I1127 07:07:43.293034 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5pcq\" (UniqueName: \"kubernetes.io/projected/dd894e14-f465-47e2-aaed-d2060077c852-kube-api-access-p5pcq\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9\" (UID: \"dd894e14-f465-47e2-aaed-d2060077c852\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9" Nov 27 07:07:43 crc kubenswrapper[4971]: I1127 07:07:43.293439 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dd894e14-f465-47e2-aaed-d2060077c852-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9\" (UID: \"dd894e14-f465-47e2-aaed-d2060077c852\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9" Nov 27 07:07:43 crc kubenswrapper[4971]: I1127 07:07:43.293673 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dd894e14-f465-47e2-aaed-d2060077c852-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9\" (UID: \"dd894e14-f465-47e2-aaed-d2060077c852\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9" Nov 27 07:07:43 crc kubenswrapper[4971]: I1127 07:07:43.294419 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/dd894e14-f465-47e2-aaed-d2060077c852-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9\" (UID: \"dd894e14-f465-47e2-aaed-d2060077c852\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9" Nov 27 07:07:43 crc kubenswrapper[4971]: I1127 07:07:43.294561 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dd894e14-f465-47e2-aaed-d2060077c852-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9\" (UID: \"dd894e14-f465-47e2-aaed-d2060077c852\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9" Nov 27 07:07:43 crc kubenswrapper[4971]: I1127 07:07:43.328756 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5pcq\" (UniqueName: \"kubernetes.io/projected/dd894e14-f465-47e2-aaed-d2060077c852-kube-api-access-p5pcq\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9\" (UID: \"dd894e14-f465-47e2-aaed-d2060077c852\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9" Nov 27 07:07:43 crc kubenswrapper[4971]: I1127 07:07:43.446267 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9" Nov 27 07:07:43 crc kubenswrapper[4971]: I1127 07:07:43.691391 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9"] Nov 27 07:07:43 crc kubenswrapper[4971]: W1127 07:07:43.701235 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddd894e14_f465_47e2_aaed_d2060077c852.slice/crio-faa2f5a92e87b3dadd4ac9eec19e09b2892a84ec0a6eb74b27d4161da55db375 WatchSource:0}: Error finding container faa2f5a92e87b3dadd4ac9eec19e09b2892a84ec0a6eb74b27d4161da55db375: Status 404 returned error can't find the container with id faa2f5a92e87b3dadd4ac9eec19e09b2892a84ec0a6eb74b27d4161da55db375 Nov 27 07:07:44 crc kubenswrapper[4971]: I1127 07:07:44.096947 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9" event={"ID":"dd894e14-f465-47e2-aaed-d2060077c852","Type":"ContainerStarted","Data":"ce55e519541601426050614c8f4bdbc3e9b931c5f5d179be5abd3e056893ebe3"} Nov 27 07:07:44 crc kubenswrapper[4971]: I1127 07:07:44.097033 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9" event={"ID":"dd894e14-f465-47e2-aaed-d2060077c852","Type":"ContainerStarted","Data":"faa2f5a92e87b3dadd4ac9eec19e09b2892a84ec0a6eb74b27d4161da55db375"} Nov 27 07:07:44 crc kubenswrapper[4971]: I1127 07:07:44.795640 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8clq2"] Nov 27 07:07:44 crc kubenswrapper[4971]: I1127 07:07:44.797084 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8clq2" Nov 27 07:07:44 crc kubenswrapper[4971]: I1127 07:07:44.815233 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a22911c-f760-4516-b57c-2ed619ed1832-catalog-content\") pod \"redhat-operators-8clq2\" (UID: \"3a22911c-f760-4516-b57c-2ed619ed1832\") " pod="openshift-marketplace/redhat-operators-8clq2" Nov 27 07:07:44 crc kubenswrapper[4971]: I1127 07:07:44.815337 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a22911c-f760-4516-b57c-2ed619ed1832-utilities\") pod \"redhat-operators-8clq2\" (UID: \"3a22911c-f760-4516-b57c-2ed619ed1832\") " pod="openshift-marketplace/redhat-operators-8clq2" Nov 27 07:07:44 crc kubenswrapper[4971]: I1127 07:07:44.815382 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qx4cb\" (UniqueName: \"kubernetes.io/projected/3a22911c-f760-4516-b57c-2ed619ed1832-kube-api-access-qx4cb\") pod \"redhat-operators-8clq2\" (UID: \"3a22911c-f760-4516-b57c-2ed619ed1832\") " pod="openshift-marketplace/redhat-operators-8clq2" Nov 27 07:07:44 crc kubenswrapper[4971]: I1127 07:07:44.818419 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8clq2"] Nov 27 07:07:44 crc kubenswrapper[4971]: I1127 07:07:44.915707 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qx4cb\" (UniqueName: \"kubernetes.io/projected/3a22911c-f760-4516-b57c-2ed619ed1832-kube-api-access-qx4cb\") pod \"redhat-operators-8clq2\" (UID: \"3a22911c-f760-4516-b57c-2ed619ed1832\") " pod="openshift-marketplace/redhat-operators-8clq2" Nov 27 07:07:44 crc kubenswrapper[4971]: I1127 07:07:44.915790 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a22911c-f760-4516-b57c-2ed619ed1832-catalog-content\") pod \"redhat-operators-8clq2\" (UID: \"3a22911c-f760-4516-b57c-2ed619ed1832\") " pod="openshift-marketplace/redhat-operators-8clq2" Nov 27 07:07:44 crc kubenswrapper[4971]: I1127 07:07:44.915829 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a22911c-f760-4516-b57c-2ed619ed1832-utilities\") pod \"redhat-operators-8clq2\" (UID: \"3a22911c-f760-4516-b57c-2ed619ed1832\") " pod="openshift-marketplace/redhat-operators-8clq2" Nov 27 07:07:44 crc kubenswrapper[4971]: I1127 07:07:44.916322 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a22911c-f760-4516-b57c-2ed619ed1832-utilities\") pod \"redhat-operators-8clq2\" (UID: \"3a22911c-f760-4516-b57c-2ed619ed1832\") " pod="openshift-marketplace/redhat-operators-8clq2" Nov 27 07:07:44 crc kubenswrapper[4971]: I1127 07:07:44.916576 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a22911c-f760-4516-b57c-2ed619ed1832-catalog-content\") pod \"redhat-operators-8clq2\" (UID: \"3a22911c-f760-4516-b57c-2ed619ed1832\") " pod="openshift-marketplace/redhat-operators-8clq2" Nov 27 07:07:44 crc kubenswrapper[4971]: I1127 07:07:44.949221 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-qx4cb\" (UniqueName: \"kubernetes.io/projected/3a22911c-f760-4516-b57c-2ed619ed1832-kube-api-access-qx4cb\") pod \"redhat-operators-8clq2\" (UID: \"3a22911c-f760-4516-b57c-2ed619ed1832\") " pod="openshift-marketplace/redhat-operators-8clq2" Nov 27 07:07:45 crc kubenswrapper[4971]: I1127 07:07:45.105111 4971 generic.go:334] "Generic (PLEG): container finished" podID="dd894e14-f465-47e2-aaed-d2060077c852" containerID="ce55e519541601426050614c8f4bdbc3e9b931c5f5d179be5abd3e056893ebe3" exitCode=0 Nov 27 07:07:45 crc kubenswrapper[4971]: I1127 07:07:45.105187 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9" event={"ID":"dd894e14-f465-47e2-aaed-d2060077c852","Type":"ContainerDied","Data":"ce55e519541601426050614c8f4bdbc3e9b931c5f5d179be5abd3e056893ebe3"} Nov 27 07:07:45 crc kubenswrapper[4971]: I1127 07:07:45.133582 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8clq2" Nov 27 07:07:45 crc kubenswrapper[4971]: I1127 07:07:45.150104 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7t5nq" Nov 27 07:07:45 crc kubenswrapper[4971]: I1127 07:07:45.150188 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7t5nq" Nov 27 07:07:45 crc kubenswrapper[4971]: I1127 07:07:45.216037 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7t5nq" Nov 27 07:07:45 crc kubenswrapper[4971]: I1127 07:07:45.377164 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8clq2"] Nov 27 07:07:45 crc kubenswrapper[4971]: W1127 07:07:45.382848 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a22911c_f760_4516_b57c_2ed619ed1832.slice/crio-d3d3bbab0441ae17f7f9418d376e4baea5a690a9ed586b91922c05729f9b9c99 WatchSource:0}: Error finding container d3d3bbab0441ae17f7f9418d376e4baea5a690a9ed586b91922c05729f9b9c99: Status 404 returned error can't find the container with id d3d3bbab0441ae17f7f9418d376e4baea5a690a9ed586b91922c05729f9b9c99 Nov 27 07:07:46 crc kubenswrapper[4971]: I1127 07:07:46.114209 4971 generic.go:334] "Generic (PLEG): container finished" podID="3a22911c-f760-4516-b57c-2ed619ed1832" containerID="893c9114643d5edf01a04bf9ebfd63f266fee1f5f3a2e1cae2468a276062b723" exitCode=0 Nov 27 07:07:46 crc kubenswrapper[4971]: I1127 07:07:46.114277 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8clq2" event={"ID":"3a22911c-f760-4516-b57c-2ed619ed1832","Type":"ContainerDied","Data":"893c9114643d5edf01a04bf9ebfd63f266fee1f5f3a2e1cae2468a276062b723"} Nov 27 07:07:46 crc kubenswrapper[4971]: I1127 07:07:46.114752 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8clq2" event={"ID":"3a22911c-f760-4516-b57c-2ed619ed1832","Type":"ContainerStarted","Data":"d3d3bbab0441ae17f7f9418d376e4baea5a690a9ed586b91922c05729f9b9c99"} Nov 27 07:07:46 crc kubenswrapper[4971]: I1127 07:07:46.158883 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7t5nq" Nov 27 07:07:47 crc kubenswrapper[4971]: I1127 07:07:47.124267 4971 generic.go:334] "Generic (PLEG): container finished" 
podID="dd894e14-f465-47e2-aaed-d2060077c852" containerID="13734b6bb4b40ed05b274bcc4296cf0ddf78c267f9b7086a640997aef300f167" exitCode=0 Nov 27 07:07:47 crc kubenswrapper[4971]: I1127 07:07:47.124371 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9" event={"ID":"dd894e14-f465-47e2-aaed-d2060077c852","Type":"ContainerDied","Data":"13734b6bb4b40ed05b274bcc4296cf0ddf78c267f9b7086a640997aef300f167"} Nov 27 07:07:47 crc kubenswrapper[4971]: I1127 07:07:47.127507 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8clq2" event={"ID":"3a22911c-f760-4516-b57c-2ed619ed1832","Type":"ContainerStarted","Data":"e3ac7c8edf0fae481b69f43dfa6709b10177ac9c7a9846247495541943f7a765"} Nov 27 07:07:48 crc kubenswrapper[4971]: I1127 07:07:48.138619 4971 generic.go:334] "Generic (PLEG): container finished" podID="dd894e14-f465-47e2-aaed-d2060077c852" containerID="174121beae46273c0fc69200619fdd694db61f537dccb3518f9cd14093bb3960" exitCode=0 Nov 27 07:07:48 crc kubenswrapper[4971]: I1127 07:07:48.138724 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9" event={"ID":"dd894e14-f465-47e2-aaed-d2060077c852","Type":"ContainerDied","Data":"174121beae46273c0fc69200619fdd694db61f537dccb3518f9cd14093bb3960"} Nov 27 07:07:48 crc kubenswrapper[4971]: I1127 07:07:48.140995 4971 generic.go:334] "Generic (PLEG): container finished" podID="3a22911c-f760-4516-b57c-2ed619ed1832" containerID="e3ac7c8edf0fae481b69f43dfa6709b10177ac9c7a9846247495541943f7a765" exitCode=0 Nov 27 07:07:48 crc kubenswrapper[4971]: I1127 07:07:48.141033 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8clq2" event={"ID":"3a22911c-f760-4516-b57c-2ed619ed1832","Type":"ContainerDied","Data":"e3ac7c8edf0fae481b69f43dfa6709b10177ac9c7a9846247495541943f7a765"} Nov 27 07:07:48 crc kubenswrapper[4971]: I1127 07:07:48.779918 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7t5nq"] Nov 27 07:07:48 crc kubenswrapper[4971]: I1127 07:07:48.780214 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7t5nq" podUID="c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7" containerName="registry-server" containerID="cri-o://61a1da8f2b34072df23f0ec54f073c4dcd40ec03d01ec593672447673b52b8d9" gracePeriod=2 Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.146468 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7t5nq" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.158940 4971 generic.go:334] "Generic (PLEG): container finished" podID="c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7" containerID="61a1da8f2b34072df23f0ec54f073c4dcd40ec03d01ec593672447673b52b8d9" exitCode=0 Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.159101 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7t5nq" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.159463 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7t5nq" event={"ID":"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7","Type":"ContainerDied","Data":"61a1da8f2b34072df23f0ec54f073c4dcd40ec03d01ec593672447673b52b8d9"} Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.159506 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7t5nq" event={"ID":"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7","Type":"ContainerDied","Data":"1fbfc7001ee197bd5213fa65edfcb7a326fde7e29acf894534ad91672fcd5b91"} Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.159553 4971 scope.go:117] "RemoveContainer" containerID="61a1da8f2b34072df23f0ec54f073c4dcd40ec03d01ec593672447673b52b8d9" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.162677 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8clq2" event={"ID":"3a22911c-f760-4516-b57c-2ed619ed1832","Type":"ContainerStarted","Data":"4ea7ba37bd0a52ab2a534c6462a88a2500567c8d2b3a28a82c443863bbfea682"} Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.196911 4971 scope.go:117] "RemoveContainer" containerID="903fbbf3158b3ffbea23572f454bc76072aaa6017fede5738ffce8f25fa0208f" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.203792 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8clq2" podStartSLOduration=2.78319868 podStartE2EDuration="5.203773176s" podCreationTimestamp="2025-11-27 07:07:44 +0000 UTC" firstStartedPulling="2025-11-27 07:07:46.183051645 +0000 UTC m=+904.375095563" lastFinishedPulling="2025-11-27 07:07:48.603626131 +0000 UTC m=+906.795670059" observedRunningTime="2025-11-27 07:07:49.197703108 +0000 UTC m=+907.389747036" watchObservedRunningTime="2025-11-27 07:07:49.203773176 +0000 UTC m=+907.395817094" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.221757 4971 scope.go:117] "RemoveContainer" containerID="14b7a7d62ae60e95be84dd5119817ff69f5d3d77b26c410a8b13f50371c83dd7" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.256058 4971 scope.go:117] "RemoveContainer" containerID="61a1da8f2b34072df23f0ec54f073c4dcd40ec03d01ec593672447673b52b8d9" Nov 27 07:07:49 crc kubenswrapper[4971]: E1127 07:07:49.256704 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61a1da8f2b34072df23f0ec54f073c4dcd40ec03d01ec593672447673b52b8d9\": container with ID starting with 61a1da8f2b34072df23f0ec54f073c4dcd40ec03d01ec593672447673b52b8d9 not found: ID does not exist" containerID="61a1da8f2b34072df23f0ec54f073c4dcd40ec03d01ec593672447673b52b8d9" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.256746 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61a1da8f2b34072df23f0ec54f073c4dcd40ec03d01ec593672447673b52b8d9"} err="failed to get container status \"61a1da8f2b34072df23f0ec54f073c4dcd40ec03d01ec593672447673b52b8d9\": rpc error: code = NotFound desc = could not find container \"61a1da8f2b34072df23f0ec54f073c4dcd40ec03d01ec593672447673b52b8d9\": container with ID starting with 61a1da8f2b34072df23f0ec54f073c4dcd40ec03d01ec593672447673b52b8d9 not found: ID does not exist" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.256774 4971 scope.go:117] "RemoveContainer" 
containerID="903fbbf3158b3ffbea23572f454bc76072aaa6017fede5738ffce8f25fa0208f" Nov 27 07:07:49 crc kubenswrapper[4971]: E1127 07:07:49.257494 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"903fbbf3158b3ffbea23572f454bc76072aaa6017fede5738ffce8f25fa0208f\": container with ID starting with 903fbbf3158b3ffbea23572f454bc76072aaa6017fede5738ffce8f25fa0208f not found: ID does not exist" containerID="903fbbf3158b3ffbea23572f454bc76072aaa6017fede5738ffce8f25fa0208f" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.257522 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"903fbbf3158b3ffbea23572f454bc76072aaa6017fede5738ffce8f25fa0208f"} err="failed to get container status \"903fbbf3158b3ffbea23572f454bc76072aaa6017fede5738ffce8f25fa0208f\": rpc error: code = NotFound desc = could not find container \"903fbbf3158b3ffbea23572f454bc76072aaa6017fede5738ffce8f25fa0208f\": container with ID starting with 903fbbf3158b3ffbea23572f454bc76072aaa6017fede5738ffce8f25fa0208f not found: ID does not exist" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.257640 4971 scope.go:117] "RemoveContainer" containerID="14b7a7d62ae60e95be84dd5119817ff69f5d3d77b26c410a8b13f50371c83dd7" Nov 27 07:07:49 crc kubenswrapper[4971]: E1127 07:07:49.258142 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14b7a7d62ae60e95be84dd5119817ff69f5d3d77b26c410a8b13f50371c83dd7\": container with ID starting with 14b7a7d62ae60e95be84dd5119817ff69f5d3d77b26c410a8b13f50371c83dd7 not found: ID does not exist" containerID="14b7a7d62ae60e95be84dd5119817ff69f5d3d77b26c410a8b13f50371c83dd7" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.258162 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14b7a7d62ae60e95be84dd5119817ff69f5d3d77b26c410a8b13f50371c83dd7"} err="failed to get container status \"14b7a7d62ae60e95be84dd5119817ff69f5d3d77b26c410a8b13f50371c83dd7\": rpc error: code = NotFound desc = could not find container \"14b7a7d62ae60e95be84dd5119817ff69f5d3d77b26c410a8b13f50371c83dd7\": container with ID starting with 14b7a7d62ae60e95be84dd5119817ff69f5d3d77b26c410a8b13f50371c83dd7 not found: ID does not exist" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.293226 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7-catalog-content\") pod \"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7\" (UID: \"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7\") " Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.293392 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhhr9\" (UniqueName: \"kubernetes.io/projected/c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7-kube-api-access-dhhr9\") pod \"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7\" (UID: \"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7\") " Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.293468 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7-utilities\") pod \"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7\" (UID: \"c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7\") " Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.294742 4971 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7-utilities" (OuterVolumeSpecName: "utilities") pod "c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7" (UID: "c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.301330 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7-kube-api-access-dhhr9" (OuterVolumeSpecName: "kube-api-access-dhhr9") pod "c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7" (UID: "c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7"). InnerVolumeSpecName "kube-api-access-dhhr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.321983 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7" (UID: "c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.379466 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.395274 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.395322 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhhr9\" (UniqueName: \"kubernetes.io/projected/c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7-kube-api-access-dhhr9\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.395336 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.494488 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7t5nq"] Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.497652 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p5pcq\" (UniqueName: \"kubernetes.io/projected/dd894e14-f465-47e2-aaed-d2060077c852-kube-api-access-p5pcq\") pod \"dd894e14-f465-47e2-aaed-d2060077c852\" (UID: \"dd894e14-f465-47e2-aaed-d2060077c852\") " Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.497787 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dd894e14-f465-47e2-aaed-d2060077c852-bundle\") pod \"dd894e14-f465-47e2-aaed-d2060077c852\" (UID: \"dd894e14-f465-47e2-aaed-d2060077c852\") " Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.497936 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dd894e14-f465-47e2-aaed-d2060077c852-util\") pod \"dd894e14-f465-47e2-aaed-d2060077c852\" (UID: \"dd894e14-f465-47e2-aaed-d2060077c852\") " Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 
07:07:49.498520 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd894e14-f465-47e2-aaed-d2060077c852-bundle" (OuterVolumeSpecName: "bundle") pod "dd894e14-f465-47e2-aaed-d2060077c852" (UID: "dd894e14-f465-47e2-aaed-d2060077c852"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.498638 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd894e14-f465-47e2-aaed-d2060077c852-util" (OuterVolumeSpecName: "util") pod "dd894e14-f465-47e2-aaed-d2060077c852" (UID: "dd894e14-f465-47e2-aaed-d2060077c852"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.500080 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7t5nq"] Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.599502 4971 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dd894e14-f465-47e2-aaed-d2060077c852-util\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.599557 4971 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dd894e14-f465-47e2-aaed-d2060077c852-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.800117 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd894e14-f465-47e2-aaed-d2060077c852-kube-api-access-p5pcq" (OuterVolumeSpecName: "kube-api-access-p5pcq") pod "dd894e14-f465-47e2-aaed-d2060077c852" (UID: "dd894e14-f465-47e2-aaed-d2060077c852"). InnerVolumeSpecName "kube-api-access-p5pcq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:07:49 crc kubenswrapper[4971]: I1127 07:07:49.801868 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5pcq\" (UniqueName: \"kubernetes.io/projected/dd894e14-f465-47e2-aaed-d2060077c852-kube-api-access-p5pcq\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:50 crc kubenswrapper[4971]: I1127 07:07:50.174793 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9" event={"ID":"dd894e14-f465-47e2-aaed-d2060077c852","Type":"ContainerDied","Data":"faa2f5a92e87b3dadd4ac9eec19e09b2892a84ec0a6eb74b27d4161da55db375"} Nov 27 07:07:50 crc kubenswrapper[4971]: I1127 07:07:50.174855 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9" Nov 27 07:07:50 crc kubenswrapper[4971]: I1127 07:07:50.174864 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="faa2f5a92e87b3dadd4ac9eec19e09b2892a84ec0a6eb74b27d4161da55db375" Nov 27 07:07:50 crc kubenswrapper[4971]: I1127 07:07:50.557864 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7" path="/var/lib/kubelet/pods/c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7/volumes" Nov 27 07:07:51 crc kubenswrapper[4971]: I1127 07:07:51.548492 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-wvbzj" Nov 27 07:07:54 crc kubenswrapper[4971]: I1127 07:07:54.638268 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-2xtbl"] Nov 27 07:07:54 crc kubenswrapper[4971]: E1127 07:07:54.639055 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7" containerName="extract-content" Nov 27 07:07:54 crc kubenswrapper[4971]: I1127 07:07:54.639077 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7" containerName="extract-content" Nov 27 07:07:54 crc kubenswrapper[4971]: E1127 07:07:54.639094 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7" containerName="extract-utilities" Nov 27 07:07:54 crc kubenswrapper[4971]: I1127 07:07:54.639102 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7" containerName="extract-utilities" Nov 27 07:07:54 crc kubenswrapper[4971]: E1127 07:07:54.639113 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd894e14-f465-47e2-aaed-d2060077c852" containerName="util" Nov 27 07:07:54 crc kubenswrapper[4971]: I1127 07:07:54.639122 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd894e14-f465-47e2-aaed-d2060077c852" containerName="util" Nov 27 07:07:54 crc kubenswrapper[4971]: E1127 07:07:54.639138 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd894e14-f465-47e2-aaed-d2060077c852" containerName="pull" Nov 27 07:07:54 crc kubenswrapper[4971]: I1127 07:07:54.639145 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd894e14-f465-47e2-aaed-d2060077c852" containerName="pull" Nov 27 07:07:54 crc kubenswrapper[4971]: E1127 07:07:54.639164 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd894e14-f465-47e2-aaed-d2060077c852" containerName="extract" Nov 27 07:07:54 crc kubenswrapper[4971]: I1127 07:07:54.639171 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd894e14-f465-47e2-aaed-d2060077c852" containerName="extract" Nov 27 07:07:54 crc kubenswrapper[4971]: E1127 07:07:54.639186 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7" containerName="registry-server" Nov 27 07:07:54 crc kubenswrapper[4971]: I1127 07:07:54.639193 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7" containerName="registry-server" Nov 27 07:07:54 crc kubenswrapper[4971]: I1127 07:07:54.639310 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c43d2f44-c5cb-4fe0-89fb-6e5d32de67e7" containerName="registry-server" Nov 27 07:07:54 crc kubenswrapper[4971]: I1127 07:07:54.639338 4971 
memory_manager.go:354] "RemoveStaleState removing state" podUID="dd894e14-f465-47e2-aaed-d2060077c852" containerName="extract" Nov 27 07:07:54 crc kubenswrapper[4971]: I1127 07:07:54.639883 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-2xtbl" Nov 27 07:07:54 crc kubenswrapper[4971]: I1127 07:07:54.647662 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Nov 27 07:07:54 crc kubenswrapper[4971]: I1127 07:07:54.648229 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Nov 27 07:07:54 crc kubenswrapper[4971]: I1127 07:07:54.648960 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-p8c22" Nov 27 07:07:54 crc kubenswrapper[4971]: I1127 07:07:54.665288 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-2xtbl"] Nov 27 07:07:54 crc kubenswrapper[4971]: I1127 07:07:54.773109 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c79wv\" (UniqueName: \"kubernetes.io/projected/b8cc8b1e-4eaf-40de-8385-0b5bec5db1ba-kube-api-access-c79wv\") pod \"nmstate-operator-5b5b58f5c8-2xtbl\" (UID: \"b8cc8b1e-4eaf-40de-8385-0b5bec5db1ba\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-2xtbl" Nov 27 07:07:54 crc kubenswrapper[4971]: I1127 07:07:54.874324 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c79wv\" (UniqueName: \"kubernetes.io/projected/b8cc8b1e-4eaf-40de-8385-0b5bec5db1ba-kube-api-access-c79wv\") pod \"nmstate-operator-5b5b58f5c8-2xtbl\" (UID: \"b8cc8b1e-4eaf-40de-8385-0b5bec5db1ba\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-2xtbl" Nov 27 07:07:54 crc kubenswrapper[4971]: I1127 07:07:54.899147 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c79wv\" (UniqueName: \"kubernetes.io/projected/b8cc8b1e-4eaf-40de-8385-0b5bec5db1ba-kube-api-access-c79wv\") pod \"nmstate-operator-5b5b58f5c8-2xtbl\" (UID: \"b8cc8b1e-4eaf-40de-8385-0b5bec5db1ba\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-2xtbl" Nov 27 07:07:54 crc kubenswrapper[4971]: I1127 07:07:54.957101 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-2xtbl" Nov 27 07:07:55 crc kubenswrapper[4971]: I1127 07:07:55.134644 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8clq2" Nov 27 07:07:55 crc kubenswrapper[4971]: I1127 07:07:55.135143 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8clq2" Nov 27 07:07:55 crc kubenswrapper[4971]: I1127 07:07:55.190096 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8clq2" Nov 27 07:07:55 crc kubenswrapper[4971]: I1127 07:07:55.219994 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-2xtbl"] Nov 27 07:07:55 crc kubenswrapper[4971]: I1127 07:07:55.307875 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8clq2" Nov 27 07:07:56 crc kubenswrapper[4971]: I1127 07:07:56.220685 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-2xtbl" event={"ID":"b8cc8b1e-4eaf-40de-8385-0b5bec5db1ba","Type":"ContainerStarted","Data":"bee2fb9ffd8ab0e46585650e19699d2d392c0643c99ab483bc22bc73d867803e"} Nov 27 07:07:56 crc kubenswrapper[4971]: I1127 07:07:56.412959 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:07:56 crc kubenswrapper[4971]: I1127 07:07:56.413022 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:07:56 crc kubenswrapper[4971]: I1127 07:07:56.978810 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8clq2"] Nov 27 07:07:57 crc kubenswrapper[4971]: I1127 07:07:57.227250 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8clq2" podUID="3a22911c-f760-4516-b57c-2ed619ed1832" containerName="registry-server" containerID="cri-o://4ea7ba37bd0a52ab2a534c6462a88a2500567c8d2b3a28a82c443863bbfea682" gracePeriod=2 Nov 27 07:07:57 crc kubenswrapper[4971]: I1127 07:07:57.605935 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8clq2" Nov 27 07:07:57 crc kubenswrapper[4971]: I1127 07:07:57.724440 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qx4cb\" (UniqueName: \"kubernetes.io/projected/3a22911c-f760-4516-b57c-2ed619ed1832-kube-api-access-qx4cb\") pod \"3a22911c-f760-4516-b57c-2ed619ed1832\" (UID: \"3a22911c-f760-4516-b57c-2ed619ed1832\") " Nov 27 07:07:57 crc kubenswrapper[4971]: I1127 07:07:57.724648 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a22911c-f760-4516-b57c-2ed619ed1832-catalog-content\") pod \"3a22911c-f760-4516-b57c-2ed619ed1832\" (UID: \"3a22911c-f760-4516-b57c-2ed619ed1832\") " Nov 27 07:07:57 crc kubenswrapper[4971]: I1127 07:07:57.724690 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a22911c-f760-4516-b57c-2ed619ed1832-utilities\") pod \"3a22911c-f760-4516-b57c-2ed619ed1832\" (UID: \"3a22911c-f760-4516-b57c-2ed619ed1832\") " Nov 27 07:07:57 crc kubenswrapper[4971]: I1127 07:07:57.725748 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a22911c-f760-4516-b57c-2ed619ed1832-utilities" (OuterVolumeSpecName: "utilities") pod "3a22911c-f760-4516-b57c-2ed619ed1832" (UID: "3a22911c-f760-4516-b57c-2ed619ed1832"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:07:57 crc kubenswrapper[4971]: I1127 07:07:57.731966 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a22911c-f760-4516-b57c-2ed619ed1832-kube-api-access-qx4cb" (OuterVolumeSpecName: "kube-api-access-qx4cb") pod "3a22911c-f760-4516-b57c-2ed619ed1832" (UID: "3a22911c-f760-4516-b57c-2ed619ed1832"). InnerVolumeSpecName "kube-api-access-qx4cb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:07:57 crc kubenswrapper[4971]: I1127 07:07:57.824317 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a22911c-f760-4516-b57c-2ed619ed1832-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3a22911c-f760-4516-b57c-2ed619ed1832" (UID: "3a22911c-f760-4516-b57c-2ed619ed1832"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:07:57 crc kubenswrapper[4971]: I1127 07:07:57.826460 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qx4cb\" (UniqueName: \"kubernetes.io/projected/3a22911c-f760-4516-b57c-2ed619ed1832-kube-api-access-qx4cb\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:57 crc kubenswrapper[4971]: I1127 07:07:57.826504 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a22911c-f760-4516-b57c-2ed619ed1832-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:57 crc kubenswrapper[4971]: I1127 07:07:57.826514 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a22911c-f760-4516-b57c-2ed619ed1832-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 07:07:58 crc kubenswrapper[4971]: I1127 07:07:58.233522 4971 generic.go:334] "Generic (PLEG): container finished" podID="3a22911c-f760-4516-b57c-2ed619ed1832" containerID="4ea7ba37bd0a52ab2a534c6462a88a2500567c8d2b3a28a82c443863bbfea682" exitCode=0 Nov 27 07:07:58 crc kubenswrapper[4971]: I1127 07:07:58.233588 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8clq2" Nov 27 07:07:58 crc kubenswrapper[4971]: I1127 07:07:58.233585 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8clq2" event={"ID":"3a22911c-f760-4516-b57c-2ed619ed1832","Type":"ContainerDied","Data":"4ea7ba37bd0a52ab2a534c6462a88a2500567c8d2b3a28a82c443863bbfea682"} Nov 27 07:07:58 crc kubenswrapper[4971]: I1127 07:07:58.233717 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8clq2" event={"ID":"3a22911c-f760-4516-b57c-2ed619ed1832","Type":"ContainerDied","Data":"d3d3bbab0441ae17f7f9418d376e4baea5a690a9ed586b91922c05729f9b9c99"} Nov 27 07:07:58 crc kubenswrapper[4971]: I1127 07:07:58.233745 4971 scope.go:117] "RemoveContainer" containerID="4ea7ba37bd0a52ab2a534c6462a88a2500567c8d2b3a28a82c443863bbfea682" Nov 27 07:07:58 crc kubenswrapper[4971]: I1127 07:07:58.235161 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-2xtbl" event={"ID":"b8cc8b1e-4eaf-40de-8385-0b5bec5db1ba","Type":"ContainerStarted","Data":"ae29f296759f1481d1f44709c74049fc1ae7b16b10c6dc0ae6ed634158922833"} Nov 27 07:07:58 crc kubenswrapper[4971]: I1127 07:07:58.248886 4971 scope.go:117] "RemoveContainer" containerID="e3ac7c8edf0fae481b69f43dfa6709b10177ac9c7a9846247495541943f7a765" Nov 27 07:07:58 crc kubenswrapper[4971]: I1127 07:07:58.263867 4971 scope.go:117] "RemoveContainer" containerID="893c9114643d5edf01a04bf9ebfd63f266fee1f5f3a2e1cae2468a276062b723" Nov 27 07:07:58 crc kubenswrapper[4971]: I1127 07:07:58.265720 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-2xtbl" podStartSLOduration=2.027203715 podStartE2EDuration="4.265699768s" podCreationTimestamp="2025-11-27 07:07:54 +0000 UTC" firstStartedPulling="2025-11-27 07:07:55.235332025 +0000 UTC m=+913.427375943" lastFinishedPulling="2025-11-27 07:07:57.473828078 +0000 UTC m=+915.665871996" observedRunningTime="2025-11-27 07:07:58.259993411 +0000 UTC m=+916.452037329" watchObservedRunningTime="2025-11-27 07:07:58.265699768 +0000 UTC m=+916.457743686" Nov 27 07:07:58 crc kubenswrapper[4971]: I1127 07:07:58.277008 4971 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8clq2"] Nov 27 07:07:58 crc kubenswrapper[4971]: I1127 07:07:58.280632 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8clq2"] Nov 27 07:07:58 crc kubenswrapper[4971]: I1127 07:07:58.283718 4971 scope.go:117] "RemoveContainer" containerID="4ea7ba37bd0a52ab2a534c6462a88a2500567c8d2b3a28a82c443863bbfea682" Nov 27 07:07:58 crc kubenswrapper[4971]: E1127 07:07:58.284248 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ea7ba37bd0a52ab2a534c6462a88a2500567c8d2b3a28a82c443863bbfea682\": container with ID starting with 4ea7ba37bd0a52ab2a534c6462a88a2500567c8d2b3a28a82c443863bbfea682 not found: ID does not exist" containerID="4ea7ba37bd0a52ab2a534c6462a88a2500567c8d2b3a28a82c443863bbfea682" Nov 27 07:07:58 crc kubenswrapper[4971]: I1127 07:07:58.284287 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ea7ba37bd0a52ab2a534c6462a88a2500567c8d2b3a28a82c443863bbfea682"} err="failed to get container status \"4ea7ba37bd0a52ab2a534c6462a88a2500567c8d2b3a28a82c443863bbfea682\": rpc error: code = NotFound desc = could not find container \"4ea7ba37bd0a52ab2a534c6462a88a2500567c8d2b3a28a82c443863bbfea682\": container with ID starting with 4ea7ba37bd0a52ab2a534c6462a88a2500567c8d2b3a28a82c443863bbfea682 not found: ID does not exist" Nov 27 07:07:58 crc kubenswrapper[4971]: I1127 07:07:58.284314 4971 scope.go:117] "RemoveContainer" containerID="e3ac7c8edf0fae481b69f43dfa6709b10177ac9c7a9846247495541943f7a765" Nov 27 07:07:58 crc kubenswrapper[4971]: E1127 07:07:58.284642 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3ac7c8edf0fae481b69f43dfa6709b10177ac9c7a9846247495541943f7a765\": container with ID starting with e3ac7c8edf0fae481b69f43dfa6709b10177ac9c7a9846247495541943f7a765 not found: ID does not exist" containerID="e3ac7c8edf0fae481b69f43dfa6709b10177ac9c7a9846247495541943f7a765" Nov 27 07:07:58 crc kubenswrapper[4971]: I1127 07:07:58.284683 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3ac7c8edf0fae481b69f43dfa6709b10177ac9c7a9846247495541943f7a765"} err="failed to get container status \"e3ac7c8edf0fae481b69f43dfa6709b10177ac9c7a9846247495541943f7a765\": rpc error: code = NotFound desc = could not find container \"e3ac7c8edf0fae481b69f43dfa6709b10177ac9c7a9846247495541943f7a765\": container with ID starting with e3ac7c8edf0fae481b69f43dfa6709b10177ac9c7a9846247495541943f7a765 not found: ID does not exist" Nov 27 07:07:58 crc kubenswrapper[4971]: I1127 07:07:58.284703 4971 scope.go:117] "RemoveContainer" containerID="893c9114643d5edf01a04bf9ebfd63f266fee1f5f3a2e1cae2468a276062b723" Nov 27 07:07:58 crc kubenswrapper[4971]: E1127 07:07:58.285015 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"893c9114643d5edf01a04bf9ebfd63f266fee1f5f3a2e1cae2468a276062b723\": container with ID starting with 893c9114643d5edf01a04bf9ebfd63f266fee1f5f3a2e1cae2468a276062b723 not found: ID does not exist" containerID="893c9114643d5edf01a04bf9ebfd63f266fee1f5f3a2e1cae2468a276062b723" Nov 27 07:07:58 crc kubenswrapper[4971]: I1127 07:07:58.285059 4971 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"893c9114643d5edf01a04bf9ebfd63f266fee1f5f3a2e1cae2468a276062b723"} err="failed to get container status \"893c9114643d5edf01a04bf9ebfd63f266fee1f5f3a2e1cae2468a276062b723\": rpc error: code = NotFound desc = could not find container \"893c9114643d5edf01a04bf9ebfd63f266fee1f5f3a2e1cae2468a276062b723\": container with ID starting with 893c9114643d5edf01a04bf9ebfd63f266fee1f5f3a2e1cae2468a276062b723 not found: ID does not exist" Nov 27 07:07:58 crc kubenswrapper[4971]: I1127 07:07:58.558571 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a22911c-f760-4516-b57c-2ed619ed1832" path="/var/lib/kubelet/pods/3a22911c-f760-4516-b57c-2ed619ed1832/volumes" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.249597 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-npfl8"] Nov 27 07:08:03 crc kubenswrapper[4971]: E1127 07:08:03.249847 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a22911c-f760-4516-b57c-2ed619ed1832" containerName="registry-server" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.249860 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a22911c-f760-4516-b57c-2ed619ed1832" containerName="registry-server" Nov 27 07:08:03 crc kubenswrapper[4971]: E1127 07:08:03.249872 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a22911c-f760-4516-b57c-2ed619ed1832" containerName="extract-utilities" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.249879 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a22911c-f760-4516-b57c-2ed619ed1832" containerName="extract-utilities" Nov 27 07:08:03 crc kubenswrapper[4971]: E1127 07:08:03.249903 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a22911c-f760-4516-b57c-2ed619ed1832" containerName="extract-content" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.249910 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a22911c-f760-4516-b57c-2ed619ed1832" containerName="extract-content" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.250005 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a22911c-f760-4516-b57c-2ed619ed1832" containerName="registry-server" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.250574 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-npfl8" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.253556 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-sndsr" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.266397 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-npfl8"] Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.283797 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-9kfld"] Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.285518 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-9kfld" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.299781 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4n9w2\" (UniqueName: \"kubernetes.io/projected/d83969e1-c2e8-40a2-b5a6-2226f3876a85-kube-api-access-4n9w2\") pod \"nmstate-metrics-7f946cbc9-npfl8\" (UID: \"d83969e1-c2e8-40a2-b5a6-2226f3876a85\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-npfl8" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.317140 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-ttwlt"] Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.318113 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-ttwlt" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.320779 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.324327 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-ttwlt"] Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.391831 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-8l7v6"] Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.392748 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-8l7v6" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.394563 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.394847 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.395839 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-nzz9g" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.400839 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/89b39e22-730b-4f81-a85e-b80ec9e7f19a-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-ttwlt\" (UID: \"89b39e22-730b-4f81-a85e-b80ec9e7f19a\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-ttwlt" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.400916 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc9n2\" (UniqueName: \"kubernetes.io/projected/1c7d7b3d-899a-4feb-b8a2-9a906b0878cd-kube-api-access-fc9n2\") pod \"nmstate-console-plugin-7fbb5f6569-8l7v6\" (UID: \"1c7d7b3d-899a-4feb-b8a2-9a906b0878cd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-8l7v6" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.401035 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/c63c6fa0-dedb-4b00-b001-da4dcfcd8058-ovs-socket\") pod \"nmstate-handler-9kfld\" (UID: \"c63c6fa0-dedb-4b00-b001-da4dcfcd8058\") " pod="openshift-nmstate/nmstate-handler-9kfld" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.401106 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/c63c6fa0-dedb-4b00-b001-da4dcfcd8058-dbus-socket\") pod \"nmstate-handler-9kfld\" (UID: \"c63c6fa0-dedb-4b00-b001-da4dcfcd8058\") " pod="openshift-nmstate/nmstate-handler-9kfld" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.401128 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4n9w2\" (UniqueName: \"kubernetes.io/projected/d83969e1-c2e8-40a2-b5a6-2226f3876a85-kube-api-access-4n9w2\") pod \"nmstate-metrics-7f946cbc9-npfl8\" (UID: \"d83969e1-c2e8-40a2-b5a6-2226f3876a85\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-npfl8" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.401149 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/c63c6fa0-dedb-4b00-b001-da4dcfcd8058-nmstate-lock\") pod \"nmstate-handler-9kfld\" (UID: \"c63c6fa0-dedb-4b00-b001-da4dcfcd8058\") " pod="openshift-nmstate/nmstate-handler-9kfld" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.401180 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pczqp\" (UniqueName: \"kubernetes.io/projected/c63c6fa0-dedb-4b00-b001-da4dcfcd8058-kube-api-access-pczqp\") pod \"nmstate-handler-9kfld\" (UID: \"c63c6fa0-dedb-4b00-b001-da4dcfcd8058\") " pod="openshift-nmstate/nmstate-handler-9kfld" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.401203 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ttqm\" (UniqueName: \"kubernetes.io/projected/89b39e22-730b-4f81-a85e-b80ec9e7f19a-kube-api-access-8ttqm\") pod \"nmstate-webhook-5f6d4c5ccb-ttwlt\" (UID: \"89b39e22-730b-4f81-a85e-b80ec9e7f19a\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-ttwlt" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.401218 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/1c7d7b3d-899a-4feb-b8a2-9a906b0878cd-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-8l7v6\" (UID: \"1c7d7b3d-899a-4feb-b8a2-9a906b0878cd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-8l7v6" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.401327 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/1c7d7b3d-899a-4feb-b8a2-9a906b0878cd-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-8l7v6\" (UID: \"1c7d7b3d-899a-4feb-b8a2-9a906b0878cd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-8l7v6" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.416001 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-8l7v6"] Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.439710 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4n9w2\" (UniqueName: \"kubernetes.io/projected/d83969e1-c2e8-40a2-b5a6-2226f3876a85-kube-api-access-4n9w2\") pod \"nmstate-metrics-7f946cbc9-npfl8\" (UID: \"d83969e1-c2e8-40a2-b5a6-2226f3876a85\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-npfl8" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.503958 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: 
\"kubernetes.io/host-path/c63c6fa0-dedb-4b00-b001-da4dcfcd8058-dbus-socket\") pod \"nmstate-handler-9kfld\" (UID: \"c63c6fa0-dedb-4b00-b001-da4dcfcd8058\") " pod="openshift-nmstate/nmstate-handler-9kfld" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.504007 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/c63c6fa0-dedb-4b00-b001-da4dcfcd8058-nmstate-lock\") pod \"nmstate-handler-9kfld\" (UID: \"c63c6fa0-dedb-4b00-b001-da4dcfcd8058\") " pod="openshift-nmstate/nmstate-handler-9kfld" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.504047 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pczqp\" (UniqueName: \"kubernetes.io/projected/c63c6fa0-dedb-4b00-b001-da4dcfcd8058-kube-api-access-pczqp\") pod \"nmstate-handler-9kfld\" (UID: \"c63c6fa0-dedb-4b00-b001-da4dcfcd8058\") " pod="openshift-nmstate/nmstate-handler-9kfld" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.504077 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ttqm\" (UniqueName: \"kubernetes.io/projected/89b39e22-730b-4f81-a85e-b80ec9e7f19a-kube-api-access-8ttqm\") pod \"nmstate-webhook-5f6d4c5ccb-ttwlt\" (UID: \"89b39e22-730b-4f81-a85e-b80ec9e7f19a\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-ttwlt" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.504099 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/1c7d7b3d-899a-4feb-b8a2-9a906b0878cd-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-8l7v6\" (UID: \"1c7d7b3d-899a-4feb-b8a2-9a906b0878cd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-8l7v6" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.504125 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/1c7d7b3d-899a-4feb-b8a2-9a906b0878cd-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-8l7v6\" (UID: \"1c7d7b3d-899a-4feb-b8a2-9a906b0878cd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-8l7v6" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.504156 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/89b39e22-730b-4f81-a85e-b80ec9e7f19a-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-ttwlt\" (UID: \"89b39e22-730b-4f81-a85e-b80ec9e7f19a\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-ttwlt" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.504184 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc9n2\" (UniqueName: \"kubernetes.io/projected/1c7d7b3d-899a-4feb-b8a2-9a906b0878cd-kube-api-access-fc9n2\") pod \"nmstate-console-plugin-7fbb5f6569-8l7v6\" (UID: \"1c7d7b3d-899a-4feb-b8a2-9a906b0878cd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-8l7v6" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.504209 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/c63c6fa0-dedb-4b00-b001-da4dcfcd8058-ovs-socket\") pod \"nmstate-handler-9kfld\" (UID: \"c63c6fa0-dedb-4b00-b001-da4dcfcd8058\") " pod="openshift-nmstate/nmstate-handler-9kfld" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.504291 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/c63c6fa0-dedb-4b00-b001-da4dcfcd8058-ovs-socket\") pod \"nmstate-handler-9kfld\" (UID: \"c63c6fa0-dedb-4b00-b001-da4dcfcd8058\") " pod="openshift-nmstate/nmstate-handler-9kfld" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.504590 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/c63c6fa0-dedb-4b00-b001-da4dcfcd8058-dbus-socket\") pod \"nmstate-handler-9kfld\" (UID: \"c63c6fa0-dedb-4b00-b001-da4dcfcd8058\") " pod="openshift-nmstate/nmstate-handler-9kfld" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.505609 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/1c7d7b3d-899a-4feb-b8a2-9a906b0878cd-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-8l7v6\" (UID: \"1c7d7b3d-899a-4feb-b8a2-9a906b0878cd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-8l7v6" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.505788 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/c63c6fa0-dedb-4b00-b001-da4dcfcd8058-nmstate-lock\") pod \"nmstate-handler-9kfld\" (UID: \"c63c6fa0-dedb-4b00-b001-da4dcfcd8058\") " pod="openshift-nmstate/nmstate-handler-9kfld" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.509108 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/89b39e22-730b-4f81-a85e-b80ec9e7f19a-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-ttwlt\" (UID: \"89b39e22-730b-4f81-a85e-b80ec9e7f19a\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-ttwlt" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.517222 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/1c7d7b3d-899a-4feb-b8a2-9a906b0878cd-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-8l7v6\" (UID: \"1c7d7b3d-899a-4feb-b8a2-9a906b0878cd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-8l7v6" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.535298 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pczqp\" (UniqueName: \"kubernetes.io/projected/c63c6fa0-dedb-4b00-b001-da4dcfcd8058-kube-api-access-pczqp\") pod \"nmstate-handler-9kfld\" (UID: \"c63c6fa0-dedb-4b00-b001-da4dcfcd8058\") " pod="openshift-nmstate/nmstate-handler-9kfld" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.535402 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc9n2\" (UniqueName: \"kubernetes.io/projected/1c7d7b3d-899a-4feb-b8a2-9a906b0878cd-kube-api-access-fc9n2\") pod \"nmstate-console-plugin-7fbb5f6569-8l7v6\" (UID: \"1c7d7b3d-899a-4feb-b8a2-9a906b0878cd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-8l7v6" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.541761 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ttqm\" (UniqueName: \"kubernetes.io/projected/89b39e22-730b-4f81-a85e-b80ec9e7f19a-kube-api-access-8ttqm\") pod \"nmstate-webhook-5f6d4c5ccb-ttwlt\" (UID: \"89b39e22-730b-4f81-a85e-b80ec9e7f19a\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-ttwlt" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.574585 4971 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-npfl8" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.607137 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-6d9cbf9545-7w6ns"] Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.607864 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-9kfld" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.610176 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.625038 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6d9cbf9545-7w6ns"] Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.632289 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-ttwlt" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.710123 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-8l7v6" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.811616 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6192a8a4-a880-4e26-a2ab-94510327602c-console-oauth-config\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.811695 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6192a8a4-a880-4e26-a2ab-94510327602c-service-ca\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.811745 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6192a8a4-a880-4e26-a2ab-94510327602c-console-serving-cert\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.811783 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6192a8a4-a880-4e26-a2ab-94510327602c-console-config\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.811807 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6192a8a4-a880-4e26-a2ab-94510327602c-trusted-ca-bundle\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.811836 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6192a8a4-a880-4e26-a2ab-94510327602c-oauth-serving-cert\") pod 
\"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.811867 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5zhk\" (UniqueName: \"kubernetes.io/projected/6192a8a4-a880-4e26-a2ab-94510327602c-kube-api-access-j5zhk\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.848105 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-npfl8"] Nov 27 07:08:03 crc kubenswrapper[4971]: W1127 07:08:03.854787 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd83969e1_c2e8_40a2_b5a6_2226f3876a85.slice/crio-55a5f2637585f7aba09ad6c206b774bfbfadffe4a627503b9f29814f3b693577 WatchSource:0}: Error finding container 55a5f2637585f7aba09ad6c206b774bfbfadffe4a627503b9f29814f3b693577: Status 404 returned error can't find the container with id 55a5f2637585f7aba09ad6c206b774bfbfadffe4a627503b9f29814f3b693577 Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.890866 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-ttwlt"] Nov 27 07:08:03 crc kubenswrapper[4971]: W1127 07:08:03.897044 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod89b39e22_730b_4f81_a85e_b80ec9e7f19a.slice/crio-896c74259fc831a6f91eddb2bcfa091a6aa761d24eb5c0f73824b62972d8123e WatchSource:0}: Error finding container 896c74259fc831a6f91eddb2bcfa091a6aa761d24eb5c0f73824b62972d8123e: Status 404 returned error can't find the container with id 896c74259fc831a6f91eddb2bcfa091a6aa761d24eb5c0f73824b62972d8123e Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.912343 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6192a8a4-a880-4e26-a2ab-94510327602c-console-serving-cert\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.912388 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6192a8a4-a880-4e26-a2ab-94510327602c-console-config\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.912413 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6192a8a4-a880-4e26-a2ab-94510327602c-trusted-ca-bundle\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.912437 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6192a8a4-a880-4e26-a2ab-94510327602c-oauth-serving-cert\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " 
pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.912471 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5zhk\" (UniqueName: \"kubernetes.io/projected/6192a8a4-a880-4e26-a2ab-94510327602c-kube-api-access-j5zhk\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.912502 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6192a8a4-a880-4e26-a2ab-94510327602c-console-oauth-config\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.912555 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6192a8a4-a880-4e26-a2ab-94510327602c-service-ca\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.913904 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6192a8a4-a880-4e26-a2ab-94510327602c-service-ca\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.913964 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6192a8a4-a880-4e26-a2ab-94510327602c-oauth-serving-cert\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.914016 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6192a8a4-a880-4e26-a2ab-94510327602c-console-config\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.914065 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6192a8a4-a880-4e26-a2ab-94510327602c-trusted-ca-bundle\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.922220 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6192a8a4-a880-4e26-a2ab-94510327602c-console-serving-cert\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.922390 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6192a8a4-a880-4e26-a2ab-94510327602c-console-oauth-config\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 
07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.933476 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5zhk\" (UniqueName: \"kubernetes.io/projected/6192a8a4-a880-4e26-a2ab-94510327602c-kube-api-access-j5zhk\") pod \"console-6d9cbf9545-7w6ns\" (UID: \"6192a8a4-a880-4e26-a2ab-94510327602c\") " pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.946454 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-8l7v6"] Nov 27 07:08:03 crc kubenswrapper[4971]: I1127 07:08:03.951477 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:03 crc kubenswrapper[4971]: W1127 07:08:03.978014 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1c7d7b3d_899a_4feb_b8a2_9a906b0878cd.slice/crio-96571d0b6eb89c7de449ba4a61694077534c0a6f7c0ae079707ca9a81336d957 WatchSource:0}: Error finding container 96571d0b6eb89c7de449ba4a61694077534c0a6f7c0ae079707ca9a81336d957: Status 404 returned error can't find the container with id 96571d0b6eb89c7de449ba4a61694077534c0a6f7c0ae079707ca9a81336d957 Nov 27 07:08:04 crc kubenswrapper[4971]: I1127 07:08:04.131062 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6d9cbf9545-7w6ns"] Nov 27 07:08:04 crc kubenswrapper[4971]: W1127 07:08:04.137679 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6192a8a4_a880_4e26_a2ab_94510327602c.slice/crio-c7a83ce88a87ca368b3bdd7e88484fc9225f7061bdef3cf8f0dd5284c7f9637b WatchSource:0}: Error finding container c7a83ce88a87ca368b3bdd7e88484fc9225f7061bdef3cf8f0dd5284c7f9637b: Status 404 returned error can't find the container with id c7a83ce88a87ca368b3bdd7e88484fc9225f7061bdef3cf8f0dd5284c7f9637b Nov 27 07:08:04 crc kubenswrapper[4971]: I1127 07:08:04.270617 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-8l7v6" event={"ID":"1c7d7b3d-899a-4feb-b8a2-9a906b0878cd","Type":"ContainerStarted","Data":"96571d0b6eb89c7de449ba4a61694077534c0a6f7c0ae079707ca9a81336d957"} Nov 27 07:08:04 crc kubenswrapper[4971]: I1127 07:08:04.271562 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-ttwlt" event={"ID":"89b39e22-730b-4f81-a85e-b80ec9e7f19a","Type":"ContainerStarted","Data":"896c74259fc831a6f91eddb2bcfa091a6aa761d24eb5c0f73824b62972d8123e"} Nov 27 07:08:04 crc kubenswrapper[4971]: I1127 07:08:04.272556 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-9kfld" event={"ID":"c63c6fa0-dedb-4b00-b001-da4dcfcd8058","Type":"ContainerStarted","Data":"3a7a4d69c7ccec95fdab5778785b100d8c5b321e563361a8e521e7342a8cbc9a"} Nov 27 07:08:04 crc kubenswrapper[4971]: I1127 07:08:04.273337 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-npfl8" event={"ID":"d83969e1-c2e8-40a2-b5a6-2226f3876a85","Type":"ContainerStarted","Data":"55a5f2637585f7aba09ad6c206b774bfbfadffe4a627503b9f29814f3b693577"} Nov 27 07:08:04 crc kubenswrapper[4971]: I1127 07:08:04.274228 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6d9cbf9545-7w6ns" 
event={"ID":"6192a8a4-a880-4e26-a2ab-94510327602c","Type":"ContainerStarted","Data":"c7a83ce88a87ca368b3bdd7e88484fc9225f7061bdef3cf8f0dd5284c7f9637b"} Nov 27 07:08:05 crc kubenswrapper[4971]: I1127 07:08:05.281568 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6d9cbf9545-7w6ns" event={"ID":"6192a8a4-a880-4e26-a2ab-94510327602c","Type":"ContainerStarted","Data":"907f682bcac503e45e433293a4898a48766a413faedb5706e1503a2cbf760cd9"} Nov 27 07:08:05 crc kubenswrapper[4971]: I1127 07:08:05.310006 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-6d9cbf9545-7w6ns" podStartSLOduration=2.309985176 podStartE2EDuration="2.309985176s" podCreationTimestamp="2025-11-27 07:08:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:08:05.301199558 +0000 UTC m=+923.493243476" watchObservedRunningTime="2025-11-27 07:08:05.309985176 +0000 UTC m=+923.502029084" Nov 27 07:08:08 crc kubenswrapper[4971]: I1127 07:08:08.298942 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-8l7v6" event={"ID":"1c7d7b3d-899a-4feb-b8a2-9a906b0878cd","Type":"ContainerStarted","Data":"40d0d159a97d8e0e1595583a087e13f7f8790f2ed918d3138fc816d7dca41edb"} Nov 27 07:08:08 crc kubenswrapper[4971]: I1127 07:08:08.301452 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-ttwlt" event={"ID":"89b39e22-730b-4f81-a85e-b80ec9e7f19a","Type":"ContainerStarted","Data":"ba1467f5ec04f6127e102eeed441b3d03b7b5025c58d9048f1da7bb470722190"} Nov 27 07:08:08 crc kubenswrapper[4971]: I1127 07:08:08.302788 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-ttwlt" Nov 27 07:08:08 crc kubenswrapper[4971]: I1127 07:08:08.304617 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-9kfld" event={"ID":"c63c6fa0-dedb-4b00-b001-da4dcfcd8058","Type":"ContainerStarted","Data":"cd1b9c19e6b2d821741282548ea918278e422f20d468ae60f6bd2a8bc04d77a0"} Nov 27 07:08:08 crc kubenswrapper[4971]: I1127 07:08:08.305309 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-9kfld" Nov 27 07:08:08 crc kubenswrapper[4971]: I1127 07:08:08.306747 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-npfl8" event={"ID":"d83969e1-c2e8-40a2-b5a6-2226f3876a85","Type":"ContainerStarted","Data":"f95c9a524e7f584927dbe61d45adc04e9a8b0cd87f429893be9c208cd918a832"} Nov 27 07:08:08 crc kubenswrapper[4971]: I1127 07:08:08.320082 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-8l7v6" podStartSLOduration=1.56300762 podStartE2EDuration="5.320060234s" podCreationTimestamp="2025-11-27 07:08:03 +0000 UTC" firstStartedPulling="2025-11-27 07:08:03.980341468 +0000 UTC m=+922.172385386" lastFinishedPulling="2025-11-27 07:08:07.737394082 +0000 UTC m=+925.929438000" observedRunningTime="2025-11-27 07:08:08.316029546 +0000 UTC m=+926.508073454" watchObservedRunningTime="2025-11-27 07:08:08.320060234 +0000 UTC m=+926.512104152" Nov 27 07:08:08 crc kubenswrapper[4971]: I1127 07:08:08.359505 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-ttwlt" 
podStartSLOduration=1.5227866319999999 podStartE2EDuration="5.359476909s" podCreationTimestamp="2025-11-27 07:08:03 +0000 UTC" firstStartedPulling="2025-11-27 07:08:03.900688905 +0000 UTC m=+922.092732823" lastFinishedPulling="2025-11-27 07:08:07.737379182 +0000 UTC m=+925.929423100" observedRunningTime="2025-11-27 07:08:08.340176304 +0000 UTC m=+926.532220242" watchObservedRunningTime="2025-11-27 07:08:08.359476909 +0000 UTC m=+926.551520827" Nov 27 07:08:08 crc kubenswrapper[4971]: I1127 07:08:08.362585 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-9kfld" podStartSLOduration=1.268969729 podStartE2EDuration="5.362574529s" podCreationTimestamp="2025-11-27 07:08:03 +0000 UTC" firstStartedPulling="2025-11-27 07:08:03.666738104 +0000 UTC m=+921.858782022" lastFinishedPulling="2025-11-27 07:08:07.760342904 +0000 UTC m=+925.952386822" observedRunningTime="2025-11-27 07:08:08.358795479 +0000 UTC m=+926.550839407" watchObservedRunningTime="2025-11-27 07:08:08.362574529 +0000 UTC m=+926.554618447" Nov 27 07:08:11 crc kubenswrapper[4971]: I1127 07:08:11.323817 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-npfl8" event={"ID":"d83969e1-c2e8-40a2-b5a6-2226f3876a85","Type":"ContainerStarted","Data":"586193d4242c5015be88fb1442657f998e5cae04c4cb046323e5d22eba59e5de"} Nov 27 07:08:11 crc kubenswrapper[4971]: I1127 07:08:11.697749 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-99wp6"] Nov 27 07:08:11 crc kubenswrapper[4971]: I1127 07:08:11.708630 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-99wp6" Nov 27 07:08:11 crc kubenswrapper[4971]: I1127 07:08:11.722738 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-99wp6"] Nov 27 07:08:11 crc kubenswrapper[4971]: I1127 07:08:11.830411 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa5233d3-61bb-4563-b052-499f9ff3ea48-utilities\") pod \"certified-operators-99wp6\" (UID: \"fa5233d3-61bb-4563-b052-499f9ff3ea48\") " pod="openshift-marketplace/certified-operators-99wp6" Nov 27 07:08:11 crc kubenswrapper[4971]: I1127 07:08:11.830513 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa5233d3-61bb-4563-b052-499f9ff3ea48-catalog-content\") pod \"certified-operators-99wp6\" (UID: \"fa5233d3-61bb-4563-b052-499f9ff3ea48\") " pod="openshift-marketplace/certified-operators-99wp6" Nov 27 07:08:11 crc kubenswrapper[4971]: I1127 07:08:11.830569 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdf5c\" (UniqueName: \"kubernetes.io/projected/fa5233d3-61bb-4563-b052-499f9ff3ea48-kube-api-access-wdf5c\") pod \"certified-operators-99wp6\" (UID: \"fa5233d3-61bb-4563-b052-499f9ff3ea48\") " pod="openshift-marketplace/certified-operators-99wp6" Nov 27 07:08:11 crc kubenswrapper[4971]: I1127 07:08:11.932057 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa5233d3-61bb-4563-b052-499f9ff3ea48-utilities\") pod \"certified-operators-99wp6\" (UID: \"fa5233d3-61bb-4563-b052-499f9ff3ea48\") " 
pod="openshift-marketplace/certified-operators-99wp6" Nov 27 07:08:11 crc kubenswrapper[4971]: I1127 07:08:11.932140 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa5233d3-61bb-4563-b052-499f9ff3ea48-catalog-content\") pod \"certified-operators-99wp6\" (UID: \"fa5233d3-61bb-4563-b052-499f9ff3ea48\") " pod="openshift-marketplace/certified-operators-99wp6" Nov 27 07:08:11 crc kubenswrapper[4971]: I1127 07:08:11.932167 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdf5c\" (UniqueName: \"kubernetes.io/projected/fa5233d3-61bb-4563-b052-499f9ff3ea48-kube-api-access-wdf5c\") pod \"certified-operators-99wp6\" (UID: \"fa5233d3-61bb-4563-b052-499f9ff3ea48\") " pod="openshift-marketplace/certified-operators-99wp6" Nov 27 07:08:11 crc kubenswrapper[4971]: I1127 07:08:11.932688 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa5233d3-61bb-4563-b052-499f9ff3ea48-utilities\") pod \"certified-operators-99wp6\" (UID: \"fa5233d3-61bb-4563-b052-499f9ff3ea48\") " pod="openshift-marketplace/certified-operators-99wp6" Nov 27 07:08:11 crc kubenswrapper[4971]: I1127 07:08:11.932811 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa5233d3-61bb-4563-b052-499f9ff3ea48-catalog-content\") pod \"certified-operators-99wp6\" (UID: \"fa5233d3-61bb-4563-b052-499f9ff3ea48\") " pod="openshift-marketplace/certified-operators-99wp6" Nov 27 07:08:11 crc kubenswrapper[4971]: I1127 07:08:11.952029 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdf5c\" (UniqueName: \"kubernetes.io/projected/fa5233d3-61bb-4563-b052-499f9ff3ea48-kube-api-access-wdf5c\") pod \"certified-operators-99wp6\" (UID: \"fa5233d3-61bb-4563-b052-499f9ff3ea48\") " pod="openshift-marketplace/certified-operators-99wp6" Nov 27 07:08:12 crc kubenswrapper[4971]: I1127 07:08:12.035486 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-99wp6" Nov 27 07:08:12 crc kubenswrapper[4971]: I1127 07:08:12.285899 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-99wp6"] Nov 27 07:08:12 crc kubenswrapper[4971]: W1127 07:08:12.291350 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa5233d3_61bb_4563_b052_499f9ff3ea48.slice/crio-6bd92b6412ec21f1b355f58ab813e63c1452bde9661613159b935473993fee05 WatchSource:0}: Error finding container 6bd92b6412ec21f1b355f58ab813e63c1452bde9661613159b935473993fee05: Status 404 returned error can't find the container with id 6bd92b6412ec21f1b355f58ab813e63c1452bde9661613159b935473993fee05 Nov 27 07:08:12 crc kubenswrapper[4971]: I1127 07:08:12.331860 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-99wp6" event={"ID":"fa5233d3-61bb-4563-b052-499f9ff3ea48","Type":"ContainerStarted","Data":"6bd92b6412ec21f1b355f58ab813e63c1452bde9661613159b935473993fee05"} Nov 27 07:08:12 crc kubenswrapper[4971]: I1127 07:08:12.349173 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-npfl8" podStartSLOduration=2.06391969 podStartE2EDuration="9.349150595s" podCreationTimestamp="2025-11-27 07:08:03 +0000 UTC" firstStartedPulling="2025-11-27 07:08:03.85814271 +0000 UTC m=+922.050186628" lastFinishedPulling="2025-11-27 07:08:11.143373615 +0000 UTC m=+929.335417533" observedRunningTime="2025-11-27 07:08:12.348125735 +0000 UTC m=+930.540169673" watchObservedRunningTime="2025-11-27 07:08:12.349150595 +0000 UTC m=+930.541194513" Nov 27 07:08:13 crc kubenswrapper[4971]: I1127 07:08:13.339807 4971 generic.go:334] "Generic (PLEG): container finished" podID="fa5233d3-61bb-4563-b052-499f9ff3ea48" containerID="4d04446432a9c5ae191215e07c96112bff073213903f23c03da760be1e086665" exitCode=0 Nov 27 07:08:13 crc kubenswrapper[4971]: I1127 07:08:13.339928 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-99wp6" event={"ID":"fa5233d3-61bb-4563-b052-499f9ff3ea48","Type":"ContainerDied","Data":"4d04446432a9c5ae191215e07c96112bff073213903f23c03da760be1e086665"} Nov 27 07:08:13 crc kubenswrapper[4971]: I1127 07:08:13.632886 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-9kfld" Nov 27 07:08:13 crc kubenswrapper[4971]: I1127 07:08:13.952101 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:13 crc kubenswrapper[4971]: I1127 07:08:13.952196 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:13 crc kubenswrapper[4971]: I1127 07:08:13.960089 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:14 crc kubenswrapper[4971]: I1127 07:08:14.347603 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-99wp6" event={"ID":"fa5233d3-61bb-4563-b052-499f9ff3ea48","Type":"ContainerStarted","Data":"eebf4f990fa001f98968dc6fbc20a53dbb9a290359207ee37a52493b8e443140"} Nov 27 07:08:14 crc kubenswrapper[4971]: I1127 07:08:14.351072 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-console/console-6d9cbf9545-7w6ns" Nov 27 07:08:14 crc kubenswrapper[4971]: I1127 07:08:14.428358 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-blqcq"] Nov 27 07:08:15 crc kubenswrapper[4971]: I1127 07:08:15.355147 4971 generic.go:334] "Generic (PLEG): container finished" podID="fa5233d3-61bb-4563-b052-499f9ff3ea48" containerID="eebf4f990fa001f98968dc6fbc20a53dbb9a290359207ee37a52493b8e443140" exitCode=0 Nov 27 07:08:15 crc kubenswrapper[4971]: I1127 07:08:15.355195 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-99wp6" event={"ID":"fa5233d3-61bb-4563-b052-499f9ff3ea48","Type":"ContainerDied","Data":"eebf4f990fa001f98968dc6fbc20a53dbb9a290359207ee37a52493b8e443140"} Nov 27 07:08:16 crc kubenswrapper[4971]: I1127 07:08:16.364236 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-99wp6" event={"ID":"fa5233d3-61bb-4563-b052-499f9ff3ea48","Type":"ContainerStarted","Data":"85d2cbbf2007c7a59418ed1f9d4c5ede1b4aa7ee69a23a51ef149b85c35011ac"} Nov 27 07:08:16 crc kubenswrapper[4971]: I1127 07:08:16.382469 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-99wp6" podStartSLOduration=2.823412238 podStartE2EDuration="5.382450919s" podCreationTimestamp="2025-11-27 07:08:11 +0000 UTC" firstStartedPulling="2025-11-27 07:08:13.341634659 +0000 UTC m=+931.533678577" lastFinishedPulling="2025-11-27 07:08:15.90067334 +0000 UTC m=+934.092717258" observedRunningTime="2025-11-27 07:08:16.379977076 +0000 UTC m=+934.572020994" watchObservedRunningTime="2025-11-27 07:08:16.382450919 +0000 UTC m=+934.574494837" Nov 27 07:08:22 crc kubenswrapper[4971]: I1127 07:08:22.035752 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-99wp6" Nov 27 07:08:22 crc kubenswrapper[4971]: I1127 07:08:22.036796 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-99wp6" Nov 27 07:08:22 crc kubenswrapper[4971]: I1127 07:08:22.078912 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-99wp6" Nov 27 07:08:22 crc kubenswrapper[4971]: I1127 07:08:22.450879 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-99wp6" Nov 27 07:08:22 crc kubenswrapper[4971]: I1127 07:08:22.503708 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-99wp6"] Nov 27 07:08:23 crc kubenswrapper[4971]: I1127 07:08:23.640509 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-ttwlt" Nov 27 07:08:24 crc kubenswrapper[4971]: I1127 07:08:24.416423 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-99wp6" podUID="fa5233d3-61bb-4563-b052-499f9ff3ea48" containerName="registry-server" containerID="cri-o://85d2cbbf2007c7a59418ed1f9d4c5ede1b4aa7ee69a23a51ef149b85c35011ac" gracePeriod=2 Nov 27 07:08:24 crc kubenswrapper[4971]: I1127 07:08:24.759461 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-99wp6" Nov 27 07:08:24 crc kubenswrapper[4971]: I1127 07:08:24.925258 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa5233d3-61bb-4563-b052-499f9ff3ea48-catalog-content\") pod \"fa5233d3-61bb-4563-b052-499f9ff3ea48\" (UID: \"fa5233d3-61bb-4563-b052-499f9ff3ea48\") " Nov 27 07:08:24 crc kubenswrapper[4971]: I1127 07:08:24.925358 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wdf5c\" (UniqueName: \"kubernetes.io/projected/fa5233d3-61bb-4563-b052-499f9ff3ea48-kube-api-access-wdf5c\") pod \"fa5233d3-61bb-4563-b052-499f9ff3ea48\" (UID: \"fa5233d3-61bb-4563-b052-499f9ff3ea48\") " Nov 27 07:08:24 crc kubenswrapper[4971]: I1127 07:08:24.925427 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa5233d3-61bb-4563-b052-499f9ff3ea48-utilities\") pod \"fa5233d3-61bb-4563-b052-499f9ff3ea48\" (UID: \"fa5233d3-61bb-4563-b052-499f9ff3ea48\") " Nov 27 07:08:24 crc kubenswrapper[4971]: I1127 07:08:24.926438 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa5233d3-61bb-4563-b052-499f9ff3ea48-utilities" (OuterVolumeSpecName: "utilities") pod "fa5233d3-61bb-4563-b052-499f9ff3ea48" (UID: "fa5233d3-61bb-4563-b052-499f9ff3ea48"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:08:24 crc kubenswrapper[4971]: I1127 07:08:24.932093 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa5233d3-61bb-4563-b052-499f9ff3ea48-kube-api-access-wdf5c" (OuterVolumeSpecName: "kube-api-access-wdf5c") pod "fa5233d3-61bb-4563-b052-499f9ff3ea48" (UID: "fa5233d3-61bb-4563-b052-499f9ff3ea48"). InnerVolumeSpecName "kube-api-access-wdf5c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:08:24 crc kubenswrapper[4971]: I1127 07:08:24.993500 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa5233d3-61bb-4563-b052-499f9ff3ea48-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fa5233d3-61bb-4563-b052-499f9ff3ea48" (UID: "fa5233d3-61bb-4563-b052-499f9ff3ea48"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:08:25 crc kubenswrapper[4971]: I1127 07:08:25.027762 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa5233d3-61bb-4563-b052-499f9ff3ea48-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 07:08:25 crc kubenswrapper[4971]: I1127 07:08:25.027806 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wdf5c\" (UniqueName: \"kubernetes.io/projected/fa5233d3-61bb-4563-b052-499f9ff3ea48-kube-api-access-wdf5c\") on node \"crc\" DevicePath \"\"" Nov 27 07:08:25 crc kubenswrapper[4971]: I1127 07:08:25.027819 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa5233d3-61bb-4563-b052-499f9ff3ea48-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 07:08:25 crc kubenswrapper[4971]: I1127 07:08:25.428484 4971 generic.go:334] "Generic (PLEG): container finished" podID="fa5233d3-61bb-4563-b052-499f9ff3ea48" containerID="85d2cbbf2007c7a59418ed1f9d4c5ede1b4aa7ee69a23a51ef149b85c35011ac" exitCode=0 Nov 27 07:08:25 crc kubenswrapper[4971]: I1127 07:08:25.428924 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-99wp6" event={"ID":"fa5233d3-61bb-4563-b052-499f9ff3ea48","Type":"ContainerDied","Data":"85d2cbbf2007c7a59418ed1f9d4c5ede1b4aa7ee69a23a51ef149b85c35011ac"} Nov 27 07:08:25 crc kubenswrapper[4971]: I1127 07:08:25.428966 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-99wp6" event={"ID":"fa5233d3-61bb-4563-b052-499f9ff3ea48","Type":"ContainerDied","Data":"6bd92b6412ec21f1b355f58ab813e63c1452bde9661613159b935473993fee05"} Nov 27 07:08:25 crc kubenswrapper[4971]: I1127 07:08:25.428989 4971 scope.go:117] "RemoveContainer" containerID="85d2cbbf2007c7a59418ed1f9d4c5ede1b4aa7ee69a23a51ef149b85c35011ac" Nov 27 07:08:25 crc kubenswrapper[4971]: I1127 07:08:25.429188 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-99wp6" Nov 27 07:08:25 crc kubenswrapper[4971]: I1127 07:08:25.464630 4971 scope.go:117] "RemoveContainer" containerID="eebf4f990fa001f98968dc6fbc20a53dbb9a290359207ee37a52493b8e443140" Nov 27 07:08:25 crc kubenswrapper[4971]: I1127 07:08:25.470503 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-99wp6"] Nov 27 07:08:25 crc kubenswrapper[4971]: I1127 07:08:25.474274 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-99wp6"] Nov 27 07:08:25 crc kubenswrapper[4971]: I1127 07:08:25.484408 4971 scope.go:117] "RemoveContainer" containerID="4d04446432a9c5ae191215e07c96112bff073213903f23c03da760be1e086665" Nov 27 07:08:25 crc kubenswrapper[4971]: I1127 07:08:25.503076 4971 scope.go:117] "RemoveContainer" containerID="85d2cbbf2007c7a59418ed1f9d4c5ede1b4aa7ee69a23a51ef149b85c35011ac" Nov 27 07:08:25 crc kubenswrapper[4971]: E1127 07:08:25.503665 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85d2cbbf2007c7a59418ed1f9d4c5ede1b4aa7ee69a23a51ef149b85c35011ac\": container with ID starting with 85d2cbbf2007c7a59418ed1f9d4c5ede1b4aa7ee69a23a51ef149b85c35011ac not found: ID does not exist" containerID="85d2cbbf2007c7a59418ed1f9d4c5ede1b4aa7ee69a23a51ef149b85c35011ac" Nov 27 07:08:25 crc kubenswrapper[4971]: I1127 07:08:25.503712 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85d2cbbf2007c7a59418ed1f9d4c5ede1b4aa7ee69a23a51ef149b85c35011ac"} err="failed to get container status \"85d2cbbf2007c7a59418ed1f9d4c5ede1b4aa7ee69a23a51ef149b85c35011ac\": rpc error: code = NotFound desc = could not find container \"85d2cbbf2007c7a59418ed1f9d4c5ede1b4aa7ee69a23a51ef149b85c35011ac\": container with ID starting with 85d2cbbf2007c7a59418ed1f9d4c5ede1b4aa7ee69a23a51ef149b85c35011ac not found: ID does not exist" Nov 27 07:08:25 crc kubenswrapper[4971]: I1127 07:08:25.503754 4971 scope.go:117] "RemoveContainer" containerID="eebf4f990fa001f98968dc6fbc20a53dbb9a290359207ee37a52493b8e443140" Nov 27 07:08:25 crc kubenswrapper[4971]: E1127 07:08:25.504326 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eebf4f990fa001f98968dc6fbc20a53dbb9a290359207ee37a52493b8e443140\": container with ID starting with eebf4f990fa001f98968dc6fbc20a53dbb9a290359207ee37a52493b8e443140 not found: ID does not exist" containerID="eebf4f990fa001f98968dc6fbc20a53dbb9a290359207ee37a52493b8e443140" Nov 27 07:08:25 crc kubenswrapper[4971]: I1127 07:08:25.504377 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eebf4f990fa001f98968dc6fbc20a53dbb9a290359207ee37a52493b8e443140"} err="failed to get container status \"eebf4f990fa001f98968dc6fbc20a53dbb9a290359207ee37a52493b8e443140\": rpc error: code = NotFound desc = could not find container \"eebf4f990fa001f98968dc6fbc20a53dbb9a290359207ee37a52493b8e443140\": container with ID starting with eebf4f990fa001f98968dc6fbc20a53dbb9a290359207ee37a52493b8e443140 not found: ID does not exist" Nov 27 07:08:25 crc kubenswrapper[4971]: I1127 07:08:25.504412 4971 scope.go:117] "RemoveContainer" containerID="4d04446432a9c5ae191215e07c96112bff073213903f23c03da760be1e086665" Nov 27 07:08:25 crc kubenswrapper[4971]: E1127 07:08:25.504825 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"4d04446432a9c5ae191215e07c96112bff073213903f23c03da760be1e086665\": container with ID starting with 4d04446432a9c5ae191215e07c96112bff073213903f23c03da760be1e086665 not found: ID does not exist" containerID="4d04446432a9c5ae191215e07c96112bff073213903f23c03da760be1e086665" Nov 27 07:08:25 crc kubenswrapper[4971]: I1127 07:08:25.504895 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d04446432a9c5ae191215e07c96112bff073213903f23c03da760be1e086665"} err="failed to get container status \"4d04446432a9c5ae191215e07c96112bff073213903f23c03da760be1e086665\": rpc error: code = NotFound desc = could not find container \"4d04446432a9c5ae191215e07c96112bff073213903f23c03da760be1e086665\": container with ID starting with 4d04446432a9c5ae191215e07c96112bff073213903f23c03da760be1e086665 not found: ID does not exist" Nov 27 07:08:26 crc kubenswrapper[4971]: I1127 07:08:26.413640 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:08:26 crc kubenswrapper[4971]: I1127 07:08:26.413710 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:08:26 crc kubenswrapper[4971]: I1127 07:08:26.559652 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa5233d3-61bb-4563-b052-499f9ff3ea48" path="/var/lib/kubelet/pods/fa5233d3-61bb-4563-b052-499f9ff3ea48/volumes" Nov 27 07:08:38 crc kubenswrapper[4971]: I1127 07:08:38.952628 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm"] Nov 27 07:08:38 crc kubenswrapper[4971]: E1127 07:08:38.954682 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa5233d3-61bb-4563-b052-499f9ff3ea48" containerName="extract-content" Nov 27 07:08:38 crc kubenswrapper[4971]: I1127 07:08:38.954765 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa5233d3-61bb-4563-b052-499f9ff3ea48" containerName="extract-content" Nov 27 07:08:38 crc kubenswrapper[4971]: E1127 07:08:38.954825 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa5233d3-61bb-4563-b052-499f9ff3ea48" containerName="extract-utilities" Nov 27 07:08:38 crc kubenswrapper[4971]: I1127 07:08:38.954877 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa5233d3-61bb-4563-b052-499f9ff3ea48" containerName="extract-utilities" Nov 27 07:08:38 crc kubenswrapper[4971]: E1127 07:08:38.954945 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa5233d3-61bb-4563-b052-499f9ff3ea48" containerName="registry-server" Nov 27 07:08:38 crc kubenswrapper[4971]: I1127 07:08:38.954994 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa5233d3-61bb-4563-b052-499f9ff3ea48" containerName="registry-server" Nov 27 07:08:38 crc kubenswrapper[4971]: I1127 07:08:38.955154 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa5233d3-61bb-4563-b052-499f9ff3ea48" containerName="registry-server" Nov 27 
07:08:38 crc kubenswrapper[4971]: I1127 07:08:38.956120 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm" Nov 27 07:08:38 crc kubenswrapper[4971]: I1127 07:08:38.959156 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 27 07:08:38 crc kubenswrapper[4971]: I1127 07:08:38.970489 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm"] Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.054497 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/39c338ba-f061-448c-8f8d-0699d5ea38ac-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm\" (UID: \"39c338ba-f061-448c-8f8d-0699d5ea38ac\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.054564 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/39c338ba-f061-448c-8f8d-0699d5ea38ac-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm\" (UID: \"39c338ba-f061-448c-8f8d-0699d5ea38ac\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.054723 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pq2h5\" (UniqueName: \"kubernetes.io/projected/39c338ba-f061-448c-8f8d-0699d5ea38ac-kube-api-access-pq2h5\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm\" (UID: \"39c338ba-f061-448c-8f8d-0699d5ea38ac\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.156200 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/39c338ba-f061-448c-8f8d-0699d5ea38ac-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm\" (UID: \"39c338ba-f061-448c-8f8d-0699d5ea38ac\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.156319 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pq2h5\" (UniqueName: \"kubernetes.io/projected/39c338ba-f061-448c-8f8d-0699d5ea38ac-kube-api-access-pq2h5\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm\" (UID: \"39c338ba-f061-448c-8f8d-0699d5ea38ac\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.156357 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/39c338ba-f061-448c-8f8d-0699d5ea38ac-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm\" (UID: \"39c338ba-f061-448c-8f8d-0699d5ea38ac\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.156879 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/39c338ba-f061-448c-8f8d-0699d5ea38ac-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm\" (UID: \"39c338ba-f061-448c-8f8d-0699d5ea38ac\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.156909 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/39c338ba-f061-448c-8f8d-0699d5ea38ac-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm\" (UID: \"39c338ba-f061-448c-8f8d-0699d5ea38ac\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.178226 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pq2h5\" (UniqueName: \"kubernetes.io/projected/39c338ba-f061-448c-8f8d-0699d5ea38ac-kube-api-access-pq2h5\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm\" (UID: \"39c338ba-f061-448c-8f8d-0699d5ea38ac\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.273104 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.470488 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-blqcq" podUID="b2038ca5-beb4-434e-81a7-16a67ad9382d" containerName="console" containerID="cri-o://f85c70c2928efa4403f20f7bddd4bd8272de9de70c00d6bb4dd59b51f02b83f7" gracePeriod=15 Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.485686 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm"] Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.516581 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm" event={"ID":"39c338ba-f061-448c-8f8d-0699d5ea38ac","Type":"ContainerStarted","Data":"210e96ab5d9f71f000cca3c4b174cb83f6d43e5edae9b76fab28ab58eb5b007a"} Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.865517 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-blqcq_b2038ca5-beb4-434e-81a7-16a67ad9382d/console/0.log" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.865930 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-blqcq" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.968266 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b2038ca5-beb4-434e-81a7-16a67ad9382d-console-serving-cert\") pod \"b2038ca5-beb4-434e-81a7-16a67ad9382d\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.968395 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b2038ca5-beb4-434e-81a7-16a67ad9382d-console-oauth-config\") pod \"b2038ca5-beb4-434e-81a7-16a67ad9382d\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.968417 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-trusted-ca-bundle\") pod \"b2038ca5-beb4-434e-81a7-16a67ad9382d\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.968449 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-oauth-serving-cert\") pod \"b2038ca5-beb4-434e-81a7-16a67ad9382d\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.968477 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-service-ca\") pod \"b2038ca5-beb4-434e-81a7-16a67ad9382d\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.968509 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-console-config\") pod \"b2038ca5-beb4-434e-81a7-16a67ad9382d\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.968586 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75btm\" (UniqueName: \"kubernetes.io/projected/b2038ca5-beb4-434e-81a7-16a67ad9382d-kube-api-access-75btm\") pod \"b2038ca5-beb4-434e-81a7-16a67ad9382d\" (UID: \"b2038ca5-beb4-434e-81a7-16a67ad9382d\") " Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.969328 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-console-config" (OuterVolumeSpecName: "console-config") pod "b2038ca5-beb4-434e-81a7-16a67ad9382d" (UID: "b2038ca5-beb4-434e-81a7-16a67ad9382d"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.969336 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "b2038ca5-beb4-434e-81a7-16a67ad9382d" (UID: "b2038ca5-beb4-434e-81a7-16a67ad9382d"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.969385 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "b2038ca5-beb4-434e-81a7-16a67ad9382d" (UID: "b2038ca5-beb4-434e-81a7-16a67ad9382d"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.969617 4971 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.969631 4971 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.969640 4971 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-console-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.969880 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-service-ca" (OuterVolumeSpecName: "service-ca") pod "b2038ca5-beb4-434e-81a7-16a67ad9382d" (UID: "b2038ca5-beb4-434e-81a7-16a67ad9382d"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.974169 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2038ca5-beb4-434e-81a7-16a67ad9382d-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "b2038ca5-beb4-434e-81a7-16a67ad9382d" (UID: "b2038ca5-beb4-434e-81a7-16a67ad9382d"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.974275 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2038ca5-beb4-434e-81a7-16a67ad9382d-kube-api-access-75btm" (OuterVolumeSpecName: "kube-api-access-75btm") pod "b2038ca5-beb4-434e-81a7-16a67ad9382d" (UID: "b2038ca5-beb4-434e-81a7-16a67ad9382d"). InnerVolumeSpecName "kube-api-access-75btm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:08:39 crc kubenswrapper[4971]: I1127 07:08:39.975485 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2038ca5-beb4-434e-81a7-16a67ad9382d-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "b2038ca5-beb4-434e-81a7-16a67ad9382d" (UID: "b2038ca5-beb4-434e-81a7-16a67ad9382d"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:08:40 crc kubenswrapper[4971]: I1127 07:08:40.071094 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75btm\" (UniqueName: \"kubernetes.io/projected/b2038ca5-beb4-434e-81a7-16a67ad9382d-kube-api-access-75btm\") on node \"crc\" DevicePath \"\"" Nov 27 07:08:40 crc kubenswrapper[4971]: I1127 07:08:40.071140 4971 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b2038ca5-beb4-434e-81a7-16a67ad9382d-console-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 27 07:08:40 crc kubenswrapper[4971]: I1127 07:08:40.071152 4971 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b2038ca5-beb4-434e-81a7-16a67ad9382d-console-oauth-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:08:40 crc kubenswrapper[4971]: I1127 07:08:40.071164 4971 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b2038ca5-beb4-434e-81a7-16a67ad9382d-service-ca\") on node \"crc\" DevicePath \"\"" Nov 27 07:08:40 crc kubenswrapper[4971]: I1127 07:08:40.524699 4971 generic.go:334] "Generic (PLEG): container finished" podID="39c338ba-f061-448c-8f8d-0699d5ea38ac" containerID="f928e5a198c532da39c10efbc1a1946d0d3fafda7023578a2754098732b6bd4a" exitCode=0 Nov 27 07:08:40 crc kubenswrapper[4971]: I1127 07:08:40.524763 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm" event={"ID":"39c338ba-f061-448c-8f8d-0699d5ea38ac","Type":"ContainerDied","Data":"f928e5a198c532da39c10efbc1a1946d0d3fafda7023578a2754098732b6bd4a"} Nov 27 07:08:40 crc kubenswrapper[4971]: I1127 07:08:40.527706 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-blqcq_b2038ca5-beb4-434e-81a7-16a67ad9382d/console/0.log" Nov 27 07:08:40 crc kubenswrapper[4971]: I1127 07:08:40.527753 4971 generic.go:334] "Generic (PLEG): container finished" podID="b2038ca5-beb4-434e-81a7-16a67ad9382d" containerID="f85c70c2928efa4403f20f7bddd4bd8272de9de70c00d6bb4dd59b51f02b83f7" exitCode=2 Nov 27 07:08:40 crc kubenswrapper[4971]: I1127 07:08:40.527806 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-blqcq" event={"ID":"b2038ca5-beb4-434e-81a7-16a67ad9382d","Type":"ContainerDied","Data":"f85c70c2928efa4403f20f7bddd4bd8272de9de70c00d6bb4dd59b51f02b83f7"} Nov 27 07:08:40 crc kubenswrapper[4971]: I1127 07:08:40.527822 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-blqcq" Nov 27 07:08:40 crc kubenswrapper[4971]: I1127 07:08:40.527863 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-blqcq" event={"ID":"b2038ca5-beb4-434e-81a7-16a67ad9382d","Type":"ContainerDied","Data":"7da05e0af60eef4024acf504cb088644335393c753db592c3cbb247548a97f44"} Nov 27 07:08:40 crc kubenswrapper[4971]: I1127 07:08:40.527889 4971 scope.go:117] "RemoveContainer" containerID="f85c70c2928efa4403f20f7bddd4bd8272de9de70c00d6bb4dd59b51f02b83f7" Nov 27 07:08:40 crc kubenswrapper[4971]: I1127 07:08:40.556655 4971 scope.go:117] "RemoveContainer" containerID="f85c70c2928efa4403f20f7bddd4bd8272de9de70c00d6bb4dd59b51f02b83f7" Nov 27 07:08:40 crc kubenswrapper[4971]: E1127 07:08:40.558005 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f85c70c2928efa4403f20f7bddd4bd8272de9de70c00d6bb4dd59b51f02b83f7\": container with ID starting with f85c70c2928efa4403f20f7bddd4bd8272de9de70c00d6bb4dd59b51f02b83f7 not found: ID does not exist" containerID="f85c70c2928efa4403f20f7bddd4bd8272de9de70c00d6bb4dd59b51f02b83f7" Nov 27 07:08:40 crc kubenswrapper[4971]: I1127 07:08:40.558052 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f85c70c2928efa4403f20f7bddd4bd8272de9de70c00d6bb4dd59b51f02b83f7"} err="failed to get container status \"f85c70c2928efa4403f20f7bddd4bd8272de9de70c00d6bb4dd59b51f02b83f7\": rpc error: code = NotFound desc = could not find container \"f85c70c2928efa4403f20f7bddd4bd8272de9de70c00d6bb4dd59b51f02b83f7\": container with ID starting with f85c70c2928efa4403f20f7bddd4bd8272de9de70c00d6bb4dd59b51f02b83f7 not found: ID does not exist" Nov 27 07:08:40 crc kubenswrapper[4971]: I1127 07:08:40.567632 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-blqcq"] Nov 27 07:08:40 crc kubenswrapper[4971]: I1127 07:08:40.571075 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-blqcq"] Nov 27 07:08:42 crc kubenswrapper[4971]: I1127 07:08:42.557603 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2038ca5-beb4-434e-81a7-16a67ad9382d" path="/var/lib/kubelet/pods/b2038ca5-beb4-434e-81a7-16a67ad9382d/volumes" Nov 27 07:08:43 crc kubenswrapper[4971]: I1127 07:08:43.549313 4971 generic.go:334] "Generic (PLEG): container finished" podID="39c338ba-f061-448c-8f8d-0699d5ea38ac" containerID="8d28492611734678d645490136a164ffed32134888e2f5e5627036d881bae846" exitCode=0 Nov 27 07:08:43 crc kubenswrapper[4971]: I1127 07:08:43.549429 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm" event={"ID":"39c338ba-f061-448c-8f8d-0699d5ea38ac","Type":"ContainerDied","Data":"8d28492611734678d645490136a164ffed32134888e2f5e5627036d881bae846"} Nov 27 07:08:44 crc kubenswrapper[4971]: I1127 07:08:44.557632 4971 generic.go:334] "Generic (PLEG): container finished" podID="39c338ba-f061-448c-8f8d-0699d5ea38ac" containerID="5754a346603825de6c1785be39309793bb7e1ad0258f729f80640a54ac25a3df" exitCode=0 Nov 27 07:08:44 crc kubenswrapper[4971]: I1127 07:08:44.558009 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm" 
event={"ID":"39c338ba-f061-448c-8f8d-0699d5ea38ac","Type":"ContainerDied","Data":"5754a346603825de6c1785be39309793bb7e1ad0258f729f80640a54ac25a3df"} Nov 27 07:08:45 crc kubenswrapper[4971]: I1127 07:08:45.803038 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm" Nov 27 07:08:45 crc kubenswrapper[4971]: I1127 07:08:45.881573 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/39c338ba-f061-448c-8f8d-0699d5ea38ac-util\") pod \"39c338ba-f061-448c-8f8d-0699d5ea38ac\" (UID: \"39c338ba-f061-448c-8f8d-0699d5ea38ac\") " Nov 27 07:08:45 crc kubenswrapper[4971]: I1127 07:08:45.881740 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/39c338ba-f061-448c-8f8d-0699d5ea38ac-bundle\") pod \"39c338ba-f061-448c-8f8d-0699d5ea38ac\" (UID: \"39c338ba-f061-448c-8f8d-0699d5ea38ac\") " Nov 27 07:08:45 crc kubenswrapper[4971]: I1127 07:08:45.881823 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pq2h5\" (UniqueName: \"kubernetes.io/projected/39c338ba-f061-448c-8f8d-0699d5ea38ac-kube-api-access-pq2h5\") pod \"39c338ba-f061-448c-8f8d-0699d5ea38ac\" (UID: \"39c338ba-f061-448c-8f8d-0699d5ea38ac\") " Nov 27 07:08:45 crc kubenswrapper[4971]: I1127 07:08:45.883132 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39c338ba-f061-448c-8f8d-0699d5ea38ac-bundle" (OuterVolumeSpecName: "bundle") pod "39c338ba-f061-448c-8f8d-0699d5ea38ac" (UID: "39c338ba-f061-448c-8f8d-0699d5ea38ac"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:08:45 crc kubenswrapper[4971]: I1127 07:08:45.888336 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39c338ba-f061-448c-8f8d-0699d5ea38ac-kube-api-access-pq2h5" (OuterVolumeSpecName: "kube-api-access-pq2h5") pod "39c338ba-f061-448c-8f8d-0699d5ea38ac" (UID: "39c338ba-f061-448c-8f8d-0699d5ea38ac"). InnerVolumeSpecName "kube-api-access-pq2h5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:08:45 crc kubenswrapper[4971]: I1127 07:08:45.895127 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39c338ba-f061-448c-8f8d-0699d5ea38ac-util" (OuterVolumeSpecName: "util") pod "39c338ba-f061-448c-8f8d-0699d5ea38ac" (UID: "39c338ba-f061-448c-8f8d-0699d5ea38ac"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:08:45 crc kubenswrapper[4971]: I1127 07:08:45.983794 4971 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/39c338ba-f061-448c-8f8d-0699d5ea38ac-util\") on node \"crc\" DevicePath \"\"" Nov 27 07:08:45 crc kubenswrapper[4971]: I1127 07:08:45.983852 4971 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/39c338ba-f061-448c-8f8d-0699d5ea38ac-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:08:45 crc kubenswrapper[4971]: I1127 07:08:45.983862 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pq2h5\" (UniqueName: \"kubernetes.io/projected/39c338ba-f061-448c-8f8d-0699d5ea38ac-kube-api-access-pq2h5\") on node \"crc\" DevicePath \"\"" Nov 27 07:08:46 crc kubenswrapper[4971]: I1127 07:08:46.570504 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm" event={"ID":"39c338ba-f061-448c-8f8d-0699d5ea38ac","Type":"ContainerDied","Data":"210e96ab5d9f71f000cca3c4b174cb83f6d43e5edae9b76fab28ab58eb5b007a"} Nov 27 07:08:46 crc kubenswrapper[4971]: I1127 07:08:46.570559 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="210e96ab5d9f71f000cca3c4b174cb83f6d43e5edae9b76fab28ab58eb5b007a" Nov 27 07:08:46 crc kubenswrapper[4971]: I1127 07:08:46.570577 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm" Nov 27 07:08:56 crc kubenswrapper[4971]: I1127 07:08:56.413317 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:08:56 crc kubenswrapper[4971]: I1127 07:08:56.413962 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:08:56 crc kubenswrapper[4971]: I1127 07:08:56.414582 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 07:08:56 crc kubenswrapper[4971]: I1127 07:08:56.415422 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4b10f4e9a06fcbdd9ab8460b0527e15bbc485600c51c9ce5ae3fa173ac3281f7"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 07:08:56 crc kubenswrapper[4971]: I1127 07:08:56.415488 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://4b10f4e9a06fcbdd9ab8460b0527e15bbc485600c51c9ce5ae3fa173ac3281f7" gracePeriod=600 Nov 27 07:08:56 crc kubenswrapper[4971]: I1127 07:08:56.634591 4971 generic.go:334] "Generic (PLEG): container finished" 
podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="4b10f4e9a06fcbdd9ab8460b0527e15bbc485600c51c9ce5ae3fa173ac3281f7" exitCode=0 Nov 27 07:08:56 crc kubenswrapper[4971]: I1127 07:08:56.634654 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"4b10f4e9a06fcbdd9ab8460b0527e15bbc485600c51c9ce5ae3fa173ac3281f7"} Nov 27 07:08:56 crc kubenswrapper[4971]: I1127 07:08:56.634700 4971 scope.go:117] "RemoveContainer" containerID="74798b3e4b4c86a92a8e5dce55abcc5cc8f9c222cd75d6d3af831d281c1bb219" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.062413 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-69f6b6f45b-zlxzt"] Nov 27 07:08:57 crc kubenswrapper[4971]: E1127 07:08:57.063121 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2038ca5-beb4-434e-81a7-16a67ad9382d" containerName="console" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.063146 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2038ca5-beb4-434e-81a7-16a67ad9382d" containerName="console" Nov 27 07:08:57 crc kubenswrapper[4971]: E1127 07:08:57.063174 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39c338ba-f061-448c-8f8d-0699d5ea38ac" containerName="util" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.063183 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="39c338ba-f061-448c-8f8d-0699d5ea38ac" containerName="util" Nov 27 07:08:57 crc kubenswrapper[4971]: E1127 07:08:57.063192 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39c338ba-f061-448c-8f8d-0699d5ea38ac" containerName="extract" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.063202 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="39c338ba-f061-448c-8f8d-0699d5ea38ac" containerName="extract" Nov 27 07:08:57 crc kubenswrapper[4971]: E1127 07:08:57.063215 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39c338ba-f061-448c-8f8d-0699d5ea38ac" containerName="pull" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.063223 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="39c338ba-f061-448c-8f8d-0699d5ea38ac" containerName="pull" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.063350 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="39c338ba-f061-448c-8f8d-0699d5ea38ac" containerName="extract" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.063371 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2038ca5-beb4-434e-81a7-16a67ad9382d" containerName="console" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.063872 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-69f6b6f45b-zlxzt" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.069286 4971 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.074322 4971 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.074666 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.077933 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.078005 4971 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-qfblr" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.093347 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-69f6b6f45b-zlxzt"] Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.140207 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hb987\" (UniqueName: \"kubernetes.io/projected/58f5b77d-eff3-48fe-b104-c7d62bb97327-kube-api-access-hb987\") pod \"metallb-operator-controller-manager-69f6b6f45b-zlxzt\" (UID: \"58f5b77d-eff3-48fe-b104-c7d62bb97327\") " pod="metallb-system/metallb-operator-controller-manager-69f6b6f45b-zlxzt" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.140282 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/58f5b77d-eff3-48fe-b104-c7d62bb97327-webhook-cert\") pod \"metallb-operator-controller-manager-69f6b6f45b-zlxzt\" (UID: \"58f5b77d-eff3-48fe-b104-c7d62bb97327\") " pod="metallb-system/metallb-operator-controller-manager-69f6b6f45b-zlxzt" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.140470 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/58f5b77d-eff3-48fe-b104-c7d62bb97327-apiservice-cert\") pod \"metallb-operator-controller-manager-69f6b6f45b-zlxzt\" (UID: \"58f5b77d-eff3-48fe-b104-c7d62bb97327\") " pod="metallb-system/metallb-operator-controller-manager-69f6b6f45b-zlxzt" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.241964 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hb987\" (UniqueName: \"kubernetes.io/projected/58f5b77d-eff3-48fe-b104-c7d62bb97327-kube-api-access-hb987\") pod \"metallb-operator-controller-manager-69f6b6f45b-zlxzt\" (UID: \"58f5b77d-eff3-48fe-b104-c7d62bb97327\") " pod="metallb-system/metallb-operator-controller-manager-69f6b6f45b-zlxzt" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.242015 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/58f5b77d-eff3-48fe-b104-c7d62bb97327-webhook-cert\") pod \"metallb-operator-controller-manager-69f6b6f45b-zlxzt\" (UID: \"58f5b77d-eff3-48fe-b104-c7d62bb97327\") " pod="metallb-system/metallb-operator-controller-manager-69f6b6f45b-zlxzt" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.242046 4971 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/58f5b77d-eff3-48fe-b104-c7d62bb97327-apiservice-cert\") pod \"metallb-operator-controller-manager-69f6b6f45b-zlxzt\" (UID: \"58f5b77d-eff3-48fe-b104-c7d62bb97327\") " pod="metallb-system/metallb-operator-controller-manager-69f6b6f45b-zlxzt" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.252464 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/58f5b77d-eff3-48fe-b104-c7d62bb97327-webhook-cert\") pod \"metallb-operator-controller-manager-69f6b6f45b-zlxzt\" (UID: \"58f5b77d-eff3-48fe-b104-c7d62bb97327\") " pod="metallb-system/metallb-operator-controller-manager-69f6b6f45b-zlxzt" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.252479 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/58f5b77d-eff3-48fe-b104-c7d62bb97327-apiservice-cert\") pod \"metallb-operator-controller-manager-69f6b6f45b-zlxzt\" (UID: \"58f5b77d-eff3-48fe-b104-c7d62bb97327\") " pod="metallb-system/metallb-operator-controller-manager-69f6b6f45b-zlxzt" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.293774 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hb987\" (UniqueName: \"kubernetes.io/projected/58f5b77d-eff3-48fe-b104-c7d62bb97327-kube-api-access-hb987\") pod \"metallb-operator-controller-manager-69f6b6f45b-zlxzt\" (UID: \"58f5b77d-eff3-48fe-b104-c7d62bb97327\") " pod="metallb-system/metallb-operator-controller-manager-69f6b6f45b-zlxzt" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.327615 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-74448477b5-m75vr"] Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.328347 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-74448477b5-m75vr" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.333889 4971 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.334418 4971 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-xz8p9" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.336277 4971 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.343181 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7-webhook-cert\") pod \"metallb-operator-webhook-server-74448477b5-m75vr\" (UID: \"1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7\") " pod="metallb-system/metallb-operator-webhook-server-74448477b5-m75vr" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.343243 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffkkd\" (UniqueName: \"kubernetes.io/projected/1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7-kube-api-access-ffkkd\") pod \"metallb-operator-webhook-server-74448477b5-m75vr\" (UID: \"1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7\") " pod="metallb-system/metallb-operator-webhook-server-74448477b5-m75vr" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.343317 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7-apiservice-cert\") pod \"metallb-operator-webhook-server-74448477b5-m75vr\" (UID: \"1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7\") " pod="metallb-system/metallb-operator-webhook-server-74448477b5-m75vr" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.346820 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-74448477b5-m75vr"] Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.383038 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-69f6b6f45b-zlxzt" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.445358 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7-webhook-cert\") pod \"metallb-operator-webhook-server-74448477b5-m75vr\" (UID: \"1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7\") " pod="metallb-system/metallb-operator-webhook-server-74448477b5-m75vr" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.445780 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffkkd\" (UniqueName: \"kubernetes.io/projected/1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7-kube-api-access-ffkkd\") pod \"metallb-operator-webhook-server-74448477b5-m75vr\" (UID: \"1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7\") " pod="metallb-system/metallb-operator-webhook-server-74448477b5-m75vr" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.445846 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7-apiservice-cert\") pod \"metallb-operator-webhook-server-74448477b5-m75vr\" (UID: \"1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7\") " pod="metallb-system/metallb-operator-webhook-server-74448477b5-m75vr" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.452361 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7-apiservice-cert\") pod \"metallb-operator-webhook-server-74448477b5-m75vr\" (UID: \"1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7\") " pod="metallb-system/metallb-operator-webhook-server-74448477b5-m75vr" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.455148 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7-webhook-cert\") pod \"metallb-operator-webhook-server-74448477b5-m75vr\" (UID: \"1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7\") " pod="metallb-system/metallb-operator-webhook-server-74448477b5-m75vr" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.511587 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffkkd\" (UniqueName: \"kubernetes.io/projected/1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7-kube-api-access-ffkkd\") pod \"metallb-operator-webhook-server-74448477b5-m75vr\" (UID: \"1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7\") " pod="metallb-system/metallb-operator-webhook-server-74448477b5-m75vr" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.647038 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-74448477b5-m75vr" Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.754617 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"30867a48d7e31eebb704511f63aa767a3965e3de0a82008135ef08d2702adfac"} Nov 27 07:08:57 crc kubenswrapper[4971]: I1127 07:08:57.809501 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-69f6b6f45b-zlxzt"] Nov 27 07:08:58 crc kubenswrapper[4971]: I1127 07:08:58.176737 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-74448477b5-m75vr"] Nov 27 07:08:58 crc kubenswrapper[4971]: W1127 07:08:58.188761 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f8d9f92_08c5_4ae0_bcbb_8d78b73dd6b7.slice/crio-dbac5df36497d958e90590e84e4ee5c51b084c349cd627eb7b6e44c51aa69128 WatchSource:0}: Error finding container dbac5df36497d958e90590e84e4ee5c51b084c349cd627eb7b6e44c51aa69128: Status 404 returned error can't find the container with id dbac5df36497d958e90590e84e4ee5c51b084c349cd627eb7b6e44c51aa69128 Nov 27 07:08:58 crc kubenswrapper[4971]: I1127 07:08:58.764149 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-69f6b6f45b-zlxzt" event={"ID":"58f5b77d-eff3-48fe-b104-c7d62bb97327","Type":"ContainerStarted","Data":"1cebbc1d3edf4ee16f9cf6ee63a876cec637643ffc8d0ee10484bd5ed8611122"} Nov 27 07:08:58 crc kubenswrapper[4971]: I1127 07:08:58.765671 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-74448477b5-m75vr" event={"ID":"1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7","Type":"ContainerStarted","Data":"dbac5df36497d958e90590e84e4ee5c51b084c349cd627eb7b6e44c51aa69128"} Nov 27 07:09:03 crc kubenswrapper[4971]: I1127 07:09:03.895573 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-69f6b6f45b-zlxzt" event={"ID":"58f5b77d-eff3-48fe-b104-c7d62bb97327","Type":"ContainerStarted","Data":"983c6a562dba45a4e897671be7f020aa5d94cbd826d01e968fa8de8442dba61a"} Nov 27 07:09:03 crc kubenswrapper[4971]: I1127 07:09:03.896334 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-69f6b6f45b-zlxzt" Nov 27 07:09:03 crc kubenswrapper[4971]: I1127 07:09:03.921382 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-69f6b6f45b-zlxzt" podStartSLOduration=3.30218221 podStartE2EDuration="6.921357517s" podCreationTimestamp="2025-11-27 07:08:57 +0000 UTC" firstStartedPulling="2025-11-27 07:08:57.851589482 +0000 UTC m=+976.043633400" lastFinishedPulling="2025-11-27 07:09:01.470764789 +0000 UTC m=+979.662808707" observedRunningTime="2025-11-27 07:09:03.918137692 +0000 UTC m=+982.110181610" watchObservedRunningTime="2025-11-27 07:09:03.921357517 +0000 UTC m=+982.113401435" Nov 27 07:09:05 crc kubenswrapper[4971]: I1127 07:09:05.910337 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-74448477b5-m75vr" 
event={"ID":"1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7","Type":"ContainerStarted","Data":"61446b542ae1939828bed5eda1ccfe0cab365effaa1626c3dab44b1c98a4f2e8"} Nov 27 07:09:05 crc kubenswrapper[4971]: I1127 07:09:05.910889 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-74448477b5-m75vr" Nov 27 07:09:05 crc kubenswrapper[4971]: I1127 07:09:05.927088 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-74448477b5-m75vr" podStartSLOduration=2.028509346 podStartE2EDuration="8.927070154s" podCreationTimestamp="2025-11-27 07:08:57 +0000 UTC" firstStartedPulling="2025-11-27 07:08:58.191346087 +0000 UTC m=+976.383390005" lastFinishedPulling="2025-11-27 07:09:05.089906895 +0000 UTC m=+983.281950813" observedRunningTime="2025-11-27 07:09:05.925752885 +0000 UTC m=+984.117796823" watchObservedRunningTime="2025-11-27 07:09:05.927070154 +0000 UTC m=+984.119114082" Nov 27 07:09:17 crc kubenswrapper[4971]: I1127 07:09:17.674626 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-74448477b5-m75vr" Nov 27 07:09:37 crc kubenswrapper[4971]: I1127 07:09:37.386109 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-69f6b6f45b-zlxzt" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.166229 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-bpn6l"] Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.169218 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.173515 4971 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-fzjxv" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.173582 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.174156 4971 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.178178 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-8pnfh"] Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.180542 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8pnfh" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.182761 4971 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.196866 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-8pnfh"] Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.272226 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-rpws9"] Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.276217 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-rpws9" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.278935 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.278980 4971 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-hr99w" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.279098 4971 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.279162 4971 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.291908 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-2tmkr"] Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.292822 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-2tmkr" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.298095 4971 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.318612 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/cce86d74-11b3-43c7-bdae-eb87fbeb457a-frr-conf\") pod \"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.318678 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/cce86d74-11b3-43c7-bdae-eb87fbeb457a-frr-startup\") pod \"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.318724 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/cce86d74-11b3-43c7-bdae-eb87fbeb457a-frr-sockets\") pod \"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.318780 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cce86d74-11b3-43c7-bdae-eb87fbeb457a-metrics-certs\") pod \"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.318922 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b677a99e-e567-4d2f-8c52-a92ec72d82a6-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-8pnfh\" (UID: \"b677a99e-e567-4d2f-8c52-a92ec72d82a6\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8pnfh" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.319139 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqv7m\" (UniqueName: \"kubernetes.io/projected/cce86d74-11b3-43c7-bdae-eb87fbeb457a-kube-api-access-nqv7m\") pod \"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " 
pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.319350 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7dbp\" (UniqueName: \"kubernetes.io/projected/b677a99e-e567-4d2f-8c52-a92ec72d82a6-kube-api-access-h7dbp\") pod \"frr-k8s-webhook-server-7fcb986d4-8pnfh\" (UID: \"b677a99e-e567-4d2f-8c52-a92ec72d82a6\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8pnfh" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.319382 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/cce86d74-11b3-43c7-bdae-eb87fbeb457a-reloader\") pod \"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.319483 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/cce86d74-11b3-43c7-bdae-eb87fbeb457a-metrics\") pod \"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.320492 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-2tmkr"] Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.421390 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/cce86d74-11b3-43c7-bdae-eb87fbeb457a-frr-sockets\") pod \"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.421446 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cce86d74-11b3-43c7-bdae-eb87fbeb457a-metrics-certs\") pod \"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.421475 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b677a99e-e567-4d2f-8c52-a92ec72d82a6-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-8pnfh\" (UID: \"b677a99e-e567-4d2f-8c52-a92ec72d82a6\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8pnfh" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.421508 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-memberlist\") pod \"speaker-rpws9\" (UID: \"d39b11b5-58d7-4eb3-aea7-50e2f7f40c89\") " pod="metallb-system/speaker-rpws9" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.421557 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f64f6f7-7b67-4928-b73e-198643169a45-metrics-certs\") pod \"controller-f8648f98b-2tmkr\" (UID: \"6f64f6f7-7b67-4928-b73e-198643169a45\") " pod="metallb-system/controller-f8648f98b-2tmkr" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.421601 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqv7m\" (UniqueName: \"kubernetes.io/projected/cce86d74-11b3-43c7-bdae-eb87fbeb457a-kube-api-access-nqv7m\") pod 
\"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.421628 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6f64f6f7-7b67-4928-b73e-198643169a45-cert\") pod \"controller-f8648f98b-2tmkr\" (UID: \"6f64f6f7-7b67-4928-b73e-198643169a45\") " pod="metallb-system/controller-f8648f98b-2tmkr" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.421649 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-metallb-excludel2\") pod \"speaker-rpws9\" (UID: \"d39b11b5-58d7-4eb3-aea7-50e2f7f40c89\") " pod="metallb-system/speaker-rpws9" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.421678 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-metrics-certs\") pod \"speaker-rpws9\" (UID: \"d39b11b5-58d7-4eb3-aea7-50e2f7f40c89\") " pod="metallb-system/speaker-rpws9" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.421710 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5q7b\" (UniqueName: \"kubernetes.io/projected/6f64f6f7-7b67-4928-b73e-198643169a45-kube-api-access-b5q7b\") pod \"controller-f8648f98b-2tmkr\" (UID: \"6f64f6f7-7b67-4928-b73e-198643169a45\") " pod="metallb-system/controller-f8648f98b-2tmkr" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.421734 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7dbp\" (UniqueName: \"kubernetes.io/projected/b677a99e-e567-4d2f-8c52-a92ec72d82a6-kube-api-access-h7dbp\") pod \"frr-k8s-webhook-server-7fcb986d4-8pnfh\" (UID: \"b677a99e-e567-4d2f-8c52-a92ec72d82a6\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8pnfh" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.421762 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/cce86d74-11b3-43c7-bdae-eb87fbeb457a-reloader\") pod \"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.421801 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/cce86d74-11b3-43c7-bdae-eb87fbeb457a-metrics\") pod \"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.421856 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h28cj\" (UniqueName: \"kubernetes.io/projected/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-kube-api-access-h28cj\") pod \"speaker-rpws9\" (UID: \"d39b11b5-58d7-4eb3-aea7-50e2f7f40c89\") " pod="metallb-system/speaker-rpws9" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.421882 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/cce86d74-11b3-43c7-bdae-eb87fbeb457a-frr-conf\") pod \"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " 
pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.421904 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/cce86d74-11b3-43c7-bdae-eb87fbeb457a-frr-startup\") pod \"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.421998 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/cce86d74-11b3-43c7-bdae-eb87fbeb457a-frr-sockets\") pod \"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.423070 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/cce86d74-11b3-43c7-bdae-eb87fbeb457a-reloader\") pod \"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.423251 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/cce86d74-11b3-43c7-bdae-eb87fbeb457a-metrics\") pod \"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.423273 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/cce86d74-11b3-43c7-bdae-eb87fbeb457a-frr-conf\") pod \"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.423456 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/cce86d74-11b3-43c7-bdae-eb87fbeb457a-frr-startup\") pod \"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.430905 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b677a99e-e567-4d2f-8c52-a92ec72d82a6-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-8pnfh\" (UID: \"b677a99e-e567-4d2f-8c52-a92ec72d82a6\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8pnfh" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.430926 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cce86d74-11b3-43c7-bdae-eb87fbeb457a-metrics-certs\") pod \"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.445379 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7dbp\" (UniqueName: \"kubernetes.io/projected/b677a99e-e567-4d2f-8c52-a92ec72d82a6-kube-api-access-h7dbp\") pod \"frr-k8s-webhook-server-7fcb986d4-8pnfh\" (UID: \"b677a99e-e567-4d2f-8c52-a92ec72d82a6\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8pnfh" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.452498 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqv7m\" (UniqueName: \"kubernetes.io/projected/cce86d74-11b3-43c7-bdae-eb87fbeb457a-kube-api-access-nqv7m\") pod 
\"frr-k8s-bpn6l\" (UID: \"cce86d74-11b3-43c7-bdae-eb87fbeb457a\") " pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.497297 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.506277 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8pnfh" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.522958 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h28cj\" (UniqueName: \"kubernetes.io/projected/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-kube-api-access-h28cj\") pod \"speaker-rpws9\" (UID: \"d39b11b5-58d7-4eb3-aea7-50e2f7f40c89\") " pod="metallb-system/speaker-rpws9" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.523022 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-memberlist\") pod \"speaker-rpws9\" (UID: \"d39b11b5-58d7-4eb3-aea7-50e2f7f40c89\") " pod="metallb-system/speaker-rpws9" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.523064 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f64f6f7-7b67-4928-b73e-198643169a45-metrics-certs\") pod \"controller-f8648f98b-2tmkr\" (UID: \"6f64f6f7-7b67-4928-b73e-198643169a45\") " pod="metallb-system/controller-f8648f98b-2tmkr" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.523114 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-metallb-excludel2\") pod \"speaker-rpws9\" (UID: \"d39b11b5-58d7-4eb3-aea7-50e2f7f40c89\") " pod="metallb-system/speaker-rpws9" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.523153 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6f64f6f7-7b67-4928-b73e-198643169a45-cert\") pod \"controller-f8648f98b-2tmkr\" (UID: \"6f64f6f7-7b67-4928-b73e-198643169a45\") " pod="metallb-system/controller-f8648f98b-2tmkr" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.523186 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-metrics-certs\") pod \"speaker-rpws9\" (UID: \"d39b11b5-58d7-4eb3-aea7-50e2f7f40c89\") " pod="metallb-system/speaker-rpws9" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.523222 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5q7b\" (UniqueName: \"kubernetes.io/projected/6f64f6f7-7b67-4928-b73e-198643169a45-kube-api-access-b5q7b\") pod \"controller-f8648f98b-2tmkr\" (UID: \"6f64f6f7-7b67-4928-b73e-198643169a45\") " pod="metallb-system/controller-f8648f98b-2tmkr" Nov 27 07:09:38 crc kubenswrapper[4971]: E1127 07:09:38.523793 4971 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Nov 27 07:09:38 crc kubenswrapper[4971]: E1127 07:09:38.523897 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-metrics-certs podName:d39b11b5-58d7-4eb3-aea7-50e2f7f40c89 nodeName:}" failed. 
No retries permitted until 2025-11-27 07:09:39.023873775 +0000 UTC m=+1017.215917773 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-metrics-certs") pod "speaker-rpws9" (UID: "d39b11b5-58d7-4eb3-aea7-50e2f7f40c89") : secret "speaker-certs-secret" not found Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.524299 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-metallb-excludel2\") pod \"speaker-rpws9\" (UID: \"d39b11b5-58d7-4eb3-aea7-50e2f7f40c89\") " pod="metallb-system/speaker-rpws9" Nov 27 07:09:38 crc kubenswrapper[4971]: E1127 07:09:38.524315 4971 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Nov 27 07:09:38 crc kubenswrapper[4971]: E1127 07:09:38.524440 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-memberlist podName:d39b11b5-58d7-4eb3-aea7-50e2f7f40c89 nodeName:}" failed. No retries permitted until 2025-11-27 07:09:39.024409721 +0000 UTC m=+1017.216453629 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-memberlist") pod "speaker-rpws9" (UID: "d39b11b5-58d7-4eb3-aea7-50e2f7f40c89") : secret "metallb-memberlist" not found Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.526919 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6f64f6f7-7b67-4928-b73e-198643169a45-cert\") pod \"controller-f8648f98b-2tmkr\" (UID: \"6f64f6f7-7b67-4928-b73e-198643169a45\") " pod="metallb-system/controller-f8648f98b-2tmkr" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.532465 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f64f6f7-7b67-4928-b73e-198643169a45-metrics-certs\") pod \"controller-f8648f98b-2tmkr\" (UID: \"6f64f6f7-7b67-4928-b73e-198643169a45\") " pod="metallb-system/controller-f8648f98b-2tmkr" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.559055 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h28cj\" (UniqueName: \"kubernetes.io/projected/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-kube-api-access-h28cj\") pod \"speaker-rpws9\" (UID: \"d39b11b5-58d7-4eb3-aea7-50e2f7f40c89\") " pod="metallb-system/speaker-rpws9" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.562035 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5q7b\" (UniqueName: \"kubernetes.io/projected/6f64f6f7-7b67-4928-b73e-198643169a45-kube-api-access-b5q7b\") pod \"controller-f8648f98b-2tmkr\" (UID: \"6f64f6f7-7b67-4928-b73e-198643169a45\") " pod="metallb-system/controller-f8648f98b-2tmkr" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.612503 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-2tmkr" Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.788786 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-8pnfh"] Nov 27 07:09:38 crc kubenswrapper[4971]: W1127 07:09:38.809843 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb677a99e_e567_4d2f_8c52_a92ec72d82a6.slice/crio-1e9265e5414f9c818df5f9f62a73ec9bb4582c8f5a4125b8257bd3adfda9ceda WatchSource:0}: Error finding container 1e9265e5414f9c818df5f9f62a73ec9bb4582c8f5a4125b8257bd3adfda9ceda: Status 404 returned error can't find the container with id 1e9265e5414f9c818df5f9f62a73ec9bb4582c8f5a4125b8257bd3adfda9ceda Nov 27 07:09:38 crc kubenswrapper[4971]: I1127 07:09:38.918318 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-2tmkr"] Nov 27 07:09:38 crc kubenswrapper[4971]: W1127 07:09:38.929587 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f64f6f7_7b67_4928_b73e_198643169a45.slice/crio-8c5f81eb13201147243f35828bfb22615330d0aa65036aa930023d5c4b4effb5 WatchSource:0}: Error finding container 8c5f81eb13201147243f35828bfb22615330d0aa65036aa930023d5c4b4effb5: Status 404 returned error can't find the container with id 8c5f81eb13201147243f35828bfb22615330d0aa65036aa930023d5c4b4effb5 Nov 27 07:09:39 crc kubenswrapper[4971]: I1127 07:09:39.038755 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-memberlist\") pod \"speaker-rpws9\" (UID: \"d39b11b5-58d7-4eb3-aea7-50e2f7f40c89\") " pod="metallb-system/speaker-rpws9" Nov 27 07:09:39 crc kubenswrapper[4971]: I1127 07:09:39.038853 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-metrics-certs\") pod \"speaker-rpws9\" (UID: \"d39b11b5-58d7-4eb3-aea7-50e2f7f40c89\") " pod="metallb-system/speaker-rpws9" Nov 27 07:09:39 crc kubenswrapper[4971]: E1127 07:09:39.039598 4971 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Nov 27 07:09:39 crc kubenswrapper[4971]: E1127 07:09:39.039706 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-memberlist podName:d39b11b5-58d7-4eb3-aea7-50e2f7f40c89 nodeName:}" failed. No retries permitted until 2025-11-27 07:09:40.039681002 +0000 UTC m=+1018.231724930 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-memberlist") pod "speaker-rpws9" (UID: "d39b11b5-58d7-4eb3-aea7-50e2f7f40c89") : secret "metallb-memberlist" not found Nov 27 07:09:39 crc kubenswrapper[4971]: I1127 07:09:39.046831 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-metrics-certs\") pod \"speaker-rpws9\" (UID: \"d39b11b5-58d7-4eb3-aea7-50e2f7f40c89\") " pod="metallb-system/speaker-rpws9" Nov 27 07:09:39 crc kubenswrapper[4971]: I1127 07:09:39.117271 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bpn6l" event={"ID":"cce86d74-11b3-43c7-bdae-eb87fbeb457a","Type":"ContainerStarted","Data":"bd847cc550e08b673367c3066334b79fe705a4ad3f1fbec1c4ee7742ee7993fd"} Nov 27 07:09:39 crc kubenswrapper[4971]: I1127 07:09:39.118497 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8pnfh" event={"ID":"b677a99e-e567-4d2f-8c52-a92ec72d82a6","Type":"ContainerStarted","Data":"1e9265e5414f9c818df5f9f62a73ec9bb4582c8f5a4125b8257bd3adfda9ceda"} Nov 27 07:09:39 crc kubenswrapper[4971]: I1127 07:09:39.120328 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-2tmkr" event={"ID":"6f64f6f7-7b67-4928-b73e-198643169a45","Type":"ContainerStarted","Data":"cf0149670882d7f7b4ec46568d93e8b69bbb7da9f6472d75485ca16662c79f83"} Nov 27 07:09:39 crc kubenswrapper[4971]: I1127 07:09:39.120360 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-2tmkr" event={"ID":"6f64f6f7-7b67-4928-b73e-198643169a45","Type":"ContainerStarted","Data":"8c5f81eb13201147243f35828bfb22615330d0aa65036aa930023d5c4b4effb5"} Nov 27 07:09:40 crc kubenswrapper[4971]: I1127 07:09:40.055001 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-memberlist\") pod \"speaker-rpws9\" (UID: \"d39b11b5-58d7-4eb3-aea7-50e2f7f40c89\") " pod="metallb-system/speaker-rpws9" Nov 27 07:09:40 crc kubenswrapper[4971]: I1127 07:09:40.061089 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d39b11b5-58d7-4eb3-aea7-50e2f7f40c89-memberlist\") pod \"speaker-rpws9\" (UID: \"d39b11b5-58d7-4eb3-aea7-50e2f7f40c89\") " pod="metallb-system/speaker-rpws9" Nov 27 07:09:40 crc kubenswrapper[4971]: I1127 07:09:40.104865 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-rpws9" Nov 27 07:09:40 crc kubenswrapper[4971]: I1127 07:09:40.156884 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-2tmkr" event={"ID":"6f64f6f7-7b67-4928-b73e-198643169a45","Type":"ContainerStarted","Data":"40a48486b99e5cb2aebef22ccb5d49325cfcd294fe514e789174e5b8d603379f"} Nov 27 07:09:40 crc kubenswrapper[4971]: I1127 07:09:40.158404 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-2tmkr" Nov 27 07:09:40 crc kubenswrapper[4971]: I1127 07:09:40.194985 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-2tmkr" podStartSLOduration=2.194963858 podStartE2EDuration="2.194963858s" podCreationTimestamp="2025-11-27 07:09:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:09:40.183984123 +0000 UTC m=+1018.376028041" watchObservedRunningTime="2025-11-27 07:09:40.194963858 +0000 UTC m=+1018.387007776" Nov 27 07:09:41 crc kubenswrapper[4971]: I1127 07:09:41.165621 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-rpws9" event={"ID":"d39b11b5-58d7-4eb3-aea7-50e2f7f40c89","Type":"ContainerStarted","Data":"c6fef9469c5c7ceff2bbc2920d2c2eb6db2531a83389ed7a38184755582c0e3f"} Nov 27 07:09:41 crc kubenswrapper[4971]: I1127 07:09:41.166190 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-rpws9" event={"ID":"d39b11b5-58d7-4eb3-aea7-50e2f7f40c89","Type":"ContainerStarted","Data":"e91ebfbd6528e7e93cd46ff3e0e71d8c910b10841e7fe1dbf7ba9fd8d5644d4e"} Nov 27 07:09:41 crc kubenswrapper[4971]: I1127 07:09:41.166208 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-rpws9" event={"ID":"d39b11b5-58d7-4eb3-aea7-50e2f7f40c89","Type":"ContainerStarted","Data":"2c3762924975fbf3039d0d10fcd9ca287a7417b6e1739801649c1992ea0db6c3"} Nov 27 07:09:41 crc kubenswrapper[4971]: I1127 07:09:41.166636 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-rpws9" Nov 27 07:09:41 crc kubenswrapper[4971]: I1127 07:09:41.191155 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-rpws9" podStartSLOduration=3.191125623 podStartE2EDuration="3.191125623s" podCreationTimestamp="2025-11-27 07:09:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:09:41.189094013 +0000 UTC m=+1019.381137931" watchObservedRunningTime="2025-11-27 07:09:41.191125623 +0000 UTC m=+1019.383169561" Nov 27 07:09:47 crc kubenswrapper[4971]: I1127 07:09:47.215937 4971 generic.go:334] "Generic (PLEG): container finished" podID="cce86d74-11b3-43c7-bdae-eb87fbeb457a" containerID="1ea08c4556b54b2de8886f77ec061398b1d2da9c082194845c897e246a4d5c55" exitCode=0 Nov 27 07:09:47 crc kubenswrapper[4971]: I1127 07:09:47.216074 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bpn6l" event={"ID":"cce86d74-11b3-43c7-bdae-eb87fbeb457a","Type":"ContainerDied","Data":"1ea08c4556b54b2de8886f77ec061398b1d2da9c082194845c897e246a4d5c55"} Nov 27 07:09:47 crc kubenswrapper[4971]: I1127 07:09:47.220290 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8pnfh" 
event={"ID":"b677a99e-e567-4d2f-8c52-a92ec72d82a6","Type":"ContainerStarted","Data":"d689ed3a2ba6d788793d50962e09b57a1db4be1c71801fa13697b432c7ba5396"} Nov 27 07:09:47 crc kubenswrapper[4971]: I1127 07:09:47.220617 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8pnfh" Nov 27 07:09:48 crc kubenswrapper[4971]: I1127 07:09:48.228876 4971 generic.go:334] "Generic (PLEG): container finished" podID="cce86d74-11b3-43c7-bdae-eb87fbeb457a" containerID="a4c03639132e2816dd6283a37c6e1d814eaab55bfaf8fa4d9c581edd5914ad7e" exitCode=0 Nov 27 07:09:48 crc kubenswrapper[4971]: I1127 07:09:48.229027 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bpn6l" event={"ID":"cce86d74-11b3-43c7-bdae-eb87fbeb457a","Type":"ContainerDied","Data":"a4c03639132e2816dd6283a37c6e1d814eaab55bfaf8fa4d9c581edd5914ad7e"} Nov 27 07:09:48 crc kubenswrapper[4971]: I1127 07:09:48.259118 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8pnfh" podStartSLOduration=3.004010705 podStartE2EDuration="10.259098444s" podCreationTimestamp="2025-11-27 07:09:38 +0000 UTC" firstStartedPulling="2025-11-27 07:09:38.817646553 +0000 UTC m=+1017.009690471" lastFinishedPulling="2025-11-27 07:09:46.072734292 +0000 UTC m=+1024.264778210" observedRunningTime="2025-11-27 07:09:47.259181469 +0000 UTC m=+1025.451225417" watchObservedRunningTime="2025-11-27 07:09:48.259098444 +0000 UTC m=+1026.451142362" Nov 27 07:09:49 crc kubenswrapper[4971]: I1127 07:09:49.244307 4971 generic.go:334] "Generic (PLEG): container finished" podID="cce86d74-11b3-43c7-bdae-eb87fbeb457a" containerID="73ba1fe683e80c83161fb54219ac844927d32fd06d4c71263e3162d604d1cd53" exitCode=0 Nov 27 07:09:49 crc kubenswrapper[4971]: I1127 07:09:49.244366 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bpn6l" event={"ID":"cce86d74-11b3-43c7-bdae-eb87fbeb457a","Type":"ContainerDied","Data":"73ba1fe683e80c83161fb54219ac844927d32fd06d4c71263e3162d604d1cd53"} Nov 27 07:09:50 crc kubenswrapper[4971]: I1127 07:09:50.108906 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-rpws9" Nov 27 07:09:50 crc kubenswrapper[4971]: I1127 07:09:50.256168 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bpn6l" event={"ID":"cce86d74-11b3-43c7-bdae-eb87fbeb457a","Type":"ContainerStarted","Data":"036cc5a8a6ffa349f0f4487ffdeea698ea119047298b71933251c6efb1693983"} Nov 27 07:09:50 crc kubenswrapper[4971]: I1127 07:09:50.256216 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bpn6l" event={"ID":"cce86d74-11b3-43c7-bdae-eb87fbeb457a","Type":"ContainerStarted","Data":"901ab478b292d57296e8c2a59682c435d46336d3ca76d2c93f88df4c002463d1"} Nov 27 07:09:50 crc kubenswrapper[4971]: I1127 07:09:50.256231 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bpn6l" event={"ID":"cce86d74-11b3-43c7-bdae-eb87fbeb457a","Type":"ContainerStarted","Data":"17ef4f8e092a457f9bc33c17e65e965105568bd427688ca7840fa1a6a6eabb15"} Nov 27 07:09:50 crc kubenswrapper[4971]: I1127 07:09:50.256242 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bpn6l" event={"ID":"cce86d74-11b3-43c7-bdae-eb87fbeb457a","Type":"ContainerStarted","Data":"6b9816c022dcfd43806453bcbc56f063a73b9e2f6ff11ac00426c79d94e27e2a"} Nov 27 07:09:50 crc kubenswrapper[4971]: I1127 
07:09:50.256250 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bpn6l" event={"ID":"cce86d74-11b3-43c7-bdae-eb87fbeb457a","Type":"ContainerStarted","Data":"a739f35fe47ba8893620ea073daa8f1537d51f80dbabaeb5a2bd4137d8a163ad"} Nov 27 07:09:50 crc kubenswrapper[4971]: I1127 07:09:50.256259 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bpn6l" event={"ID":"cce86d74-11b3-43c7-bdae-eb87fbeb457a","Type":"ContainerStarted","Data":"4a8a2b8012f937cf15126e658ce82ff82d9ed883106359b9d96a96a2ce315404"} Nov 27 07:09:50 crc kubenswrapper[4971]: I1127 07:09:50.257705 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:50 crc kubenswrapper[4971]: I1127 07:09:50.278944 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-bpn6l" podStartSLOduration=4.890204203 podStartE2EDuration="12.278923299s" podCreationTimestamp="2025-11-27 07:09:38 +0000 UTC" firstStartedPulling="2025-11-27 07:09:38.697872695 +0000 UTC m=+1016.889916613" lastFinishedPulling="2025-11-27 07:09:46.086591791 +0000 UTC m=+1024.278635709" observedRunningTime="2025-11-27 07:09:50.275223159 +0000 UTC m=+1028.467267097" watchObservedRunningTime="2025-11-27 07:09:50.278923299 +0000 UTC m=+1028.470967217" Nov 27 07:09:51 crc kubenswrapper[4971]: I1127 07:09:51.693574 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm"] Nov 27 07:09:51 crc kubenswrapper[4971]: I1127 07:09:51.695277 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm" Nov 27 07:09:51 crc kubenswrapper[4971]: I1127 07:09:51.697904 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 27 07:09:51 crc kubenswrapper[4971]: I1127 07:09:51.708910 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm"] Nov 27 07:09:51 crc kubenswrapper[4971]: I1127 07:09:51.785138 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5bdf\" (UniqueName: \"kubernetes.io/projected/f3242efe-2323-432b-bcb2-5301d60426eb-kube-api-access-b5bdf\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm\" (UID: \"f3242efe-2323-432b-bcb2-5301d60426eb\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm" Nov 27 07:09:51 crc kubenswrapper[4971]: I1127 07:09:51.785197 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f3242efe-2323-432b-bcb2-5301d60426eb-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm\" (UID: \"f3242efe-2323-432b-bcb2-5301d60426eb\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm" Nov 27 07:09:51 crc kubenswrapper[4971]: I1127 07:09:51.785302 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f3242efe-2323-432b-bcb2-5301d60426eb-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm\" (UID: \"f3242efe-2323-432b-bcb2-5301d60426eb\") " 
pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm" Nov 27 07:09:51 crc kubenswrapper[4971]: I1127 07:09:51.886111 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5bdf\" (UniqueName: \"kubernetes.io/projected/f3242efe-2323-432b-bcb2-5301d60426eb-kube-api-access-b5bdf\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm\" (UID: \"f3242efe-2323-432b-bcb2-5301d60426eb\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm" Nov 27 07:09:51 crc kubenswrapper[4971]: I1127 07:09:51.886174 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f3242efe-2323-432b-bcb2-5301d60426eb-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm\" (UID: \"f3242efe-2323-432b-bcb2-5301d60426eb\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm" Nov 27 07:09:51 crc kubenswrapper[4971]: I1127 07:09:51.886224 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f3242efe-2323-432b-bcb2-5301d60426eb-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm\" (UID: \"f3242efe-2323-432b-bcb2-5301d60426eb\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm" Nov 27 07:09:51 crc kubenswrapper[4971]: I1127 07:09:51.886716 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f3242efe-2323-432b-bcb2-5301d60426eb-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm\" (UID: \"f3242efe-2323-432b-bcb2-5301d60426eb\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm" Nov 27 07:09:51 crc kubenswrapper[4971]: I1127 07:09:51.886914 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f3242efe-2323-432b-bcb2-5301d60426eb-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm\" (UID: \"f3242efe-2323-432b-bcb2-5301d60426eb\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm" Nov 27 07:09:51 crc kubenswrapper[4971]: I1127 07:09:51.916438 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5bdf\" (UniqueName: \"kubernetes.io/projected/f3242efe-2323-432b-bcb2-5301d60426eb-kube-api-access-b5bdf\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm\" (UID: \"f3242efe-2323-432b-bcb2-5301d60426eb\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm" Nov 27 07:09:52 crc kubenswrapper[4971]: I1127 07:09:52.029147 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm" Nov 27 07:09:52 crc kubenswrapper[4971]: I1127 07:09:52.460128 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm"] Nov 27 07:09:52 crc kubenswrapper[4971]: W1127 07:09:52.463846 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf3242efe_2323_432b_bcb2_5301d60426eb.slice/crio-eae75950c082a6fb47e3185ddfe26efd0eb7116af938ff47129048c8a71e2c86 WatchSource:0}: Error finding container eae75950c082a6fb47e3185ddfe26efd0eb7116af938ff47129048c8a71e2c86: Status 404 returned error can't find the container with id eae75950c082a6fb47e3185ddfe26efd0eb7116af938ff47129048c8a71e2c86 Nov 27 07:09:53 crc kubenswrapper[4971]: I1127 07:09:53.278491 4971 generic.go:334] "Generic (PLEG): container finished" podID="f3242efe-2323-432b-bcb2-5301d60426eb" containerID="517e505d98108d245a3f055420dcd87f6da539e98ba3171f7cf4157bd83f3d2c" exitCode=0 Nov 27 07:09:53 crc kubenswrapper[4971]: I1127 07:09:53.278884 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm" event={"ID":"f3242efe-2323-432b-bcb2-5301d60426eb","Type":"ContainerDied","Data":"517e505d98108d245a3f055420dcd87f6da539e98ba3171f7cf4157bd83f3d2c"} Nov 27 07:09:53 crc kubenswrapper[4971]: I1127 07:09:53.278941 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm" event={"ID":"f3242efe-2323-432b-bcb2-5301d60426eb","Type":"ContainerStarted","Data":"eae75950c082a6fb47e3185ddfe26efd0eb7116af938ff47129048c8a71e2c86"} Nov 27 07:09:53 crc kubenswrapper[4971]: I1127 07:09:53.497814 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:53 crc kubenswrapper[4971]: I1127 07:09:53.534091 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:09:56 crc kubenswrapper[4971]: I1127 07:09:56.298721 4971 generic.go:334] "Generic (PLEG): container finished" podID="f3242efe-2323-432b-bcb2-5301d60426eb" containerID="da2ba2ec945464296aa2a4753f034d9e734d4a18c958e2ffe1cbba70e58f3c28" exitCode=0 Nov 27 07:09:56 crc kubenswrapper[4971]: I1127 07:09:56.298763 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm" event={"ID":"f3242efe-2323-432b-bcb2-5301d60426eb","Type":"ContainerDied","Data":"da2ba2ec945464296aa2a4753f034d9e734d4a18c958e2ffe1cbba70e58f3c28"} Nov 27 07:09:57 crc kubenswrapper[4971]: I1127 07:09:57.306130 4971 generic.go:334] "Generic (PLEG): container finished" podID="f3242efe-2323-432b-bcb2-5301d60426eb" containerID="c2ce68e15ad6ac24727e3683d77265f8dfa628ca677d70c13f8e795aa6b0f7b4" exitCode=0 Nov 27 07:09:57 crc kubenswrapper[4971]: I1127 07:09:57.306178 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm" event={"ID":"f3242efe-2323-432b-bcb2-5301d60426eb","Type":"ContainerDied","Data":"c2ce68e15ad6ac24727e3683d77265f8dfa628ca677d70c13f8e795aa6b0f7b4"} Nov 27 07:09:58 crc kubenswrapper[4971]: I1127 07:09:58.516952 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-8pnfh" Nov 27 07:09:58 crc kubenswrapper[4971]: I1127 07:09:58.580826 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm" Nov 27 07:09:58 crc kubenswrapper[4971]: I1127 07:09:58.608911 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5bdf\" (UniqueName: \"kubernetes.io/projected/f3242efe-2323-432b-bcb2-5301d60426eb-kube-api-access-b5bdf\") pod \"f3242efe-2323-432b-bcb2-5301d60426eb\" (UID: \"f3242efe-2323-432b-bcb2-5301d60426eb\") " Nov 27 07:09:58 crc kubenswrapper[4971]: I1127 07:09:58.609058 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f3242efe-2323-432b-bcb2-5301d60426eb-util\") pod \"f3242efe-2323-432b-bcb2-5301d60426eb\" (UID: \"f3242efe-2323-432b-bcb2-5301d60426eb\") " Nov 27 07:09:58 crc kubenswrapper[4971]: I1127 07:09:58.609106 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f3242efe-2323-432b-bcb2-5301d60426eb-bundle\") pod \"f3242efe-2323-432b-bcb2-5301d60426eb\" (UID: \"f3242efe-2323-432b-bcb2-5301d60426eb\") " Nov 27 07:09:58 crc kubenswrapper[4971]: I1127 07:09:58.613403 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3242efe-2323-432b-bcb2-5301d60426eb-bundle" (OuterVolumeSpecName: "bundle") pod "f3242efe-2323-432b-bcb2-5301d60426eb" (UID: "f3242efe-2323-432b-bcb2-5301d60426eb"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:09:58 crc kubenswrapper[4971]: I1127 07:09:58.616837 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-2tmkr" Nov 27 07:09:58 crc kubenswrapper[4971]: I1127 07:09:58.643824 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3242efe-2323-432b-bcb2-5301d60426eb-kube-api-access-b5bdf" (OuterVolumeSpecName: "kube-api-access-b5bdf") pod "f3242efe-2323-432b-bcb2-5301d60426eb" (UID: "f3242efe-2323-432b-bcb2-5301d60426eb"). InnerVolumeSpecName "kube-api-access-b5bdf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:09:58 crc kubenswrapper[4971]: I1127 07:09:58.662890 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3242efe-2323-432b-bcb2-5301d60426eb-util" (OuterVolumeSpecName: "util") pod "f3242efe-2323-432b-bcb2-5301d60426eb" (UID: "f3242efe-2323-432b-bcb2-5301d60426eb"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:09:58 crc kubenswrapper[4971]: I1127 07:09:58.711589 4971 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f3242efe-2323-432b-bcb2-5301d60426eb-util\") on node \"crc\" DevicePath \"\"" Nov 27 07:09:58 crc kubenswrapper[4971]: I1127 07:09:58.711646 4971 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f3242efe-2323-432b-bcb2-5301d60426eb-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:09:58 crc kubenswrapper[4971]: I1127 07:09:58.711660 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5bdf\" (UniqueName: \"kubernetes.io/projected/f3242efe-2323-432b-bcb2-5301d60426eb-kube-api-access-b5bdf\") on node \"crc\" DevicePath \"\"" Nov 27 07:09:59 crc kubenswrapper[4971]: I1127 07:09:59.320670 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm" event={"ID":"f3242efe-2323-432b-bcb2-5301d60426eb","Type":"ContainerDied","Data":"eae75950c082a6fb47e3185ddfe26efd0eb7116af938ff47129048c8a71e2c86"} Nov 27 07:09:59 crc kubenswrapper[4971]: I1127 07:09:59.321114 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eae75950c082a6fb47e3185ddfe26efd0eb7116af938ff47129048c8a71e2c86" Nov 27 07:09:59 crc kubenswrapper[4971]: I1127 07:09:59.320786 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm" Nov 27 07:10:04 crc kubenswrapper[4971]: I1127 07:10:04.956029 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ltq5k"] Nov 27 07:10:04 crc kubenswrapper[4971]: E1127 07:10:04.956886 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3242efe-2323-432b-bcb2-5301d60426eb" containerName="extract" Nov 27 07:10:04 crc kubenswrapper[4971]: I1127 07:10:04.956900 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3242efe-2323-432b-bcb2-5301d60426eb" containerName="extract" Nov 27 07:10:04 crc kubenswrapper[4971]: E1127 07:10:04.956911 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3242efe-2323-432b-bcb2-5301d60426eb" containerName="util" Nov 27 07:10:04 crc kubenswrapper[4971]: I1127 07:10:04.956918 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3242efe-2323-432b-bcb2-5301d60426eb" containerName="util" Nov 27 07:10:04 crc kubenswrapper[4971]: E1127 07:10:04.956937 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3242efe-2323-432b-bcb2-5301d60426eb" containerName="pull" Nov 27 07:10:04 crc kubenswrapper[4971]: I1127 07:10:04.956946 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3242efe-2323-432b-bcb2-5301d60426eb" containerName="pull" Nov 27 07:10:04 crc kubenswrapper[4971]: I1127 07:10:04.957072 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3242efe-2323-432b-bcb2-5301d60426eb" containerName="extract" Nov 27 07:10:04 crc kubenswrapper[4971]: I1127 07:10:04.958466 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ltq5k" Nov 27 07:10:04 crc kubenswrapper[4971]: I1127 07:10:04.963462 4971 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-5ggx7" Nov 27 07:10:04 crc kubenswrapper[4971]: I1127 07:10:04.963838 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Nov 27 07:10:04 crc kubenswrapper[4971]: I1127 07:10:04.970165 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Nov 27 07:10:04 crc kubenswrapper[4971]: I1127 07:10:04.978099 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ltq5k"] Nov 27 07:10:05 crc kubenswrapper[4971]: I1127 07:10:05.118120 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/131d2683-160b-4cf9-8811-f9c59dd07fc7-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-ltq5k\" (UID: \"131d2683-160b-4cf9-8811-f9c59dd07fc7\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ltq5k" Nov 27 07:10:05 crc kubenswrapper[4971]: I1127 07:10:05.118222 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6r42\" (UniqueName: \"kubernetes.io/projected/131d2683-160b-4cf9-8811-f9c59dd07fc7-kube-api-access-g6r42\") pod \"cert-manager-operator-controller-manager-64cf6dff88-ltq5k\" (UID: \"131d2683-160b-4cf9-8811-f9c59dd07fc7\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ltq5k" Nov 27 07:10:05 crc kubenswrapper[4971]: I1127 07:10:05.220106 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/131d2683-160b-4cf9-8811-f9c59dd07fc7-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-ltq5k\" (UID: \"131d2683-160b-4cf9-8811-f9c59dd07fc7\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ltq5k" Nov 27 07:10:05 crc kubenswrapper[4971]: I1127 07:10:05.220178 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6r42\" (UniqueName: \"kubernetes.io/projected/131d2683-160b-4cf9-8811-f9c59dd07fc7-kube-api-access-g6r42\") pod \"cert-manager-operator-controller-manager-64cf6dff88-ltq5k\" (UID: \"131d2683-160b-4cf9-8811-f9c59dd07fc7\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ltq5k" Nov 27 07:10:05 crc kubenswrapper[4971]: I1127 07:10:05.220835 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/131d2683-160b-4cf9-8811-f9c59dd07fc7-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-ltq5k\" (UID: \"131d2683-160b-4cf9-8811-f9c59dd07fc7\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ltq5k" Nov 27 07:10:05 crc kubenswrapper[4971]: I1127 07:10:05.244315 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6r42\" (UniqueName: \"kubernetes.io/projected/131d2683-160b-4cf9-8811-f9c59dd07fc7-kube-api-access-g6r42\") pod \"cert-manager-operator-controller-manager-64cf6dff88-ltq5k\" (UID: \"131d2683-160b-4cf9-8811-f9c59dd07fc7\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ltq5k" Nov 27 07:10:05 crc kubenswrapper[4971]: I1127 07:10:05.320380 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ltq5k" Nov 27 07:10:05 crc kubenswrapper[4971]: I1127 07:10:05.670351 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ltq5k"] Nov 27 07:10:05 crc kubenswrapper[4971]: W1127 07:10:05.675401 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod131d2683_160b_4cf9_8811_f9c59dd07fc7.slice/crio-e265ac811e1dda47677c5ff5aadc983f614238d376cf72850b250e8340cb7d3d WatchSource:0}: Error finding container e265ac811e1dda47677c5ff5aadc983f614238d376cf72850b250e8340cb7d3d: Status 404 returned error can't find the container with id e265ac811e1dda47677c5ff5aadc983f614238d376cf72850b250e8340cb7d3d Nov 27 07:10:06 crc kubenswrapper[4971]: I1127 07:10:06.366235 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ltq5k" event={"ID":"131d2683-160b-4cf9-8811-f9c59dd07fc7","Type":"ContainerStarted","Data":"e265ac811e1dda47677c5ff5aadc983f614238d376cf72850b250e8340cb7d3d"} Nov 27 07:10:08 crc kubenswrapper[4971]: I1127 07:10:08.504798 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-bpn6l" Nov 27 07:10:14 crc kubenswrapper[4971]: I1127 07:10:14.426250 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ltq5k" event={"ID":"131d2683-160b-4cf9-8811-f9c59dd07fc7","Type":"ContainerStarted","Data":"5f3c64d35d49b6523ddb8b7a26e7f044b0b225b72d2fd1c4fdc185a0f73a87bc"} Nov 27 07:10:14 crc kubenswrapper[4971]: I1127 07:10:14.453717 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ltq5k" podStartSLOduration=2.7481095250000003 podStartE2EDuration="10.453692069s" podCreationTimestamp="2025-11-27 07:10:04 +0000 UTC" firstStartedPulling="2025-11-27 07:10:05.678285953 +0000 UTC m=+1043.870329871" lastFinishedPulling="2025-11-27 07:10:13.383868497 +0000 UTC m=+1051.575912415" observedRunningTime="2025-11-27 07:10:14.448775464 +0000 UTC m=+1052.640819402" watchObservedRunningTime="2025-11-27 07:10:14.453692069 +0000 UTC m=+1052.645735997" Nov 27 07:10:17 crc kubenswrapper[4971]: I1127 07:10:17.369873 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-4c2cw"] Nov 27 07:10:17 crc kubenswrapper[4971]: I1127 07:10:17.371006 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-4c2cw" Nov 27 07:10:17 crc kubenswrapper[4971]: I1127 07:10:17.374808 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Nov 27 07:10:17 crc kubenswrapper[4971]: I1127 07:10:17.374967 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Nov 27 07:10:17 crc kubenswrapper[4971]: I1127 07:10:17.375089 4971 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-f67lz" Nov 27 07:10:17 crc kubenswrapper[4971]: I1127 07:10:17.424335 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-4c2cw"] Nov 27 07:10:17 crc kubenswrapper[4971]: I1127 07:10:17.534778 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmfrl\" (UniqueName: \"kubernetes.io/projected/427779b7-166e-4422-9db7-b9a2ab5db7f2-kube-api-access-kmfrl\") pod \"cert-manager-webhook-f4fb5df64-4c2cw\" (UID: \"427779b7-166e-4422-9db7-b9a2ab5db7f2\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-4c2cw" Nov 27 07:10:17 crc kubenswrapper[4971]: I1127 07:10:17.534865 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/427779b7-166e-4422-9db7-b9a2ab5db7f2-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-4c2cw\" (UID: \"427779b7-166e-4422-9db7-b9a2ab5db7f2\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-4c2cw" Nov 27 07:10:17 crc kubenswrapper[4971]: I1127 07:10:17.636391 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmfrl\" (UniqueName: \"kubernetes.io/projected/427779b7-166e-4422-9db7-b9a2ab5db7f2-kube-api-access-kmfrl\") pod \"cert-manager-webhook-f4fb5df64-4c2cw\" (UID: \"427779b7-166e-4422-9db7-b9a2ab5db7f2\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-4c2cw" Nov 27 07:10:17 crc kubenswrapper[4971]: I1127 07:10:17.636449 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/427779b7-166e-4422-9db7-b9a2ab5db7f2-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-4c2cw\" (UID: \"427779b7-166e-4422-9db7-b9a2ab5db7f2\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-4c2cw" Nov 27 07:10:17 crc kubenswrapper[4971]: I1127 07:10:17.658492 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/427779b7-166e-4422-9db7-b9a2ab5db7f2-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-4c2cw\" (UID: \"427779b7-166e-4422-9db7-b9a2ab5db7f2\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-4c2cw" Nov 27 07:10:17 crc kubenswrapper[4971]: I1127 07:10:17.662310 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmfrl\" (UniqueName: \"kubernetes.io/projected/427779b7-166e-4422-9db7-b9a2ab5db7f2-kube-api-access-kmfrl\") pod \"cert-manager-webhook-f4fb5df64-4c2cw\" (UID: \"427779b7-166e-4422-9db7-b9a2ab5db7f2\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-4c2cw" Nov 27 07:10:17 crc kubenswrapper[4971]: I1127 07:10:17.689156 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-4c2cw" Nov 27 07:10:17 crc kubenswrapper[4971]: I1127 07:10:17.909130 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-4c2cw"] Nov 27 07:10:18 crc kubenswrapper[4971]: I1127 07:10:18.466103 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-4c2cw" event={"ID":"427779b7-166e-4422-9db7-b9a2ab5db7f2","Type":"ContainerStarted","Data":"cf135d558bbdde6697c94a88a423c2bda3d62ac0e4356bf5012b2b6595bd351f"} Nov 27 07:10:20 crc kubenswrapper[4971]: I1127 07:10:20.138140 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-nmpm9"] Nov 27 07:10:20 crc kubenswrapper[4971]: I1127 07:10:20.139401 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-nmpm9" Nov 27 07:10:20 crc kubenswrapper[4971]: I1127 07:10:20.145948 4971 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-vjlbc" Nov 27 07:10:20 crc kubenswrapper[4971]: I1127 07:10:20.148273 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-nmpm9"] Nov 27 07:10:20 crc kubenswrapper[4971]: I1127 07:10:20.275358 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb24c\" (UniqueName: \"kubernetes.io/projected/ed50780c-b184-42e4-9551-656f8abb0d17-kube-api-access-xb24c\") pod \"cert-manager-cainjector-855d9ccff4-nmpm9\" (UID: \"ed50780c-b184-42e4-9551-656f8abb0d17\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-nmpm9" Nov 27 07:10:20 crc kubenswrapper[4971]: I1127 07:10:20.275526 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ed50780c-b184-42e4-9551-656f8abb0d17-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-nmpm9\" (UID: \"ed50780c-b184-42e4-9551-656f8abb0d17\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-nmpm9" Nov 27 07:10:20 crc kubenswrapper[4971]: I1127 07:10:20.377111 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb24c\" (UniqueName: \"kubernetes.io/projected/ed50780c-b184-42e4-9551-656f8abb0d17-kube-api-access-xb24c\") pod \"cert-manager-cainjector-855d9ccff4-nmpm9\" (UID: \"ed50780c-b184-42e4-9551-656f8abb0d17\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-nmpm9" Nov 27 07:10:20 crc kubenswrapper[4971]: I1127 07:10:20.377170 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ed50780c-b184-42e4-9551-656f8abb0d17-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-nmpm9\" (UID: \"ed50780c-b184-42e4-9551-656f8abb0d17\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-nmpm9" Nov 27 07:10:20 crc kubenswrapper[4971]: I1127 07:10:20.400499 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb24c\" (UniqueName: \"kubernetes.io/projected/ed50780c-b184-42e4-9551-656f8abb0d17-kube-api-access-xb24c\") pod \"cert-manager-cainjector-855d9ccff4-nmpm9\" (UID: \"ed50780c-b184-42e4-9551-656f8abb0d17\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-nmpm9" Nov 27 07:10:20 crc kubenswrapper[4971]: I1127 07:10:20.401429 4971 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ed50780c-b184-42e4-9551-656f8abb0d17-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-nmpm9\" (UID: \"ed50780c-b184-42e4-9551-656f8abb0d17\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-nmpm9" Nov 27 07:10:20 crc kubenswrapper[4971]: I1127 07:10:20.477251 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-nmpm9" Nov 27 07:10:20 crc kubenswrapper[4971]: I1127 07:10:20.769118 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-nmpm9"] Nov 27 07:10:20 crc kubenswrapper[4971]: W1127 07:10:20.784929 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded50780c_b184_42e4_9551_656f8abb0d17.slice/crio-e493ad9d841d748caaa5ce6aec6f2a4b8b2f5878148a61d5b2e4eac141b17a64 WatchSource:0}: Error finding container e493ad9d841d748caaa5ce6aec6f2a4b8b2f5878148a61d5b2e4eac141b17a64: Status 404 returned error can't find the container with id e493ad9d841d748caaa5ce6aec6f2a4b8b2f5878148a61d5b2e4eac141b17a64 Nov 27 07:10:21 crc kubenswrapper[4971]: I1127 07:10:21.488946 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-nmpm9" event={"ID":"ed50780c-b184-42e4-9551-656f8abb0d17","Type":"ContainerStarted","Data":"e493ad9d841d748caaa5ce6aec6f2a4b8b2f5878148a61d5b2e4eac141b17a64"} Nov 27 07:10:26 crc kubenswrapper[4971]: I1127 07:10:26.521362 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-4c2cw" event={"ID":"427779b7-166e-4422-9db7-b9a2ab5db7f2","Type":"ContainerStarted","Data":"2f5e23f8e0902cfa26ea0bf3677994c87ddc10dd87a4488f330bfd3feaff8539"} Nov 27 07:10:26 crc kubenswrapper[4971]: I1127 07:10:26.522961 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-nmpm9" event={"ID":"ed50780c-b184-42e4-9551-656f8abb0d17","Type":"ContainerStarted","Data":"f493e986006020fd32e220afe27d1d23393aee864374f245bd3898106c722b1a"} Nov 27 07:10:26 crc kubenswrapper[4971]: I1127 07:10:26.523047 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-4c2cw" Nov 27 07:10:26 crc kubenswrapper[4971]: I1127 07:10:26.536633 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-4c2cw" podStartSLOduration=1.7062919380000001 podStartE2EDuration="9.536617599s" podCreationTimestamp="2025-11-27 07:10:17 +0000 UTC" firstStartedPulling="2025-11-27 07:10:17.923040421 +0000 UTC m=+1056.115084339" lastFinishedPulling="2025-11-27 07:10:25.753366082 +0000 UTC m=+1063.945410000" observedRunningTime="2025-11-27 07:10:26.536565328 +0000 UTC m=+1064.728609256" watchObservedRunningTime="2025-11-27 07:10:26.536617599 +0000 UTC m=+1064.728661517" Nov 27 07:10:26 crc kubenswrapper[4971]: I1127 07:10:26.556546 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-nmpm9" podStartSLOduration=1.6181770819999999 podStartE2EDuration="6.556513047s" podCreationTimestamp="2025-11-27 07:10:20 +0000 UTC" firstStartedPulling="2025-11-27 07:10:20.787106733 +0000 UTC m=+1058.979150651" lastFinishedPulling="2025-11-27 07:10:25.725442708 +0000 UTC 
m=+1063.917486616" observedRunningTime="2025-11-27 07:10:26.551618532 +0000 UTC m=+1064.743662450" watchObservedRunningTime="2025-11-27 07:10:26.556513047 +0000 UTC m=+1064.748556955" Nov 27 07:10:32 crc kubenswrapper[4971]: I1127 07:10:32.694422 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-4c2cw" Nov 27 07:10:35 crc kubenswrapper[4971]: I1127 07:10:35.290557 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-rdkl2"] Nov 27 07:10:35 crc kubenswrapper[4971]: I1127 07:10:35.291507 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-rdkl2" Nov 27 07:10:35 crc kubenswrapper[4971]: I1127 07:10:35.296827 4971 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-t2lcm" Nov 27 07:10:35 crc kubenswrapper[4971]: I1127 07:10:35.316708 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-rdkl2"] Nov 27 07:10:35 crc kubenswrapper[4971]: I1127 07:10:35.415334 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3184f0b5-107c-4f9b-a3aa-e5d2f7b30fba-bound-sa-token\") pod \"cert-manager-86cb77c54b-rdkl2\" (UID: \"3184f0b5-107c-4f9b-a3aa-e5d2f7b30fba\") " pod="cert-manager/cert-manager-86cb77c54b-rdkl2" Nov 27 07:10:35 crc kubenswrapper[4971]: I1127 07:10:35.416073 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rn6nr\" (UniqueName: \"kubernetes.io/projected/3184f0b5-107c-4f9b-a3aa-e5d2f7b30fba-kube-api-access-rn6nr\") pod \"cert-manager-86cb77c54b-rdkl2\" (UID: \"3184f0b5-107c-4f9b-a3aa-e5d2f7b30fba\") " pod="cert-manager/cert-manager-86cb77c54b-rdkl2" Nov 27 07:10:35 crc kubenswrapper[4971]: I1127 07:10:35.517416 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3184f0b5-107c-4f9b-a3aa-e5d2f7b30fba-bound-sa-token\") pod \"cert-manager-86cb77c54b-rdkl2\" (UID: \"3184f0b5-107c-4f9b-a3aa-e5d2f7b30fba\") " pod="cert-manager/cert-manager-86cb77c54b-rdkl2" Nov 27 07:10:35 crc kubenswrapper[4971]: I1127 07:10:35.517520 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rn6nr\" (UniqueName: \"kubernetes.io/projected/3184f0b5-107c-4f9b-a3aa-e5d2f7b30fba-kube-api-access-rn6nr\") pod \"cert-manager-86cb77c54b-rdkl2\" (UID: \"3184f0b5-107c-4f9b-a3aa-e5d2f7b30fba\") " pod="cert-manager/cert-manager-86cb77c54b-rdkl2" Nov 27 07:10:35 crc kubenswrapper[4971]: I1127 07:10:35.542132 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3184f0b5-107c-4f9b-a3aa-e5d2f7b30fba-bound-sa-token\") pod \"cert-manager-86cb77c54b-rdkl2\" (UID: \"3184f0b5-107c-4f9b-a3aa-e5d2f7b30fba\") " pod="cert-manager/cert-manager-86cb77c54b-rdkl2" Nov 27 07:10:35 crc kubenswrapper[4971]: I1127 07:10:35.542360 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rn6nr\" (UniqueName: \"kubernetes.io/projected/3184f0b5-107c-4f9b-a3aa-e5d2f7b30fba-kube-api-access-rn6nr\") pod \"cert-manager-86cb77c54b-rdkl2\" (UID: \"3184f0b5-107c-4f9b-a3aa-e5d2f7b30fba\") " pod="cert-manager/cert-manager-86cb77c54b-rdkl2" Nov 27 07:10:35 crc kubenswrapper[4971]: I1127 
07:10:35.616714 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-rdkl2" Nov 27 07:10:36 crc kubenswrapper[4971]: I1127 07:10:36.028188 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-rdkl2"] Nov 27 07:10:36 crc kubenswrapper[4971]: I1127 07:10:36.589906 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-rdkl2" event={"ID":"3184f0b5-107c-4f9b-a3aa-e5d2f7b30fba","Type":"ContainerStarted","Data":"e320ab4dfa899a273d6aa4192a9be2830da16114d8d6e6960c90e73c2de6b972"} Nov 27 07:10:36 crc kubenswrapper[4971]: I1127 07:10:36.589956 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-rdkl2" event={"ID":"3184f0b5-107c-4f9b-a3aa-e5d2f7b30fba","Type":"ContainerStarted","Data":"7b0256c90cc3e275129738ce59761812a30991c33506c00f55042057174b484c"} Nov 27 07:10:36 crc kubenswrapper[4971]: I1127 07:10:36.607017 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-rdkl2" podStartSLOduration=1.606996318 podStartE2EDuration="1.606996318s" podCreationTimestamp="2025-11-27 07:10:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:10:36.604033281 +0000 UTC m=+1074.796077209" watchObservedRunningTime="2025-11-27 07:10:36.606996318 +0000 UTC m=+1074.799040246" Nov 27 07:10:46 crc kubenswrapper[4971]: I1127 07:10:46.204609 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-jsnpj"] Nov 27 07:10:46 crc kubenswrapper[4971]: I1127 07:10:46.206286 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jsnpj" Nov 27 07:10:46 crc kubenswrapper[4971]: I1127 07:10:46.209743 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-kw4gw" Nov 27 07:10:46 crc kubenswrapper[4971]: I1127 07:10:46.210844 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Nov 27 07:10:46 crc kubenswrapper[4971]: I1127 07:10:46.210850 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Nov 27 07:10:46 crc kubenswrapper[4971]: I1127 07:10:46.218121 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jsnpj"] Nov 27 07:10:46 crc kubenswrapper[4971]: I1127 07:10:46.385187 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncwpn\" (UniqueName: \"kubernetes.io/projected/f9a4680f-4b97-4549-8f14-5115a63c84a2-kube-api-access-ncwpn\") pod \"openstack-operator-index-jsnpj\" (UID: \"f9a4680f-4b97-4549-8f14-5115a63c84a2\") " pod="openstack-operators/openstack-operator-index-jsnpj" Nov 27 07:10:46 crc kubenswrapper[4971]: I1127 07:10:46.487188 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncwpn\" (UniqueName: \"kubernetes.io/projected/f9a4680f-4b97-4549-8f14-5115a63c84a2-kube-api-access-ncwpn\") pod \"openstack-operator-index-jsnpj\" (UID: \"f9a4680f-4b97-4549-8f14-5115a63c84a2\") " pod="openstack-operators/openstack-operator-index-jsnpj" Nov 27 07:10:46 crc kubenswrapper[4971]: I1127 07:10:46.513060 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncwpn\" (UniqueName: \"kubernetes.io/projected/f9a4680f-4b97-4549-8f14-5115a63c84a2-kube-api-access-ncwpn\") pod \"openstack-operator-index-jsnpj\" (UID: \"f9a4680f-4b97-4549-8f14-5115a63c84a2\") " pod="openstack-operators/openstack-operator-index-jsnpj" Nov 27 07:10:46 crc kubenswrapper[4971]: I1127 07:10:46.524255 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jsnpj" Nov 27 07:10:46 crc kubenswrapper[4971]: I1127 07:10:46.961973 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jsnpj"] Nov 27 07:10:46 crc kubenswrapper[4971]: W1127 07:10:46.963665 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf9a4680f_4b97_4549_8f14_5115a63c84a2.slice/crio-4dfd61834f8f62df6fbcb9280665c9f3e70b86392384cd0176f2ffb4f62beaee WatchSource:0}: Error finding container 4dfd61834f8f62df6fbcb9280665c9f3e70b86392384cd0176f2ffb4f62beaee: Status 404 returned error can't find the container with id 4dfd61834f8f62df6fbcb9280665c9f3e70b86392384cd0176f2ffb4f62beaee Nov 27 07:10:47 crc kubenswrapper[4971]: I1127 07:10:47.677058 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jsnpj" event={"ID":"f9a4680f-4b97-4549-8f14-5115a63c84a2","Type":"ContainerStarted","Data":"4dfd61834f8f62df6fbcb9280665c9f3e70b86392384cd0176f2ffb4f62beaee"} Nov 27 07:10:48 crc kubenswrapper[4971]: I1127 07:10:48.684149 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jsnpj" event={"ID":"f9a4680f-4b97-4549-8f14-5115a63c84a2","Type":"ContainerStarted","Data":"7fea0fe79057e12b0a36d67e1da53b8c9722c4d7182b1815b1fa7086776b4992"} Nov 27 07:10:49 crc kubenswrapper[4971]: I1127 07:10:49.573231 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-jsnpj" podStartSLOduration=2.596856044 podStartE2EDuration="3.573204645s" podCreationTimestamp="2025-11-27 07:10:46 +0000 UTC" firstStartedPulling="2025-11-27 07:10:46.966576838 +0000 UTC m=+1085.158620756" lastFinishedPulling="2025-11-27 07:10:47.942925439 +0000 UTC m=+1086.134969357" observedRunningTime="2025-11-27 07:10:48.703771234 +0000 UTC m=+1086.895815172" watchObservedRunningTime="2025-11-27 07:10:49.573204645 +0000 UTC m=+1087.765248573" Nov 27 07:10:49 crc kubenswrapper[4971]: I1127 07:10:49.576986 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jsnpj"] Nov 27 07:10:50 crc kubenswrapper[4971]: I1127 07:10:50.180768 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-r5brv"] Nov 27 07:10:50 crc kubenswrapper[4971]: I1127 07:10:50.181893 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-r5brv" Nov 27 07:10:50 crc kubenswrapper[4971]: I1127 07:10:50.195800 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-r5brv"] Nov 27 07:10:50 crc kubenswrapper[4971]: I1127 07:10:50.344749 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76v98\" (UniqueName: \"kubernetes.io/projected/90063128-e4a8-4b8a-b830-44c061b9f533-kube-api-access-76v98\") pod \"openstack-operator-index-r5brv\" (UID: \"90063128-e4a8-4b8a-b830-44c061b9f533\") " pod="openstack-operators/openstack-operator-index-r5brv" Nov 27 07:10:50 crc kubenswrapper[4971]: I1127 07:10:50.446947 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76v98\" (UniqueName: \"kubernetes.io/projected/90063128-e4a8-4b8a-b830-44c061b9f533-kube-api-access-76v98\") pod \"openstack-operator-index-r5brv\" (UID: \"90063128-e4a8-4b8a-b830-44c061b9f533\") " pod="openstack-operators/openstack-operator-index-r5brv" Nov 27 07:10:50 crc kubenswrapper[4971]: I1127 07:10:50.471781 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76v98\" (UniqueName: \"kubernetes.io/projected/90063128-e4a8-4b8a-b830-44c061b9f533-kube-api-access-76v98\") pod \"openstack-operator-index-r5brv\" (UID: \"90063128-e4a8-4b8a-b830-44c061b9f533\") " pod="openstack-operators/openstack-operator-index-r5brv" Nov 27 07:10:50 crc kubenswrapper[4971]: I1127 07:10:50.499773 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-r5brv" Nov 27 07:10:50 crc kubenswrapper[4971]: I1127 07:10:50.701015 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-jsnpj" podUID="f9a4680f-4b97-4549-8f14-5115a63c84a2" containerName="registry-server" containerID="cri-o://7fea0fe79057e12b0a36d67e1da53b8c9722c4d7182b1815b1fa7086776b4992" gracePeriod=2 Nov 27 07:10:50 crc kubenswrapper[4971]: I1127 07:10:50.761661 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-r5brv"] Nov 27 07:10:50 crc kubenswrapper[4971]: W1127 07:10:50.799417 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod90063128_e4a8_4b8a_b830_44c061b9f533.slice/crio-0b758a4ce60eac0b8952e9b04f54409409e9ca564174c80a2c8b98e9433dfd99 WatchSource:0}: Error finding container 0b758a4ce60eac0b8952e9b04f54409409e9ca564174c80a2c8b98e9433dfd99: Status 404 returned error can't find the container with id 0b758a4ce60eac0b8952e9b04f54409409e9ca564174c80a2c8b98e9433dfd99 Nov 27 07:10:51 crc kubenswrapper[4971]: I1127 07:10:51.035759 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jsnpj" Nov 27 07:10:51 crc kubenswrapper[4971]: I1127 07:10:51.157665 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncwpn\" (UniqueName: \"kubernetes.io/projected/f9a4680f-4b97-4549-8f14-5115a63c84a2-kube-api-access-ncwpn\") pod \"f9a4680f-4b97-4549-8f14-5115a63c84a2\" (UID: \"f9a4680f-4b97-4549-8f14-5115a63c84a2\") " Nov 27 07:10:51 crc kubenswrapper[4971]: I1127 07:10:51.165922 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9a4680f-4b97-4549-8f14-5115a63c84a2-kube-api-access-ncwpn" (OuterVolumeSpecName: "kube-api-access-ncwpn") pod "f9a4680f-4b97-4549-8f14-5115a63c84a2" (UID: "f9a4680f-4b97-4549-8f14-5115a63c84a2"). InnerVolumeSpecName "kube-api-access-ncwpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:10:51 crc kubenswrapper[4971]: I1127 07:10:51.260081 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncwpn\" (UniqueName: \"kubernetes.io/projected/f9a4680f-4b97-4549-8f14-5115a63c84a2-kube-api-access-ncwpn\") on node \"crc\" DevicePath \"\"" Nov 27 07:10:51 crc kubenswrapper[4971]: I1127 07:10:51.712185 4971 generic.go:334] "Generic (PLEG): container finished" podID="f9a4680f-4b97-4549-8f14-5115a63c84a2" containerID="7fea0fe79057e12b0a36d67e1da53b8c9722c4d7182b1815b1fa7086776b4992" exitCode=0 Nov 27 07:10:51 crc kubenswrapper[4971]: I1127 07:10:51.712441 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jsnpj" event={"ID":"f9a4680f-4b97-4549-8f14-5115a63c84a2","Type":"ContainerDied","Data":"7fea0fe79057e12b0a36d67e1da53b8c9722c4d7182b1815b1fa7086776b4992"} Nov 27 07:10:51 crc kubenswrapper[4971]: I1127 07:10:51.712593 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jsnpj" Nov 27 07:10:51 crc kubenswrapper[4971]: I1127 07:10:51.712735 4971 scope.go:117] "RemoveContainer" containerID="7fea0fe79057e12b0a36d67e1da53b8c9722c4d7182b1815b1fa7086776b4992" Nov 27 07:10:51 crc kubenswrapper[4971]: I1127 07:10:51.712711 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jsnpj" event={"ID":"f9a4680f-4b97-4549-8f14-5115a63c84a2","Type":"ContainerDied","Data":"4dfd61834f8f62df6fbcb9280665c9f3e70b86392384cd0176f2ffb4f62beaee"} Nov 27 07:10:51 crc kubenswrapper[4971]: I1127 07:10:51.715441 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-r5brv" event={"ID":"90063128-e4a8-4b8a-b830-44c061b9f533","Type":"ContainerStarted","Data":"22b1f76048749ccbd35ca551115b8b7627e72facf75f1ff0f339a9ac2099192d"} Nov 27 07:10:51 crc kubenswrapper[4971]: I1127 07:10:51.715507 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-r5brv" event={"ID":"90063128-e4a8-4b8a-b830-44c061b9f533","Type":"ContainerStarted","Data":"0b758a4ce60eac0b8952e9b04f54409409e9ca564174c80a2c8b98e9433dfd99"} Nov 27 07:10:51 crc kubenswrapper[4971]: I1127 07:10:51.732072 4971 scope.go:117] "RemoveContainer" containerID="7fea0fe79057e12b0a36d67e1da53b8c9722c4d7182b1815b1fa7086776b4992" Nov 27 07:10:51 crc kubenswrapper[4971]: E1127 07:10:51.732609 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fea0fe79057e12b0a36d67e1da53b8c9722c4d7182b1815b1fa7086776b4992\": container with ID starting with 7fea0fe79057e12b0a36d67e1da53b8c9722c4d7182b1815b1fa7086776b4992 not found: ID does not exist" containerID="7fea0fe79057e12b0a36d67e1da53b8c9722c4d7182b1815b1fa7086776b4992" Nov 27 07:10:51 crc kubenswrapper[4971]: I1127 07:10:51.732669 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fea0fe79057e12b0a36d67e1da53b8c9722c4d7182b1815b1fa7086776b4992"} err="failed to get container status \"7fea0fe79057e12b0a36d67e1da53b8c9722c4d7182b1815b1fa7086776b4992\": rpc error: code = NotFound desc = could not find container \"7fea0fe79057e12b0a36d67e1da53b8c9722c4d7182b1815b1fa7086776b4992\": container with ID starting with 7fea0fe79057e12b0a36d67e1da53b8c9722c4d7182b1815b1fa7086776b4992 not found: ID does not exist" Nov 27 07:10:51 crc kubenswrapper[4971]: I1127 07:10:51.747192 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-r5brv" podStartSLOduration=1.240973159 podStartE2EDuration="1.747173982s" podCreationTimestamp="2025-11-27 07:10:50 +0000 UTC" firstStartedPulling="2025-11-27 07:10:50.803922479 +0000 UTC m=+1088.995966397" lastFinishedPulling="2025-11-27 07:10:51.310123302 +0000 UTC m=+1089.502167220" observedRunningTime="2025-11-27 07:10:51.740286748 +0000 UTC m=+1089.932330876" watchObservedRunningTime="2025-11-27 07:10:51.747173982 +0000 UTC m=+1089.939217900" Nov 27 07:10:51 crc kubenswrapper[4971]: I1127 07:10:51.760236 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jsnpj"] Nov 27 07:10:51 crc kubenswrapper[4971]: I1127 07:10:51.767329 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-jsnpj"] Nov 27 07:10:52 crc kubenswrapper[4971]: I1127 07:10:52.560105 4971 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9a4680f-4b97-4549-8f14-5115a63c84a2" path="/var/lib/kubelet/pods/f9a4680f-4b97-4549-8f14-5115a63c84a2/volumes" Nov 27 07:11:00 crc kubenswrapper[4971]: I1127 07:11:00.499941 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-r5brv" Nov 27 07:11:00 crc kubenswrapper[4971]: I1127 07:11:00.500577 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-r5brv" Nov 27 07:11:00 crc kubenswrapper[4971]: I1127 07:11:00.531470 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-r5brv" Nov 27 07:11:00 crc kubenswrapper[4971]: I1127 07:11:00.807681 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-r5brv" Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.018524 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg"] Nov 27 07:11:06 crc kubenswrapper[4971]: E1127 07:11:06.019617 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a4680f-4b97-4549-8f14-5115a63c84a2" containerName="registry-server" Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.019635 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a4680f-4b97-4549-8f14-5115a63c84a2" containerName="registry-server" Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.019819 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a4680f-4b97-4549-8f14-5115a63c84a2" containerName="registry-server" Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.020934 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg" Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.023476 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-t2ggj" Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.032691 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg"] Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.188761 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thvhc\" (UniqueName: \"kubernetes.io/projected/5c497b31-bddd-4d0a-bbf8-89dcd999ff3c-kube-api-access-thvhc\") pod \"b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg\" (UID: \"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c\") " pod="openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg" Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.188817 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5c497b31-bddd-4d0a-bbf8-89dcd999ff3c-bundle\") pod \"b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg\" (UID: \"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c\") " pod="openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg" Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.188854 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5c497b31-bddd-4d0a-bbf8-89dcd999ff3c-util\") pod \"b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg\" (UID: \"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c\") " pod="openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg" Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.290632 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5c497b31-bddd-4d0a-bbf8-89dcd999ff3c-bundle\") pod \"b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg\" (UID: \"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c\") " pod="openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg" Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.290693 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5c497b31-bddd-4d0a-bbf8-89dcd999ff3c-util\") pod \"b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg\" (UID: \"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c\") " pod="openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg" Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.290790 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thvhc\" (UniqueName: \"kubernetes.io/projected/5c497b31-bddd-4d0a-bbf8-89dcd999ff3c-kube-api-access-thvhc\") pod \"b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg\" (UID: \"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c\") " pod="openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg" Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.291191 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/5c497b31-bddd-4d0a-bbf8-89dcd999ff3c-bundle\") pod \"b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg\" (UID: \"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c\") " pod="openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg" Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.291230 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5c497b31-bddd-4d0a-bbf8-89dcd999ff3c-util\") pod \"b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg\" (UID: \"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c\") " pod="openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg" Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.311121 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thvhc\" (UniqueName: \"kubernetes.io/projected/5c497b31-bddd-4d0a-bbf8-89dcd999ff3c-kube-api-access-thvhc\") pod \"b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg\" (UID: \"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c\") " pod="openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg" Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.338877 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg" Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.570794 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg"] Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.820647 4971 generic.go:334] "Generic (PLEG): container finished" podID="5c497b31-bddd-4d0a-bbf8-89dcd999ff3c" containerID="16979d9b2e314f5e29a5e14e849a1ed35d78c0fc0de6488098080641eccb84de" exitCode=0 Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.820832 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg" event={"ID":"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c","Type":"ContainerDied","Data":"16979d9b2e314f5e29a5e14e849a1ed35d78c0fc0de6488098080641eccb84de"} Nov 27 07:11:06 crc kubenswrapper[4971]: I1127 07:11:06.820928 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg" event={"ID":"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c","Type":"ContainerStarted","Data":"5f72d0a42d526a7890a56950d40bfb64079f5d96559984264e23d67d8e6faf5f"} Nov 27 07:11:08 crc kubenswrapper[4971]: I1127 07:11:08.842725 4971 generic.go:334] "Generic (PLEG): container finished" podID="5c497b31-bddd-4d0a-bbf8-89dcd999ff3c" containerID="096e4de69061e9786f4d17e8ad1629c49b802cea974a190638c3953fe685693a" exitCode=0 Nov 27 07:11:08 crc kubenswrapper[4971]: I1127 07:11:08.842849 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg" event={"ID":"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c","Type":"ContainerDied","Data":"096e4de69061e9786f4d17e8ad1629c49b802cea974a190638c3953fe685693a"} Nov 27 07:11:09 crc kubenswrapper[4971]: I1127 07:11:09.859252 4971 generic.go:334] "Generic (PLEG): container finished" podID="5c497b31-bddd-4d0a-bbf8-89dcd999ff3c" containerID="7a930f7950394c501e07c4b465d14c7bf4c6f04acad85dc569c6e3ad4ab3e2b0" exitCode=0 Nov 27 07:11:09 crc kubenswrapper[4971]: I1127 07:11:09.859346 4971 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg" event={"ID":"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c","Type":"ContainerDied","Data":"7a930f7950394c501e07c4b465d14c7bf4c6f04acad85dc569c6e3ad4ab3e2b0"} Nov 27 07:11:11 crc kubenswrapper[4971]: I1127 07:11:11.150663 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg" Nov 27 07:11:11 crc kubenswrapper[4971]: I1127 07:11:11.267133 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thvhc\" (UniqueName: \"kubernetes.io/projected/5c497b31-bddd-4d0a-bbf8-89dcd999ff3c-kube-api-access-thvhc\") pod \"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c\" (UID: \"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c\") " Nov 27 07:11:11 crc kubenswrapper[4971]: I1127 07:11:11.267241 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5c497b31-bddd-4d0a-bbf8-89dcd999ff3c-util\") pod \"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c\" (UID: \"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c\") " Nov 27 07:11:11 crc kubenswrapper[4971]: I1127 07:11:11.267277 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5c497b31-bddd-4d0a-bbf8-89dcd999ff3c-bundle\") pod \"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c\" (UID: \"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c\") " Nov 27 07:11:11 crc kubenswrapper[4971]: I1127 07:11:11.268195 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c497b31-bddd-4d0a-bbf8-89dcd999ff3c-bundle" (OuterVolumeSpecName: "bundle") pod "5c497b31-bddd-4d0a-bbf8-89dcd999ff3c" (UID: "5c497b31-bddd-4d0a-bbf8-89dcd999ff3c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:11:11 crc kubenswrapper[4971]: I1127 07:11:11.272579 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c497b31-bddd-4d0a-bbf8-89dcd999ff3c-kube-api-access-thvhc" (OuterVolumeSpecName: "kube-api-access-thvhc") pod "5c497b31-bddd-4d0a-bbf8-89dcd999ff3c" (UID: "5c497b31-bddd-4d0a-bbf8-89dcd999ff3c"). InnerVolumeSpecName "kube-api-access-thvhc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:11:11 crc kubenswrapper[4971]: I1127 07:11:11.280800 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c497b31-bddd-4d0a-bbf8-89dcd999ff3c-util" (OuterVolumeSpecName: "util") pod "5c497b31-bddd-4d0a-bbf8-89dcd999ff3c" (UID: "5c497b31-bddd-4d0a-bbf8-89dcd999ff3c"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:11:11 crc kubenswrapper[4971]: I1127 07:11:11.369411 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thvhc\" (UniqueName: \"kubernetes.io/projected/5c497b31-bddd-4d0a-bbf8-89dcd999ff3c-kube-api-access-thvhc\") on node \"crc\" DevicePath \"\"" Nov 27 07:11:11 crc kubenswrapper[4971]: I1127 07:11:11.369449 4971 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5c497b31-bddd-4d0a-bbf8-89dcd999ff3c-util\") on node \"crc\" DevicePath \"\"" Nov 27 07:11:11 crc kubenswrapper[4971]: I1127 07:11:11.369462 4971 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5c497b31-bddd-4d0a-bbf8-89dcd999ff3c-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:11:11 crc kubenswrapper[4971]: I1127 07:11:11.877703 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg" event={"ID":"5c497b31-bddd-4d0a-bbf8-89dcd999ff3c","Type":"ContainerDied","Data":"5f72d0a42d526a7890a56950d40bfb64079f5d96559984264e23d67d8e6faf5f"} Nov 27 07:11:11 crc kubenswrapper[4971]: I1127 07:11:11.878093 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f72d0a42d526a7890a56950d40bfb64079f5d96559984264e23d67d8e6faf5f" Nov 27 07:11:11 crc kubenswrapper[4971]: I1127 07:11:11.877763 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg" Nov 27 07:11:17 crc kubenswrapper[4971]: I1127 07:11:17.198578 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-67d8f6cc56-69xws"] Nov 27 07:11:17 crc kubenswrapper[4971]: E1127 07:11:17.199286 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c497b31-bddd-4d0a-bbf8-89dcd999ff3c" containerName="util" Nov 27 07:11:17 crc kubenswrapper[4971]: I1127 07:11:17.199304 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c497b31-bddd-4d0a-bbf8-89dcd999ff3c" containerName="util" Nov 27 07:11:17 crc kubenswrapper[4971]: E1127 07:11:17.199319 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c497b31-bddd-4d0a-bbf8-89dcd999ff3c" containerName="extract" Nov 27 07:11:17 crc kubenswrapper[4971]: I1127 07:11:17.199327 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c497b31-bddd-4d0a-bbf8-89dcd999ff3c" containerName="extract" Nov 27 07:11:17 crc kubenswrapper[4971]: E1127 07:11:17.199337 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c497b31-bddd-4d0a-bbf8-89dcd999ff3c" containerName="pull" Nov 27 07:11:17 crc kubenswrapper[4971]: I1127 07:11:17.199345 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c497b31-bddd-4d0a-bbf8-89dcd999ff3c" containerName="pull" Nov 27 07:11:17 crc kubenswrapper[4971]: I1127 07:11:17.199504 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c497b31-bddd-4d0a-bbf8-89dcd999ff3c" containerName="extract" Nov 27 07:11:17 crc kubenswrapper[4971]: I1127 07:11:17.200131 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-67d8f6cc56-69xws" Nov 27 07:11:17 crc kubenswrapper[4971]: I1127 07:11:17.206289 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-h87db" Nov 27 07:11:17 crc kubenswrapper[4971]: I1127 07:11:17.270353 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-67d8f6cc56-69xws"] Nov 27 07:11:17 crc kubenswrapper[4971]: I1127 07:11:17.389034 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44r9k\" (UniqueName: \"kubernetes.io/projected/577bd944-782b-4fb1-aff2-2c7b564be7a1-kube-api-access-44r9k\") pod \"openstack-operator-controller-operator-67d8f6cc56-69xws\" (UID: \"577bd944-782b-4fb1-aff2-2c7b564be7a1\") " pod="openstack-operators/openstack-operator-controller-operator-67d8f6cc56-69xws" Nov 27 07:11:17 crc kubenswrapper[4971]: I1127 07:11:17.490798 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44r9k\" (UniqueName: \"kubernetes.io/projected/577bd944-782b-4fb1-aff2-2c7b564be7a1-kube-api-access-44r9k\") pod \"openstack-operator-controller-operator-67d8f6cc56-69xws\" (UID: \"577bd944-782b-4fb1-aff2-2c7b564be7a1\") " pod="openstack-operators/openstack-operator-controller-operator-67d8f6cc56-69xws" Nov 27 07:11:17 crc kubenswrapper[4971]: I1127 07:11:17.511195 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44r9k\" (UniqueName: \"kubernetes.io/projected/577bd944-782b-4fb1-aff2-2c7b564be7a1-kube-api-access-44r9k\") pod \"openstack-operator-controller-operator-67d8f6cc56-69xws\" (UID: \"577bd944-782b-4fb1-aff2-2c7b564be7a1\") " pod="openstack-operators/openstack-operator-controller-operator-67d8f6cc56-69xws" Nov 27 07:11:17 crc kubenswrapper[4971]: I1127 07:11:17.523755 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-67d8f6cc56-69xws" Nov 27 07:11:17 crc kubenswrapper[4971]: I1127 07:11:17.777438 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-67d8f6cc56-69xws"] Nov 27 07:11:17 crc kubenswrapper[4971]: W1127 07:11:17.789019 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod577bd944_782b_4fb1_aff2_2c7b564be7a1.slice/crio-821f81713ab3e41ab51d9a45e3468365b0a55919094a9f009c2d5c319480c04a WatchSource:0}: Error finding container 821f81713ab3e41ab51d9a45e3468365b0a55919094a9f009c2d5c319480c04a: Status 404 returned error can't find the container with id 821f81713ab3e41ab51d9a45e3468365b0a55919094a9f009c2d5c319480c04a Nov 27 07:11:17 crc kubenswrapper[4971]: I1127 07:11:17.918346 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-67d8f6cc56-69xws" event={"ID":"577bd944-782b-4fb1-aff2-2c7b564be7a1","Type":"ContainerStarted","Data":"821f81713ab3e41ab51d9a45e3468365b0a55919094a9f009c2d5c319480c04a"} Nov 27 07:11:22 crc kubenswrapper[4971]: I1127 07:11:22.983711 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-67d8f6cc56-69xws" event={"ID":"577bd944-782b-4fb1-aff2-2c7b564be7a1","Type":"ContainerStarted","Data":"ef54489f86c2a82ed2790dab0a862da816d1867f3372de564445b8a14322980a"} Nov 27 07:11:22 crc kubenswrapper[4971]: I1127 07:11:22.984292 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-67d8f6cc56-69xws" Nov 27 07:11:23 crc kubenswrapper[4971]: I1127 07:11:23.018911 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-67d8f6cc56-69xws" podStartSLOduration=1.507929152 podStartE2EDuration="6.018881539s" podCreationTimestamp="2025-11-27 07:11:17 +0000 UTC" firstStartedPulling="2025-11-27 07:11:17.791258463 +0000 UTC m=+1115.983302381" lastFinishedPulling="2025-11-27 07:11:22.30221085 +0000 UTC m=+1120.494254768" observedRunningTime="2025-11-27 07:11:23.013688857 +0000 UTC m=+1121.205732815" watchObservedRunningTime="2025-11-27 07:11:23.018881539 +0000 UTC m=+1121.210925467" Nov 27 07:11:26 crc kubenswrapper[4971]: I1127 07:11:26.413186 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:11:26 crc kubenswrapper[4971]: I1127 07:11:26.414001 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:11:27 crc kubenswrapper[4971]: I1127 07:11:27.526642 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-67d8f6cc56-69xws" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.612099 4971 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/barbican-operator-controller-manager-7b64f4fb85-bp29n"] Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.614028 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-bp29n" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.617591 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-vmj4c" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.641190 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6b7f75547b-p5tg5"] Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.642510 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-p5tg5" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.648638 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-d582q" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.693746 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b64f4fb85-bp29n"] Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.697869 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nsnm\" (UniqueName: \"kubernetes.io/projected/9be13ef4-b6e0-434f-aea4-a0b374facb97-kube-api-access-4nsnm\") pod \"barbican-operator-controller-manager-7b64f4fb85-bp29n\" (UID: \"9be13ef4-b6e0-434f-aea4-a0b374facb97\") " pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-bp29n" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.726938 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-955677c94-rm5cw"] Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.728434 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-955677c94-rm5cw" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.732015 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-5kr6f" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.735300 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6b7f75547b-p5tg5"] Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.770615 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-955677c94-rm5cw"] Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.784545 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-589cbd6b5b-mg4bw"] Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.785823 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-mg4bw" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.792404 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-p99bn" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.799211 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nsnm\" (UniqueName: \"kubernetes.io/projected/9be13ef4-b6e0-434f-aea4-a0b374facb97-kube-api-access-4nsnm\") pod \"barbican-operator-controller-manager-7b64f4fb85-bp29n\" (UID: \"9be13ef4-b6e0-434f-aea4-a0b374facb97\") " pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-bp29n" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.799257 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qx75\" (UniqueName: \"kubernetes.io/projected/f9f71029-8412-4967-acf0-ad2d2e7c31f4-kube-api-access-5qx75\") pod \"cinder-operator-controller-manager-6b7f75547b-p5tg5\" (UID: \"f9f71029-8412-4967-acf0-ad2d2e7c31f4\") " pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-p5tg5" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.811814 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-589cbd6b5b-mg4bw"] Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.828374 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5d494799bf-q7w5w"] Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.830046 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-q7w5w" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.837448 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-lb95b" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.838017 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nsnm\" (UniqueName: \"kubernetes.io/projected/9be13ef4-b6e0-434f-aea4-a0b374facb97-kube-api-access-4nsnm\") pod \"barbican-operator-controller-manager-7b64f4fb85-bp29n\" (UID: \"9be13ef4-b6e0-434f-aea4-a0b374facb97\") " pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-bp29n" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.843217 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5b77f656f-5m76m"] Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.844750 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5b77f656f-5m76m" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.847781 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-9cp9l" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.852605 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5d494799bf-q7w5w"] Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.865506 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5b77f656f-5m76m"] Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.891243 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp"] Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.892489 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.894363 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-j72zz" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.898096 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.900457 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-7c6m7"] Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.900610 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dssmw\" (UniqueName: \"kubernetes.io/projected/784d6231-82a4-4f58-8de7-0a3f6378e2b0-kube-api-access-dssmw\") pod \"horizon-operator-controller-manager-5d494799bf-q7w5w\" (UID: \"784d6231-82a4-4f58-8de7-0a3f6378e2b0\") " pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-q7w5w" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.900679 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qx75\" (UniqueName: \"kubernetes.io/projected/f9f71029-8412-4967-acf0-ad2d2e7c31f4-kube-api-access-5qx75\") pod \"cinder-operator-controller-manager-6b7f75547b-p5tg5\" (UID: \"f9f71029-8412-4967-acf0-ad2d2e7c31f4\") " pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-p5tg5" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.900722 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzwsp\" (UniqueName: \"kubernetes.io/projected/e9ec811b-a5ea-4068-abe7-fdfbdaba39b3-kube-api-access-bzwsp\") pod \"glance-operator-controller-manager-589cbd6b5b-mg4bw\" (UID: \"e9ec811b-a5ea-4068-abe7-fdfbdaba39b3\") " pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-mg4bw" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.900750 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhq5s\" (UniqueName: \"kubernetes.io/projected/d4125c07-9503-4a93-a635-5863fdf26632-kube-api-access-fhq5s\") pod \"designate-operator-controller-manager-955677c94-rm5cw\" (UID: \"d4125c07-9503-4a93-a635-5863fdf26632\") " 
pod="openstack-operators/designate-operator-controller-manager-955677c94-rm5cw" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.902420 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-7c6m7" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.906816 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-n2lt9" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.911404 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7b4567c7cf-gn2tw"] Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.913298 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-gn2tw" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.917085 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-r4ffz" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.927221 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qx75\" (UniqueName: \"kubernetes.io/projected/f9f71029-8412-4967-acf0-ad2d2e7c31f4-kube-api-access-5qx75\") pod \"cinder-operator-controller-manager-6b7f75547b-p5tg5\" (UID: \"f9f71029-8412-4967-acf0-ad2d2e7c31f4\") " pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-p5tg5" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.928707 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp"] Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.938755 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-7c6m7"] Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.939152 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-bp29n" Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.952227 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7b4567c7cf-gn2tw"] Nov 27 07:11:44 crc kubenswrapper[4971]: I1127 07:11:44.992606 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-5d499bf58b-t8rr8"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:44.994009 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-t8rr8" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:44.995083 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-p5tg5" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.000957 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-wfvtb" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.002301 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eee345d4-5e02-4a96-a204-383fd410e564-cert\") pod \"infra-operator-controller-manager-57548d458d-zdjzp\" (UID: \"eee345d4-5e02-4a96-a204-383fd410e564\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.002379 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzwsp\" (UniqueName: \"kubernetes.io/projected/e9ec811b-a5ea-4068-abe7-fdfbdaba39b3-kube-api-access-bzwsp\") pod \"glance-operator-controller-manager-589cbd6b5b-mg4bw\" (UID: \"e9ec811b-a5ea-4068-abe7-fdfbdaba39b3\") " pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-mg4bw" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.002408 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhq5s\" (UniqueName: \"kubernetes.io/projected/d4125c07-9503-4a93-a635-5863fdf26632-kube-api-access-fhq5s\") pod \"designate-operator-controller-manager-955677c94-rm5cw\" (UID: \"d4125c07-9503-4a93-a635-5863fdf26632\") " pod="openstack-operators/designate-operator-controller-manager-955677c94-rm5cw" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.002463 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jr5lp\" (UniqueName: \"kubernetes.io/projected/bc2fe762-04b6-4c5a-bf42-ce67dd839609-kube-api-access-jr5lp\") pod \"ironic-operator-controller-manager-67cb4dc6d4-7c6m7\" (UID: \"bc2fe762-04b6-4c5a-bf42-ce67dd839609\") " pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-7c6m7" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.002494 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dssmw\" (UniqueName: \"kubernetes.io/projected/784d6231-82a4-4f58-8de7-0a3f6378e2b0-kube-api-access-dssmw\") pod \"horizon-operator-controller-manager-5d494799bf-q7w5w\" (UID: \"784d6231-82a4-4f58-8de7-0a3f6378e2b0\") " pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-q7w5w" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.002512 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpk9l\" (UniqueName: \"kubernetes.io/projected/2e4648d4-38e3-4e9b-a68d-7db912f67470-kube-api-access-wpk9l\") pod \"heat-operator-controller-manager-5b77f656f-5m76m\" (UID: \"2e4648d4-38e3-4e9b-a68d-7db912f67470\") " pod="openstack-operators/heat-operator-controller-manager-5b77f656f-5m76m" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.002546 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czrx2\" (UniqueName: \"kubernetes.io/projected/6488635b-4233-4898-b540-8c7433b25d0f-kube-api-access-czrx2\") pod \"keystone-operator-controller-manager-7b4567c7cf-gn2tw\" (UID: \"6488635b-4233-4898-b540-8c7433b25d0f\") " 
pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-gn2tw" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.002593 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nd8t8\" (UniqueName: \"kubernetes.io/projected/eee345d4-5e02-4a96-a204-383fd410e564-kube-api-access-nd8t8\") pod \"infra-operator-controller-manager-57548d458d-zdjzp\" (UID: \"eee345d4-5e02-4a96-a204-383fd410e564\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.024403 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5d499bf58b-t8rr8"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.036320 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-xscln"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.037573 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-xscln" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.043808 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-xscln"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.047994 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-tbrks" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.061242 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6fdcddb789-q47ql"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.064987 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-q47ql" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.065469 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzwsp\" (UniqueName: \"kubernetes.io/projected/e9ec811b-a5ea-4068-abe7-fdfbdaba39b3-kube-api-access-bzwsp\") pod \"glance-operator-controller-manager-589cbd6b5b-mg4bw\" (UID: \"e9ec811b-a5ea-4068-abe7-fdfbdaba39b3\") " pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-mg4bw" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.072204 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhq5s\" (UniqueName: \"kubernetes.io/projected/d4125c07-9503-4a93-a635-5863fdf26632-kube-api-access-fhq5s\") pod \"designate-operator-controller-manager-955677c94-rm5cw\" (UID: \"d4125c07-9503-4a93-a635-5863fdf26632\") " pod="openstack-operators/designate-operator-controller-manager-955677c94-rm5cw" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.077277 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dssmw\" (UniqueName: \"kubernetes.io/projected/784d6231-82a4-4f58-8de7-0a3f6378e2b0-kube-api-access-dssmw\") pod \"horizon-operator-controller-manager-5d494799bf-q7w5w\" (UID: \"784d6231-82a4-4f58-8de7-0a3f6378e2b0\") " pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-q7w5w" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.080472 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-kw9gm" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.104153 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nd8t8\" (UniqueName: \"kubernetes.io/projected/eee345d4-5e02-4a96-a204-383fd410e564-kube-api-access-nd8t8\") pod \"infra-operator-controller-manager-57548d458d-zdjzp\" (UID: \"eee345d4-5e02-4a96-a204-383fd410e564\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.104209 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eee345d4-5e02-4a96-a204-383fd410e564-cert\") pod \"infra-operator-controller-manager-57548d458d-zdjzp\" (UID: \"eee345d4-5e02-4a96-a204-383fd410e564\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.104278 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzthq\" (UniqueName: \"kubernetes.io/projected/ccd00428-c0de-472f-a9a3-19ec6524d4c8-kube-api-access-kzthq\") pod \"mariadb-operator-controller-manager-66f4dd4bc7-xscln\" (UID: \"ccd00428-c0de-472f-a9a3-19ec6524d4c8\") " pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-xscln" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.104312 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jr5lp\" (UniqueName: \"kubernetes.io/projected/bc2fe762-04b6-4c5a-bf42-ce67dd839609-kube-api-access-jr5lp\") pod \"ironic-operator-controller-manager-67cb4dc6d4-7c6m7\" (UID: \"bc2fe762-04b6-4c5a-bf42-ce67dd839609\") " pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-7c6m7" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.104344 4971 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpk9l\" (UniqueName: \"kubernetes.io/projected/2e4648d4-38e3-4e9b-a68d-7db912f67470-kube-api-access-wpk9l\") pod \"heat-operator-controller-manager-5b77f656f-5m76m\" (UID: \"2e4648d4-38e3-4e9b-a68d-7db912f67470\") " pod="openstack-operators/heat-operator-controller-manager-5b77f656f-5m76m" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.104362 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czrx2\" (UniqueName: \"kubernetes.io/projected/6488635b-4233-4898-b540-8c7433b25d0f-kube-api-access-czrx2\") pod \"keystone-operator-controller-manager-7b4567c7cf-gn2tw\" (UID: \"6488635b-4233-4898-b540-8c7433b25d0f\") " pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-gn2tw" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.104393 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fm9ml\" (UniqueName: \"kubernetes.io/projected/a5e429ee-33ed-40b1-878c-c4cf36ffd5ff-kube-api-access-fm9ml\") pod \"manila-operator-controller-manager-5d499bf58b-t8rr8\" (UID: \"a5e429ee-33ed-40b1-878c-c4cf36ffd5ff\") " pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-t8rr8" Nov 27 07:11:45 crc kubenswrapper[4971]: E1127 07:11:45.104760 4971 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Nov 27 07:11:45 crc kubenswrapper[4971]: E1127 07:11:45.104820 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eee345d4-5e02-4a96-a204-383fd410e564-cert podName:eee345d4-5e02-4a96-a204-383fd410e564 nodeName:}" failed. No retries permitted until 2025-11-27 07:11:45.604793651 +0000 UTC m=+1143.796837569 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/eee345d4-5e02-4a96-a204-383fd410e564-cert") pod "infra-operator-controller-manager-57548d458d-zdjzp" (UID: "eee345d4-5e02-4a96-a204-383fd410e564") : secret "infra-operator-webhook-server-cert" not found Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.106036 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-mg4bw" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.109516 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-79556f57fc-2zx72"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.110828 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-2zx72" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.114149 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-22p76" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.135753 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpk9l\" (UniqueName: \"kubernetes.io/projected/2e4648d4-38e3-4e9b-a68d-7db912f67470-kube-api-access-wpk9l\") pod \"heat-operator-controller-manager-5b77f656f-5m76m\" (UID: \"2e4648d4-38e3-4e9b-a68d-7db912f67470\") " pod="openstack-operators/heat-operator-controller-manager-5b77f656f-5m76m" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.146648 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6fdcddb789-q47ql"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.150743 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nd8t8\" (UniqueName: \"kubernetes.io/projected/eee345d4-5e02-4a96-a204-383fd410e564-kube-api-access-nd8t8\") pod \"infra-operator-controller-manager-57548d458d-zdjzp\" (UID: \"eee345d4-5e02-4a96-a204-383fd410e564\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.151988 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czrx2\" (UniqueName: \"kubernetes.io/projected/6488635b-4233-4898-b540-8c7433b25d0f-kube-api-access-czrx2\") pod \"keystone-operator-controller-manager-7b4567c7cf-gn2tw\" (UID: \"6488635b-4233-4898-b540-8c7433b25d0f\") " pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-gn2tw" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.162794 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jr5lp\" (UniqueName: \"kubernetes.io/projected/bc2fe762-04b6-4c5a-bf42-ce67dd839609-kube-api-access-jr5lp\") pod \"ironic-operator-controller-manager-67cb4dc6d4-7c6m7\" (UID: \"bc2fe762-04b6-4c5a-bf42-ce67dd839609\") " pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-7c6m7" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.173417 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-79556f57fc-2zx72"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.187656 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-64cdc6ff96-7928r"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.234037 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-7928r" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.234430 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-q7w5w" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.238203 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5b77f656f-5m76m" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.259481 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-qfzkl" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.264612 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk5l9\" (UniqueName: \"kubernetes.io/projected/8e16feb0-cfc3-43ba-9411-00788a5c42dd-kube-api-access-zk5l9\") pod \"neutron-operator-controller-manager-6fdcddb789-q47ql\" (UID: \"8e16feb0-cfc3-43ba-9411-00788a5c42dd\") " pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-q47ql" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.264772 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fm9ml\" (UniqueName: \"kubernetes.io/projected/a5e429ee-33ed-40b1-878c-c4cf36ffd5ff-kube-api-access-fm9ml\") pod \"manila-operator-controller-manager-5d499bf58b-t8rr8\" (UID: \"a5e429ee-33ed-40b1-878c-c4cf36ffd5ff\") " pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-t8rr8" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.265125 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzthq\" (UniqueName: \"kubernetes.io/projected/ccd00428-c0de-472f-a9a3-19ec6524d4c8-kube-api-access-kzthq\") pod \"mariadb-operator-controller-manager-66f4dd4bc7-xscln\" (UID: \"ccd00428-c0de-472f-a9a3-19ec6524d4c8\") " pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-xscln" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.265182 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99mbc\" (UniqueName: \"kubernetes.io/projected/f3763945-b889-4f5b-bd9c-2f5bfa93e6d6-kube-api-access-99mbc\") pod \"nova-operator-controller-manager-79556f57fc-2zx72\" (UID: \"f3763945-b889-4f5b-bd9c-2f5bfa93e6d6\") " pod="openstack-operators/nova-operator-controller-manager-79556f57fc-2zx72" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.286407 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-7c6m7" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.307230 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fm9ml\" (UniqueName: \"kubernetes.io/projected/a5e429ee-33ed-40b1-878c-c4cf36ffd5ff-kube-api-access-fm9ml\") pod \"manila-operator-controller-manager-5d499bf58b-t8rr8\" (UID: \"a5e429ee-33ed-40b1-878c-c4cf36ffd5ff\") " pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-t8rr8" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.309852 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzthq\" (UniqueName: \"kubernetes.io/projected/ccd00428-c0de-472f-a9a3-19ec6524d4c8-kube-api-access-kzthq\") pod \"mariadb-operator-controller-manager-66f4dd4bc7-xscln\" (UID: \"ccd00428-c0de-472f-a9a3-19ec6524d4c8\") " pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-xscln" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.320354 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-gn2tw" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.325611 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-64cdc6ff96-7928r"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.361458 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-955677c94-rm5cw" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.369085 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.370892 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.372737 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkgxm\" (UniqueName: \"kubernetes.io/projected/4f041cb0-6235-484c-829e-3bf4b6a3e5e6-kube-api-access-fkgxm\") pod \"octavia-operator-controller-manager-64cdc6ff96-7928r\" (UID: \"4f041cb0-6235-484c-829e-3bf4b6a3e5e6\") " pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-7928r" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.372844 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99mbc\" (UniqueName: \"kubernetes.io/projected/f3763945-b889-4f5b-bd9c-2f5bfa93e6d6-kube-api-access-99mbc\") pod \"nova-operator-controller-manager-79556f57fc-2zx72\" (UID: \"f3763945-b889-4f5b-bd9c-2f5bfa93e6d6\") " pod="openstack-operators/nova-operator-controller-manager-79556f57fc-2zx72" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.372886 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk5l9\" (UniqueName: \"kubernetes.io/projected/8e16feb0-cfc3-43ba-9411-00788a5c42dd-kube-api-access-zk5l9\") pod \"neutron-operator-controller-manager-6fdcddb789-q47ql\" (UID: \"8e16feb0-cfc3-43ba-9411-00788a5c42dd\") " pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-q47ql" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.378179 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.378596 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-jmlpp" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.422966 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk5l9\" (UniqueName: \"kubernetes.io/projected/8e16feb0-cfc3-43ba-9411-00788a5c42dd-kube-api-access-zk5l9\") pod \"neutron-operator-controller-manager-6fdcddb789-q47ql\" (UID: \"8e16feb0-cfc3-43ba-9411-00788a5c42dd\") " pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-q47ql" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.440169 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-56897c768d-2zcq7"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.441563 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-99mbc\" (UniqueName: \"kubernetes.io/projected/f3763945-b889-4f5b-bd9c-2f5bfa93e6d6-kube-api-access-99mbc\") pod \"nova-operator-controller-manager-79556f57fc-2zx72\" (UID: \"f3763945-b889-4f5b-bd9c-2f5bfa93e6d6\") " pod="openstack-operators/nova-operator-controller-manager-79556f57fc-2zx72" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.442201 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-56897c768d-2zcq7" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.443714 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-t8rr8" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.447673 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-mvr8h" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.457208 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-56897c768d-2zcq7"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.464101 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-xscln" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.474578 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5fzv\" (UniqueName: \"kubernetes.io/projected/d1299562-0e0b-4dc5-916a-a72c2a79993d-kube-api-access-p5fzv\") pod \"openstack-baremetal-operator-controller-manager-5d9f9695db7tlww\" (UID: \"d1299562-0e0b-4dc5-916a-a72c2a79993d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.474896 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkgxm\" (UniqueName: \"kubernetes.io/projected/4f041cb0-6235-484c-829e-3bf4b6a3e5e6-kube-api-access-fkgxm\") pod \"octavia-operator-controller-manager-64cdc6ff96-7928r\" (UID: \"4f041cb0-6235-484c-829e-3bf4b6a3e5e6\") " pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-7928r" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.475036 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d1299562-0e0b-4dc5-916a-a72c2a79993d-cert\") pod \"openstack-baremetal-operator-controller-manager-5d9f9695db7tlww\" (UID: \"d1299562-0e0b-4dc5-916a-a72c2a79993d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.478240 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-q47ql" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.501577 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-57988cc5b5-kglvq"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.504574 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkgxm\" (UniqueName: \"kubernetes.io/projected/4f041cb0-6235-484c-829e-3bf4b6a3e5e6-kube-api-access-fkgxm\") pod \"octavia-operator-controller-manager-64cdc6ff96-7928r\" (UID: \"4f041cb0-6235-484c-829e-3bf4b6a3e5e6\") " pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-7928r" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.506462 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.506730 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-kglvq" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.510917 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-6lxqp" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.523066 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-57988cc5b5-kglvq"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.532876 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-2zx72" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.564035 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-d77b94747-zkgrb"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.565668 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-d77b94747-zkgrb" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.572517 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-6nnq7" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.575014 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-7928r" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.576198 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d1299562-0e0b-4dc5-916a-a72c2a79993d-cert\") pod \"openstack-baremetal-operator-controller-manager-5d9f9695db7tlww\" (UID: \"d1299562-0e0b-4dc5-916a-a72c2a79993d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.576357 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5fzv\" (UniqueName: \"kubernetes.io/projected/d1299562-0e0b-4dc5-916a-a72c2a79993d-kube-api-access-p5fzv\") pod \"openstack-baremetal-operator-controller-manager-5d9f9695db7tlww\" (UID: \"d1299562-0e0b-4dc5-916a-a72c2a79993d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.576968 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwd9b\" (UniqueName: \"kubernetes.io/projected/bd9bf1a7-8218-4ea2-b68a-4f5961835bf3-kube-api-access-lwd9b\") pod \"ovn-operator-controller-manager-56897c768d-2zcq7\" (UID: \"bd9bf1a7-8218-4ea2-b68a-4f5961835bf3\") " pod="openstack-operators/ovn-operator-controller-manager-56897c768d-2zcq7" Nov 27 07:11:45 crc kubenswrapper[4971]: E1127 07:11:45.576380 4971 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 27 07:11:45 crc kubenswrapper[4971]: E1127 07:11:45.577231 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d1299562-0e0b-4dc5-916a-a72c2a79993d-cert podName:d1299562-0e0b-4dc5-916a-a72c2a79993d nodeName:}" failed. No retries permitted until 2025-11-27 07:11:46.077207302 +0000 UTC m=+1144.269251210 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d1299562-0e0b-4dc5-916a-a72c2a79993d-cert") pod "openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" (UID: "d1299562-0e0b-4dc5-916a-a72c2a79993d") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.579437 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.584870 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.589336 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-hlwbb" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.603945 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5fzv\" (UniqueName: \"kubernetes.io/projected/d1299562-0e0b-4dc5-916a-a72c2a79993d-kube-api-access-p5fzv\") pod \"openstack-baremetal-operator-controller-manager-5d9f9695db7tlww\" (UID: \"d1299562-0e0b-4dc5-916a-a72c2a79993d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.652044 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd6c7f4c8-jl2db"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.662921 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-d77b94747-zkgrb"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.663117 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-jl2db" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.666282 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd6c7f4c8-jl2db"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.667262 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-mjn2k" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.678667 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwd9b\" (UniqueName: \"kubernetes.io/projected/bd9bf1a7-8218-4ea2-b68a-4f5961835bf3-kube-api-access-lwd9b\") pod \"ovn-operator-controller-manager-56897c768d-2zcq7\" (UID: \"bd9bf1a7-8218-4ea2-b68a-4f5961835bf3\") " pod="openstack-operators/ovn-operator-controller-manager-56897c768d-2zcq7" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.678749 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sztg8\" (UniqueName: \"kubernetes.io/projected/5a3f31c6-891f-4fc3-9979-ada42facf791-kube-api-access-sztg8\") pod \"placement-operator-controller-manager-57988cc5b5-kglvq\" (UID: \"5a3f31c6-891f-4fc3-9979-ada42facf791\") " pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-kglvq" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.678819 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtqm4\" (UniqueName: \"kubernetes.io/projected/13bda70c-c880-4a51-aee8-6de35bcf8a35-kube-api-access-xtqm4\") pod \"telemetry-operator-controller-manager-76cc84c6bb-p4brb\" (UID: \"13bda70c-c880-4a51-aee8-6de35bcf8a35\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.678850 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfxbv\" (UniqueName: \"kubernetes.io/projected/8f49b4e8-d2ef-4a8b-8751-f480782f970f-kube-api-access-kfxbv\") pod 
\"swift-operator-controller-manager-d77b94747-zkgrb\" (UID: \"8f49b4e8-d2ef-4a8b-8751-f480782f970f\") " pod="openstack-operators/swift-operator-controller-manager-d77b94747-zkgrb" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.678885 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eee345d4-5e02-4a96-a204-383fd410e564-cert\") pod \"infra-operator-controller-manager-57548d458d-zdjzp\" (UID: \"eee345d4-5e02-4a96-a204-383fd410e564\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" Nov 27 07:11:45 crc kubenswrapper[4971]: E1127 07:11:45.680708 4971 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Nov 27 07:11:45 crc kubenswrapper[4971]: E1127 07:11:45.680810 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eee345d4-5e02-4a96-a204-383fd410e564-cert podName:eee345d4-5e02-4a96-a204-383fd410e564 nodeName:}" failed. No retries permitted until 2025-11-27 07:11:46.68078225 +0000 UTC m=+1144.872826328 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/eee345d4-5e02-4a96-a204-383fd410e564-cert") pod "infra-operator-controller-manager-57548d458d-zdjzp" (UID: "eee345d4-5e02-4a96-a204-383fd410e564") : secret "infra-operator-webhook-server-cert" not found Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.705729 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.708706 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwd9b\" (UniqueName: \"kubernetes.io/projected/bd9bf1a7-8218-4ea2-b68a-4f5961835bf3-kube-api-access-lwd9b\") pod \"ovn-operator-controller-manager-56897c768d-2zcq7\" (UID: \"bd9bf1a7-8218-4ea2-b68a-4f5961835bf3\") " pod="openstack-operators/ovn-operator-controller-manager-56897c768d-2zcq7" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.725702 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-656dcb59d4-ctgpd"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.727407 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-ctgpd" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.734123 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-8sqms" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.735218 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-656dcb59d4-ctgpd"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.775244 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-56897c768d-2zcq7" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.779973 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sztg8\" (UniqueName: \"kubernetes.io/projected/5a3f31c6-891f-4fc3-9979-ada42facf791-kube-api-access-sztg8\") pod \"placement-operator-controller-manager-57988cc5b5-kglvq\" (UID: \"5a3f31c6-891f-4fc3-9979-ada42facf791\") " pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-kglvq" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.780032 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mz87m\" (UniqueName: \"kubernetes.io/projected/defd6a4c-7400-4f6b-8258-1c70426bcb85-kube-api-access-mz87m\") pod \"test-operator-controller-manager-5cd6c7f4c8-jl2db\" (UID: \"defd6a4c-7400-4f6b-8258-1c70426bcb85\") " pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-jl2db" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.780081 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtqm4\" (UniqueName: \"kubernetes.io/projected/13bda70c-c880-4a51-aee8-6de35bcf8a35-kube-api-access-xtqm4\") pod \"telemetry-operator-controller-manager-76cc84c6bb-p4brb\" (UID: \"13bda70c-c880-4a51-aee8-6de35bcf8a35\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.780106 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfxbv\" (UniqueName: \"kubernetes.io/projected/8f49b4e8-d2ef-4a8b-8751-f480782f970f-kube-api-access-kfxbv\") pod \"swift-operator-controller-manager-d77b94747-zkgrb\" (UID: \"8f49b4e8-d2ef-4a8b-8751-f480782f970f\") " pod="openstack-operators/swift-operator-controller-manager-d77b94747-zkgrb" Nov 27 07:11:45 crc kubenswrapper[4971]: W1127 07:11:45.787021 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9be13ef4_b6e0_434f_aea4_a0b374facb97.slice/crio-ca9af26789836df55ecc31cdf4db7514356b2303343f95b378d6d30d37ed062d WatchSource:0}: Error finding container ca9af26789836df55ecc31cdf4db7514356b2303343f95b378d6d30d37ed062d: Status 404 returned error can't find the container with id ca9af26789836df55ecc31cdf4db7514356b2303343f95b378d6d30d37ed062d Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.787119 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.789051 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.797210 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-6jc65" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.797859 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.801819 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.803026 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfxbv\" (UniqueName: \"kubernetes.io/projected/8f49b4e8-d2ef-4a8b-8751-f480782f970f-kube-api-access-kfxbv\") pod \"swift-operator-controller-manager-d77b94747-zkgrb\" (UID: \"8f49b4e8-d2ef-4a8b-8751-f480782f970f\") " pod="openstack-operators/swift-operator-controller-manager-d77b94747-zkgrb" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.803279 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sztg8\" (UniqueName: \"kubernetes.io/projected/5a3f31c6-891f-4fc3-9979-ada42facf791-kube-api-access-sztg8\") pod \"placement-operator-controller-manager-57988cc5b5-kglvq\" (UID: \"5a3f31c6-891f-4fc3-9979-ada42facf791\") " pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-kglvq" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.808045 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.829170 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtqm4\" (UniqueName: \"kubernetes.io/projected/13bda70c-c880-4a51-aee8-6de35bcf8a35-kube-api-access-xtqm4\") pod \"telemetry-operator-controller-manager-76cc84c6bb-p4brb\" (UID: \"13bda70c-c880-4a51-aee8-6de35bcf8a35\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.838979 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-kglvq" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.848665 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ssqrc"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.849994 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ssqrc" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.853985 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-wbnz4" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.863055 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ssqrc"] Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.881846 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8rnl\" (UniqueName: \"kubernetes.io/projected/f6b1aca7-dda9-4f85-b305-ecc1f7e40c36-kube-api-access-j8rnl\") pod \"watcher-operator-controller-manager-656dcb59d4-ctgpd\" (UID: \"f6b1aca7-dda9-4f85-b305-ecc1f7e40c36\") " pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-ctgpd" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.881931 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4trpf\" (UniqueName: \"kubernetes.io/projected/03c0f055-04c8-42da-b1bf-a35ba598aeb3-kube-api-access-4trpf\") pod \"openstack-operator-controller-manager-66f75ddbcc-fpsdk\" (UID: \"03c0f055-04c8-42da-b1bf-a35ba598aeb3\") " pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.881988 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-webhook-certs\") pod \"openstack-operator-controller-manager-66f75ddbcc-fpsdk\" (UID: \"03c0f055-04c8-42da-b1bf-a35ba598aeb3\") " pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.882022 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-metrics-certs\") pod \"openstack-operator-controller-manager-66f75ddbcc-fpsdk\" (UID: \"03c0f055-04c8-42da-b1bf-a35ba598aeb3\") " pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.882061 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mz87m\" (UniqueName: \"kubernetes.io/projected/defd6a4c-7400-4f6b-8258-1c70426bcb85-kube-api-access-mz87m\") pod \"test-operator-controller-manager-5cd6c7f4c8-jl2db\" (UID: \"defd6a4c-7400-4f6b-8258-1c70426bcb85\") " pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-jl2db" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.909459 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mz87m\" (UniqueName: \"kubernetes.io/projected/defd6a4c-7400-4f6b-8258-1c70426bcb85-kube-api-access-mz87m\") pod \"test-operator-controller-manager-5cd6c7f4c8-jl2db\" (UID: \"defd6a4c-7400-4f6b-8258-1c70426bcb85\") " pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-jl2db" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.910094 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-d77b94747-zkgrb" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.937478 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.984072 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnzzm\" (UniqueName: \"kubernetes.io/projected/e539f873-39f9-493b-be0b-af897894466e-kube-api-access-hnzzm\") pod \"rabbitmq-cluster-operator-manager-668c99d594-ssqrc\" (UID: \"e539f873-39f9-493b-be0b-af897894466e\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ssqrc" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.984133 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4trpf\" (UniqueName: \"kubernetes.io/projected/03c0f055-04c8-42da-b1bf-a35ba598aeb3-kube-api-access-4trpf\") pod \"openstack-operator-controller-manager-66f75ddbcc-fpsdk\" (UID: \"03c0f055-04c8-42da-b1bf-a35ba598aeb3\") " pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.984180 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-webhook-certs\") pod \"openstack-operator-controller-manager-66f75ddbcc-fpsdk\" (UID: \"03c0f055-04c8-42da-b1bf-a35ba598aeb3\") " pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.984209 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-metrics-certs\") pod \"openstack-operator-controller-manager-66f75ddbcc-fpsdk\" (UID: \"03c0f055-04c8-42da-b1bf-a35ba598aeb3\") " pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.984256 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8rnl\" (UniqueName: \"kubernetes.io/projected/f6b1aca7-dda9-4f85-b305-ecc1f7e40c36-kube-api-access-j8rnl\") pod \"watcher-operator-controller-manager-656dcb59d4-ctgpd\" (UID: \"f6b1aca7-dda9-4f85-b305-ecc1f7e40c36\") " pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-ctgpd" Nov 27 07:11:45 crc kubenswrapper[4971]: E1127 07:11:45.985414 4971 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Nov 27 07:11:45 crc kubenswrapper[4971]: E1127 07:11:45.985457 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-webhook-certs podName:03c0f055-04c8-42da-b1bf-a35ba598aeb3 nodeName:}" failed. No retries permitted until 2025-11-27 07:11:46.485442415 +0000 UTC m=+1144.677486333 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-webhook-certs") pod "openstack-operator-controller-manager-66f75ddbcc-fpsdk" (UID: "03c0f055-04c8-42da-b1bf-a35ba598aeb3") : secret "webhook-server-cert" not found Nov 27 07:11:45 crc kubenswrapper[4971]: E1127 07:11:45.985660 4971 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Nov 27 07:11:45 crc kubenswrapper[4971]: E1127 07:11:45.985690 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-metrics-certs podName:03c0f055-04c8-42da-b1bf-a35ba598aeb3 nodeName:}" failed. No retries permitted until 2025-11-27 07:11:46.485682832 +0000 UTC m=+1144.677726750 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-metrics-certs") pod "openstack-operator-controller-manager-66f75ddbcc-fpsdk" (UID: "03c0f055-04c8-42da-b1bf-a35ba598aeb3") : secret "metrics-server-cert" not found Nov 27 07:11:45 crc kubenswrapper[4971]: I1127 07:11:45.990780 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7b64f4fb85-bp29n"] Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.000861 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-jl2db" Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.024097 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4trpf\" (UniqueName: \"kubernetes.io/projected/03c0f055-04c8-42da-b1bf-a35ba598aeb3-kube-api-access-4trpf\") pod \"openstack-operator-controller-manager-66f75ddbcc-fpsdk\" (UID: \"03c0f055-04c8-42da-b1bf-a35ba598aeb3\") " pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.029419 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8rnl\" (UniqueName: \"kubernetes.io/projected/f6b1aca7-dda9-4f85-b305-ecc1f7e40c36-kube-api-access-j8rnl\") pod \"watcher-operator-controller-manager-656dcb59d4-ctgpd\" (UID: \"f6b1aca7-dda9-4f85-b305-ecc1f7e40c36\") " pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-ctgpd" Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.051699 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-589cbd6b5b-mg4bw"] Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.069284 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-ctgpd" Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.073833 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6b7f75547b-p5tg5"] Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.088388 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d1299562-0e0b-4dc5-916a-a72c2a79993d-cert\") pod \"openstack-baremetal-operator-controller-manager-5d9f9695db7tlww\" (UID: \"d1299562-0e0b-4dc5-916a-a72c2a79993d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.088475 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnzzm\" (UniqueName: \"kubernetes.io/projected/e539f873-39f9-493b-be0b-af897894466e-kube-api-access-hnzzm\") pod \"rabbitmq-cluster-operator-manager-668c99d594-ssqrc\" (UID: \"e539f873-39f9-493b-be0b-af897894466e\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ssqrc" Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.089039 4971 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.089091 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d1299562-0e0b-4dc5-916a-a72c2a79993d-cert podName:d1299562-0e0b-4dc5-916a-a72c2a79993d nodeName:}" failed. No retries permitted until 2025-11-27 07:11:47.089072355 +0000 UTC m=+1145.281116273 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d1299562-0e0b-4dc5-916a-a72c2a79993d-cert") pod "openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" (UID: "d1299562-0e0b-4dc5-916a-a72c2a79993d") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.094694 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5b77f656f-5m76m"] Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.121645 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnzzm\" (UniqueName: \"kubernetes.io/projected/e539f873-39f9-493b-be0b-af897894466e-kube-api-access-hnzzm\") pod \"rabbitmq-cluster-operator-manager-668c99d594-ssqrc\" (UID: \"e539f873-39f9-493b-be0b-af897894466e\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ssqrc" Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.131619 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ssqrc" Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.148244 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7b4567c7cf-gn2tw"] Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.163380 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5d494799bf-q7w5w"] Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.203853 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-mg4bw" event={"ID":"e9ec811b-a5ea-4068-abe7-fdfbdaba39b3","Type":"ContainerStarted","Data":"4192a944bf76552af193d2e9f2eb0b667b8e086003c335135eb6641ecb3407f4"} Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.216460 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-p5tg5" event={"ID":"f9f71029-8412-4967-acf0-ad2d2e7c31f4","Type":"ContainerStarted","Data":"2c3dc848f6c457e38a02eae51952b617071fdc6d1fb512d50fdea17af07be6b6"} Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.222711 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-bp29n" event={"ID":"9be13ef4-b6e0-434f-aea4-a0b374facb97","Type":"ContainerStarted","Data":"ca9af26789836df55ecc31cdf4db7514356b2303343f95b378d6d30d37ed062d"} Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.225328 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5b77f656f-5m76m" event={"ID":"2e4648d4-38e3-4e9b-a68d-7db912f67470","Type":"ContainerStarted","Data":"a7b197ef5e1ad43b2adcf8a29c9ca4e682de682ac5e4e97e5ee7d9b2b92de296"} Nov 27 07:11:46 crc kubenswrapper[4971]: W1127 07:11:46.233776 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6488635b_4233_4898_b540_8c7433b25d0f.slice/crio-fe1d8cda279129cb77790fb450e4dffbe835fd7f847c06daf28114870260fb80 WatchSource:0}: Error finding container fe1d8cda279129cb77790fb450e4dffbe835fd7f847c06daf28114870260fb80: Status 404 returned error can't find the container with id fe1d8cda279129cb77790fb450e4dffbe835fd7f847c06daf28114870260fb80 Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.242420 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5d499bf58b-t8rr8"] Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.273606 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-955677c94-rm5cw"] Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.464665 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6fdcddb789-q47ql"] Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.491433 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-79556f57fc-2zx72"] Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.500472 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-webhook-certs\") pod \"openstack-operator-controller-manager-66f75ddbcc-fpsdk\" (UID: 
\"03c0f055-04c8-42da-b1bf-a35ba598aeb3\") " pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.500566 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-metrics-certs\") pod \"openstack-operator-controller-manager-66f75ddbcc-fpsdk\" (UID: \"03c0f055-04c8-42da-b1bf-a35ba598aeb3\") " pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.500812 4971 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.500874 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-metrics-certs podName:03c0f055-04c8-42da-b1bf-a35ba598aeb3 nodeName:}" failed. No retries permitted until 2025-11-27 07:11:47.500855401 +0000 UTC m=+1145.692899319 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-metrics-certs") pod "openstack-operator-controller-manager-66f75ddbcc-fpsdk" (UID: "03c0f055-04c8-42da-b1bf-a35ba598aeb3") : secret "metrics-server-cert" not found Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.501103 4971 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.501186 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-webhook-certs podName:03c0f055-04c8-42da-b1bf-a35ba598aeb3 nodeName:}" failed. No retries permitted until 2025-11-27 07:11:47.50116324 +0000 UTC m=+1145.693207218 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-webhook-certs") pod "openstack-operator-controller-manager-66f75ddbcc-fpsdk" (UID: "03c0f055-04c8-42da-b1bf-a35ba598aeb3") : secret "webhook-server-cert" not found Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.512413 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-7c6m7"] Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.520697 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-xscln"] Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.587478 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-64cdc6ff96-7928r"] Nov 27 07:11:46 crc kubenswrapper[4971]: W1127 07:11:46.589049 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f041cb0_6235_484c_829e_3bf4b6a3e5e6.slice/crio-30c09ac7ae4f7699e8a271eca36cfd0d25d02b094d50b61f0f275561fcfb701a WatchSource:0}: Error finding container 30c09ac7ae4f7699e8a271eca36cfd0d25d02b094d50b61f0f275561fcfb701a: Status 404 returned error can't find the container with id 30c09ac7ae4f7699e8a271eca36cfd0d25d02b094d50b61f0f275561fcfb701a Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.644608 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-56897c768d-2zcq7"] Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.650759 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-d77b94747-zkgrb"] Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.654725 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xtqm4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-p4brb_openstack-operators(13bda70c-c880-4a51-aee8-6de35bcf8a35): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.656044 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:bbb543d2d67c73e5df5d6357c3251363eb34a99575c5bf10416edd45dbdae2f6,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lwd9b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-56897c768d-2zcq7_openstack-operators(bd9bf1a7-8218-4ea2-b68a-4f5961835bf3): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 27 07:11:46 crc 
kubenswrapper[4971]: I1127 07:11:46.657751 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb"] Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.660194 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xtqm4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-p4brb_openstack-operators(13bda70c-c880-4a51-aee8-6de35bcf8a35): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.660194 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lwd9b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
ovn-operator-controller-manager-56897c768d-2zcq7_openstack-operators(bd9bf1a7-8218-4ea2-b68a-4f5961835bf3): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.661360 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb" podUID="13bda70c-c880-4a51-aee8-6de35bcf8a35" Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.661338 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/ovn-operator-controller-manager-56897c768d-2zcq7" podUID="bd9bf1a7-8218-4ea2-b68a-4f5961835bf3" Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.704241 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eee345d4-5e02-4a96-a204-383fd410e564-cert\") pod \"infra-operator-controller-manager-57548d458d-zdjzp\" (UID: \"eee345d4-5e02-4a96-a204-383fd410e564\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.704523 4971 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.704646 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eee345d4-5e02-4a96-a204-383fd410e564-cert podName:eee345d4-5e02-4a96-a204-383fd410e564 nodeName:}" failed. No retries permitted until 2025-11-27 07:11:48.704618057 +0000 UTC m=+1146.896662055 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/eee345d4-5e02-4a96-a204-383fd410e564-cert") pod "infra-operator-controller-manager-57548d458d-zdjzp" (UID: "eee345d4-5e02-4a96-a204-383fd410e564") : secret "infra-operator-webhook-server-cert" not found Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.769692 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd6c7f4c8-jl2db"] Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.782473 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-57988cc5b5-kglvq"] Nov 27 07:11:46 crc kubenswrapper[4971]: W1127 07:11:46.786433 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddefd6a4c_7400_4f6b_8258_1c70426bcb85.slice/crio-2179289395cfb0ac53191c94eea526bf6a38f860481d994a23f481014ceb300d WatchSource:0}: Error finding container 2179289395cfb0ac53191c94eea526bf6a38f860481d994a23f481014ceb300d: Status 404 returned error can't find the container with id 2179289395cfb0ac53191c94eea526bf6a38f860481d994a23f481014ceb300d Nov 27 07:11:46 crc kubenswrapper[4971]: W1127 07:11:46.788656 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5a3f31c6_891f_4fc3_9979_ada42facf791.slice/crio-b29b92f7539495a2b5b644106e0c63bc96fa9c43d168504679a9c3ca37babb5a WatchSource:0}: Error finding container b29b92f7539495a2b5b644106e0c63bc96fa9c43d168504679a9c3ca37babb5a: Status 404 returned error can't find the container with id b29b92f7539495a2b5b644106e0c63bc96fa9c43d168504679a9c3ca37babb5a Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.792090 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:210517b918e30df1c95fc7d961c8e57e9a9d1cc2b9fe7eb4dad2034dd53a90aa,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mz87m,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5cd6c7f4c8-jl2db_openstack-operators(defd6a4c-7400-4f6b-8258-1c70426bcb85): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.794600 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mz87m,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5cd6c7f4c8-jl2db_openstack-operators(defd6a4c-7400-4f6b-8258-1c70426bcb85): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.801174 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-jl2db" podUID="defd6a4c-7400-4f6b-8258-1c70426bcb85" Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.814139 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:225958f250a1075b69439d776a13acc45c78695c21abda23600fb53ca1640423,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sztg8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-57988cc5b5-kglvq_openstack-operators(5a3f31c6-891f-4fc3-9979-ada42facf791): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.823214 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sztg8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-57988cc5b5-kglvq_openstack-operators(5a3f31c6-891f-4fc3-9979-ada42facf791): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.825242 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-kglvq" podUID="5a3f31c6-891f-4fc3-9979-ada42facf791" Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.861261 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-656dcb59d4-ctgpd"] Nov 27 07:11:46 crc kubenswrapper[4971]: I1127 07:11:46.869751 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ssqrc"] Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.875088 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:6bed55b172b9ee8ccc3952cbfc543d8bd44e2690f6db94348a754152fd78f4cf,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j8rnl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-656dcb59d4-ctgpd_openstack-operators(f6b1aca7-dda9-4f85-b305-ecc1f7e40c36): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.877912 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j8rnl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-656dcb59d4-ctgpd_openstack-operators(f6b1aca7-dda9-4f85-b305-ecc1f7e40c36): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 27 07:11:46 crc kubenswrapper[4971]: E1127 07:11:46.879107 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-ctgpd" podUID="f6b1aca7-dda9-4f85-b305-ecc1f7e40c36" Nov 27 07:11:47 crc kubenswrapper[4971]: I1127 07:11:47.115438 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d1299562-0e0b-4dc5-916a-a72c2a79993d-cert\") pod \"openstack-baremetal-operator-controller-manager-5d9f9695db7tlww\" (UID: \"d1299562-0e0b-4dc5-916a-a72c2a79993d\") " 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" Nov 27 07:11:47 crc kubenswrapper[4971]: E1127 07:11:47.115662 4971 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 27 07:11:47 crc kubenswrapper[4971]: E1127 07:11:47.115733 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d1299562-0e0b-4dc5-916a-a72c2a79993d-cert podName:d1299562-0e0b-4dc5-916a-a72c2a79993d nodeName:}" failed. No retries permitted until 2025-11-27 07:11:49.115712783 +0000 UTC m=+1147.307756701 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d1299562-0e0b-4dc5-916a-a72c2a79993d-cert") pod "openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" (UID: "d1299562-0e0b-4dc5-916a-a72c2a79993d") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 27 07:11:47 crc kubenswrapper[4971]: I1127 07:11:47.243890 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-kglvq" event={"ID":"5a3f31c6-891f-4fc3-9979-ada42facf791","Type":"ContainerStarted","Data":"b29b92f7539495a2b5b644106e0c63bc96fa9c43d168504679a9c3ca37babb5a"} Nov 27 07:11:47 crc kubenswrapper[4971]: I1127 07:11:47.248727 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-ctgpd" event={"ID":"f6b1aca7-dda9-4f85-b305-ecc1f7e40c36","Type":"ContainerStarted","Data":"f28274687428b3fb60cb5dbed8be1c695318cdfcac26586cade5db4ea9433bf1"} Nov 27 07:11:47 crc kubenswrapper[4971]: E1127 07:11:47.257561 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:225958f250a1075b69439d776a13acc45c78695c21abda23600fb53ca1640423\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-kglvq" podUID="5a3f31c6-891f-4fc3-9979-ada42facf791" Nov 27 07:11:47 crc kubenswrapper[4971]: E1127 07:11:47.258255 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:6bed55b172b9ee8ccc3952cbfc543d8bd44e2690f6db94348a754152fd78f4cf\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-ctgpd" podUID="f6b1aca7-dda9-4f85-b305-ecc1f7e40c36" Nov 27 07:11:47 crc kubenswrapper[4971]: I1127 07:11:47.259646 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-2zx72" event={"ID":"f3763945-b889-4f5b-bd9c-2f5bfa93e6d6","Type":"ContainerStarted","Data":"e73d3415af2960d0e3d7d92d915bcb7bc3eb3794f9f4e60b2c079d4a11a0d55b"} Nov 27 07:11:47 crc kubenswrapper[4971]: I1127 07:11:47.269046 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-d77b94747-zkgrb" 
event={"ID":"8f49b4e8-d2ef-4a8b-8751-f480782f970f","Type":"ContainerStarted","Data":"9ee9060a82483267b1cf9711518d84fcff197d3818c86c8f55e19c5304a3af99"} Nov 27 07:11:47 crc kubenswrapper[4971]: I1127 07:11:47.273408 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-955677c94-rm5cw" event={"ID":"d4125c07-9503-4a93-a635-5863fdf26632","Type":"ContainerStarted","Data":"745c1c7a0915d5efda739034fdf217863f1f85e8a2d94ae75d0ab3d64bf970cd"} Nov 27 07:11:47 crc kubenswrapper[4971]: I1127 07:11:47.281429 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-7c6m7" event={"ID":"bc2fe762-04b6-4c5a-bf42-ce67dd839609","Type":"ContainerStarted","Data":"7a2673518f085c9a4d97bd0933ec45604255b1cd7740a479178347ee1d267eb3"} Nov 27 07:11:47 crc kubenswrapper[4971]: I1127 07:11:47.292903 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-56897c768d-2zcq7" event={"ID":"bd9bf1a7-8218-4ea2-b68a-4f5961835bf3","Type":"ContainerStarted","Data":"ccde319bf971b6a35dd062be4a52cb8233272feec1cb97ddeb1e64ba79b11f6d"} Nov 27 07:11:47 crc kubenswrapper[4971]: I1127 07:11:47.295355 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-gn2tw" event={"ID":"6488635b-4233-4898-b540-8c7433b25d0f","Type":"ContainerStarted","Data":"fe1d8cda279129cb77790fb450e4dffbe835fd7f847c06daf28114870260fb80"} Nov 27 07:11:47 crc kubenswrapper[4971]: E1127 07:11:47.295829 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:bbb543d2d67c73e5df5d6357c3251363eb34a99575c5bf10416edd45dbdae2f6\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-56897c768d-2zcq7" podUID="bd9bf1a7-8218-4ea2-b68a-4f5961835bf3" Nov 27 07:11:47 crc kubenswrapper[4971]: I1127 07:11:47.299290 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-q7w5w" event={"ID":"784d6231-82a4-4f58-8de7-0a3f6378e2b0","Type":"ContainerStarted","Data":"a01de9d43b54210d8fe6f556148efcf045a6783a381e20c225566021b09ff4c0"} Nov 27 07:11:47 crc kubenswrapper[4971]: I1127 07:11:47.302645 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-t8rr8" event={"ID":"a5e429ee-33ed-40b1-878c-c4cf36ffd5ff","Type":"ContainerStarted","Data":"e8253d1db1bf6ba67a6f9617a1baffdba5ad1124e7bd1c80db969c6d8fddeced"} Nov 27 07:11:47 crc kubenswrapper[4971]: I1127 07:11:47.304304 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ssqrc" event={"ID":"e539f873-39f9-493b-be0b-af897894466e","Type":"ContainerStarted","Data":"c0975a6ba6a3c2d31f6ee30805428f2637b1d3430691a59bb7c010cf882841b9"} Nov 27 07:11:47 crc kubenswrapper[4971]: I1127 07:11:47.306377 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-xscln" 
event={"ID":"ccd00428-c0de-472f-a9a3-19ec6524d4c8","Type":"ContainerStarted","Data":"b29b03882359f8bbed5bcdefca9a575f8b18e0d56095a32df46d4fd26f537e34"} Nov 27 07:11:47 crc kubenswrapper[4971]: I1127 07:11:47.308257 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-jl2db" event={"ID":"defd6a4c-7400-4f6b-8258-1c70426bcb85","Type":"ContainerStarted","Data":"2179289395cfb0ac53191c94eea526bf6a38f860481d994a23f481014ceb300d"} Nov 27 07:11:47 crc kubenswrapper[4971]: E1127 07:11:47.311852 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:210517b918e30df1c95fc7d961c8e57e9a9d1cc2b9fe7eb4dad2034dd53a90aa\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-jl2db" podUID="defd6a4c-7400-4f6b-8258-1c70426bcb85" Nov 27 07:11:47 crc kubenswrapper[4971]: I1127 07:11:47.319988 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-q47ql" event={"ID":"8e16feb0-cfc3-43ba-9411-00788a5c42dd","Type":"ContainerStarted","Data":"60a997c11dcc9d103267d821985125e9680d41eb622e20a6cd0424d09666b32b"} Nov 27 07:11:47 crc kubenswrapper[4971]: I1127 07:11:47.339248 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb" event={"ID":"13bda70c-c880-4a51-aee8-6de35bcf8a35","Type":"ContainerStarted","Data":"cf96b44dc89ca5af9a1759b55cb21ea75e22952cf7777a7aa863845080dbab25"} Nov 27 07:11:47 crc kubenswrapper[4971]: E1127 07:11:47.345573 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb" podUID="13bda70c-c880-4a51-aee8-6de35bcf8a35" Nov 27 07:11:47 crc kubenswrapper[4971]: I1127 07:11:47.345815 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-7928r" event={"ID":"4f041cb0-6235-484c-829e-3bf4b6a3e5e6","Type":"ContainerStarted","Data":"30c09ac7ae4f7699e8a271eca36cfd0d25d02b094d50b61f0f275561fcfb701a"} Nov 27 07:11:47 crc kubenswrapper[4971]: I1127 07:11:47.528342 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-webhook-certs\") pod \"openstack-operator-controller-manager-66f75ddbcc-fpsdk\" (UID: \"03c0f055-04c8-42da-b1bf-a35ba598aeb3\") " pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:11:47 crc kubenswrapper[4971]: I1127 07:11:47.528405 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-metrics-certs\") pod 
\"openstack-operator-controller-manager-66f75ddbcc-fpsdk\" (UID: \"03c0f055-04c8-42da-b1bf-a35ba598aeb3\") " pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:11:47 crc kubenswrapper[4971]: E1127 07:11:47.528561 4971 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Nov 27 07:11:47 crc kubenswrapper[4971]: E1127 07:11:47.528644 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-webhook-certs podName:03c0f055-04c8-42da-b1bf-a35ba598aeb3 nodeName:}" failed. No retries permitted until 2025-11-27 07:11:49.528625094 +0000 UTC m=+1147.720669002 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-webhook-certs") pod "openstack-operator-controller-manager-66f75ddbcc-fpsdk" (UID: "03c0f055-04c8-42da-b1bf-a35ba598aeb3") : secret "webhook-server-cert" not found Nov 27 07:11:47 crc kubenswrapper[4971]: E1127 07:11:47.528654 4971 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Nov 27 07:11:47 crc kubenswrapper[4971]: E1127 07:11:47.528736 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-metrics-certs podName:03c0f055-04c8-42da-b1bf-a35ba598aeb3 nodeName:}" failed. No retries permitted until 2025-11-27 07:11:49.528719347 +0000 UTC m=+1147.720763265 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-metrics-certs") pod "openstack-operator-controller-manager-66f75ddbcc-fpsdk" (UID: "03c0f055-04c8-42da-b1bf-a35ba598aeb3") : secret "metrics-server-cert" not found Nov 27 07:11:48 crc kubenswrapper[4971]: E1127 07:11:48.360029 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb" podUID="13bda70c-c880-4a51-aee8-6de35bcf8a35" Nov 27 07:11:48 crc kubenswrapper[4971]: E1127 07:11:48.368867 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:bbb543d2d67c73e5df5d6357c3251363eb34a99575c5bf10416edd45dbdae2f6\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-56897c768d-2zcq7" podUID="bd9bf1a7-8218-4ea2-b68a-4f5961835bf3" Nov 27 07:11:48 crc kubenswrapper[4971]: E1127 07:11:48.368875 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:210517b918e30df1c95fc7d961c8e57e9a9d1cc2b9fe7eb4dad2034dd53a90aa\\\"\", failed to 
\"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-jl2db" podUID="defd6a4c-7400-4f6b-8258-1c70426bcb85" Nov 27 07:11:48 crc kubenswrapper[4971]: E1127 07:11:48.368977 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:6bed55b172b9ee8ccc3952cbfc543d8bd44e2690f6db94348a754152fd78f4cf\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-ctgpd" podUID="f6b1aca7-dda9-4f85-b305-ecc1f7e40c36" Nov 27 07:11:48 crc kubenswrapper[4971]: E1127 07:11:48.368997 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:225958f250a1075b69439d776a13acc45c78695c21abda23600fb53ca1640423\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-kglvq" podUID="5a3f31c6-891f-4fc3-9979-ada42facf791" Nov 27 07:11:48 crc kubenswrapper[4971]: I1127 07:11:48.757638 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eee345d4-5e02-4a96-a204-383fd410e564-cert\") pod \"infra-operator-controller-manager-57548d458d-zdjzp\" (UID: \"eee345d4-5e02-4a96-a204-383fd410e564\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" Nov 27 07:11:48 crc kubenswrapper[4971]: E1127 07:11:48.757789 4971 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Nov 27 07:11:48 crc kubenswrapper[4971]: E1127 07:11:48.757888 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eee345d4-5e02-4a96-a204-383fd410e564-cert podName:eee345d4-5e02-4a96-a204-383fd410e564 nodeName:}" failed. No retries permitted until 2025-11-27 07:11:52.757867825 +0000 UTC m=+1150.949911743 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/eee345d4-5e02-4a96-a204-383fd410e564-cert") pod "infra-operator-controller-manager-57548d458d-zdjzp" (UID: "eee345d4-5e02-4a96-a204-383fd410e564") : secret "infra-operator-webhook-server-cert" not found Nov 27 07:11:49 crc kubenswrapper[4971]: I1127 07:11:49.165970 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d1299562-0e0b-4dc5-916a-a72c2a79993d-cert\") pod \"openstack-baremetal-operator-controller-manager-5d9f9695db7tlww\" (UID: \"d1299562-0e0b-4dc5-916a-a72c2a79993d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" Nov 27 07:11:49 crc kubenswrapper[4971]: E1127 07:11:49.166279 4971 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 27 07:11:49 crc kubenswrapper[4971]: E1127 07:11:49.166549 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d1299562-0e0b-4dc5-916a-a72c2a79993d-cert podName:d1299562-0e0b-4dc5-916a-a72c2a79993d nodeName:}" failed. No retries permitted until 2025-11-27 07:11:53.166490279 +0000 UTC m=+1151.358534197 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d1299562-0e0b-4dc5-916a-a72c2a79993d-cert") pod "openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" (UID: "d1299562-0e0b-4dc5-916a-a72c2a79993d") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 27 07:11:49 crc kubenswrapper[4971]: I1127 07:11:49.572744 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-webhook-certs\") pod \"openstack-operator-controller-manager-66f75ddbcc-fpsdk\" (UID: \"03c0f055-04c8-42da-b1bf-a35ba598aeb3\") " pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:11:49 crc kubenswrapper[4971]: I1127 07:11:49.572818 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-metrics-certs\") pod \"openstack-operator-controller-manager-66f75ddbcc-fpsdk\" (UID: \"03c0f055-04c8-42da-b1bf-a35ba598aeb3\") " pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:11:49 crc kubenswrapper[4971]: E1127 07:11:49.573066 4971 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Nov 27 07:11:49 crc kubenswrapper[4971]: E1127 07:11:49.573133 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-metrics-certs podName:03c0f055-04c8-42da-b1bf-a35ba598aeb3 nodeName:}" failed. No retries permitted until 2025-11-27 07:11:53.573115324 +0000 UTC m=+1151.765159242 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-metrics-certs") pod "openstack-operator-controller-manager-66f75ddbcc-fpsdk" (UID: "03c0f055-04c8-42da-b1bf-a35ba598aeb3") : secret "metrics-server-cert" not found Nov 27 07:11:49 crc kubenswrapper[4971]: E1127 07:11:49.573429 4971 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Nov 27 07:11:49 crc kubenswrapper[4971]: E1127 07:11:49.574285 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-webhook-certs podName:03c0f055-04c8-42da-b1bf-a35ba598aeb3 nodeName:}" failed. No retries permitted until 2025-11-27 07:11:53.573462954 +0000 UTC m=+1151.765506872 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-webhook-certs") pod "openstack-operator-controller-manager-66f75ddbcc-fpsdk" (UID: "03c0f055-04c8-42da-b1bf-a35ba598aeb3") : secret "webhook-server-cert" not found Nov 27 07:11:52 crc kubenswrapper[4971]: I1127 07:11:52.831086 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eee345d4-5e02-4a96-a204-383fd410e564-cert\") pod \"infra-operator-controller-manager-57548d458d-zdjzp\" (UID: \"eee345d4-5e02-4a96-a204-383fd410e564\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" Nov 27 07:11:52 crc kubenswrapper[4971]: E1127 07:11:52.832259 4971 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Nov 27 07:11:52 crc kubenswrapper[4971]: E1127 07:11:52.832357 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eee345d4-5e02-4a96-a204-383fd410e564-cert podName:eee345d4-5e02-4a96-a204-383fd410e564 nodeName:}" failed. No retries permitted until 2025-11-27 07:12:00.832331558 +0000 UTC m=+1159.024375476 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/eee345d4-5e02-4a96-a204-383fd410e564-cert") pod "infra-operator-controller-manager-57548d458d-zdjzp" (UID: "eee345d4-5e02-4a96-a204-383fd410e564") : secret "infra-operator-webhook-server-cert" not found Nov 27 07:11:53 crc kubenswrapper[4971]: I1127 07:11:53.239760 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d1299562-0e0b-4dc5-916a-a72c2a79993d-cert\") pod \"openstack-baremetal-operator-controller-manager-5d9f9695db7tlww\" (UID: \"d1299562-0e0b-4dc5-916a-a72c2a79993d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" Nov 27 07:11:53 crc kubenswrapper[4971]: E1127 07:11:53.240063 4971 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 27 07:11:53 crc kubenswrapper[4971]: E1127 07:11:53.240199 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d1299562-0e0b-4dc5-916a-a72c2a79993d-cert podName:d1299562-0e0b-4dc5-916a-a72c2a79993d nodeName:}" failed. No retries permitted until 2025-11-27 07:12:01.240163569 +0000 UTC m=+1159.432207497 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d1299562-0e0b-4dc5-916a-a72c2a79993d-cert") pod "openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" (UID: "d1299562-0e0b-4dc5-916a-a72c2a79993d") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 27 07:11:53 crc kubenswrapper[4971]: I1127 07:11:53.646280 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-webhook-certs\") pod \"openstack-operator-controller-manager-66f75ddbcc-fpsdk\" (UID: \"03c0f055-04c8-42da-b1bf-a35ba598aeb3\") " pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:11:53 crc kubenswrapper[4971]: I1127 07:11:53.646365 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-metrics-certs\") pod \"openstack-operator-controller-manager-66f75ddbcc-fpsdk\" (UID: \"03c0f055-04c8-42da-b1bf-a35ba598aeb3\") " pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:11:53 crc kubenswrapper[4971]: E1127 07:11:53.646568 4971 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Nov 27 07:11:53 crc kubenswrapper[4971]: E1127 07:11:53.646584 4971 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Nov 27 07:11:53 crc kubenswrapper[4971]: E1127 07:11:53.646705 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-webhook-certs podName:03c0f055-04c8-42da-b1bf-a35ba598aeb3 nodeName:}" failed. No retries permitted until 2025-11-27 07:12:01.64667845 +0000 UTC m=+1159.838722418 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-webhook-certs") pod "openstack-operator-controller-manager-66f75ddbcc-fpsdk" (UID: "03c0f055-04c8-42da-b1bf-a35ba598aeb3") : secret "webhook-server-cert" not found Nov 27 07:11:53 crc kubenswrapper[4971]: E1127 07:11:53.646789 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-metrics-certs podName:03c0f055-04c8-42da-b1bf-a35ba598aeb3 nodeName:}" failed. No retries permitted until 2025-11-27 07:12:01.646771653 +0000 UTC m=+1159.838815641 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-metrics-certs") pod "openstack-operator-controller-manager-66f75ddbcc-fpsdk" (UID: "03c0f055-04c8-42da-b1bf-a35ba598aeb3") : secret "metrics-server-cert" not found Nov 27 07:11:56 crc kubenswrapper[4971]: I1127 07:11:56.413037 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:11:56 crc kubenswrapper[4971]: I1127 07:11:56.413466 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:11:59 crc kubenswrapper[4971]: I1127 07:11:59.552648 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 27 07:12:00 crc kubenswrapper[4971]: E1127 07:12:00.739333 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:25faa5b0e4801d4d3b01a28b877ed3188eee71f33ad66f3c2e86b7921758e711" Nov 27 07:12:00 crc kubenswrapper[4971]: E1127 07:12:00.739789 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:25faa5b0e4801d4d3b01a28b877ed3188eee71f33ad66f3c2e86b7921758e711,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-czrx2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7b4567c7cf-gn2tw_openstack-operators(6488635b-4233-4898-b540-8c7433b25d0f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 27 07:12:00 crc kubenswrapper[4971]: I1127 07:12:00.904499 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eee345d4-5e02-4a96-a204-383fd410e564-cert\") pod \"infra-operator-controller-manager-57548d458d-zdjzp\" (UID: \"eee345d4-5e02-4a96-a204-383fd410e564\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" Nov 27 07:12:00 crc kubenswrapper[4971]: I1127 07:12:00.941963 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eee345d4-5e02-4a96-a204-383fd410e564-cert\") pod \"infra-operator-controller-manager-57548d458d-zdjzp\" (UID: \"eee345d4-5e02-4a96-a204-383fd410e564\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" Nov 27 07:12:01 crc kubenswrapper[4971]: I1127 07:12:01.122730 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" Nov 27 07:12:01 crc kubenswrapper[4971]: I1127 07:12:01.311200 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d1299562-0e0b-4dc5-916a-a72c2a79993d-cert\") pod \"openstack-baremetal-operator-controller-manager-5d9f9695db7tlww\" (UID: \"d1299562-0e0b-4dc5-916a-a72c2a79993d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" Nov 27 07:12:01 crc kubenswrapper[4971]: I1127 07:12:01.325506 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d1299562-0e0b-4dc5-916a-a72c2a79993d-cert\") pod \"openstack-baremetal-operator-controller-manager-5d9f9695db7tlww\" (UID: \"d1299562-0e0b-4dc5-916a-a72c2a79993d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" Nov 27 07:12:01 crc kubenswrapper[4971]: I1127 07:12:01.343392 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" Nov 27 07:12:01 crc kubenswrapper[4971]: I1127 07:12:01.577771 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-p5tg5" event={"ID":"f9f71029-8412-4967-acf0-ad2d2e7c31f4","Type":"ContainerStarted","Data":"a1f2f59dd155c9b36f58341260cce7c95bbe60dfe0a1089cf900c7211ecf435a"} Nov 27 07:12:01 crc kubenswrapper[4971]: I1127 07:12:01.587017 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-d77b94747-zkgrb" event={"ID":"8f49b4e8-d2ef-4a8b-8751-f480782f970f","Type":"ContainerStarted","Data":"22d583cdc1c8610ea2b1ce6b4e5b2c52e9c87604f664ad87d65977ec2010c751"} Nov 27 07:12:01 crc kubenswrapper[4971]: I1127 07:12:01.623852 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-xscln" event={"ID":"ccd00428-c0de-472f-a9a3-19ec6524d4c8","Type":"ContainerStarted","Data":"1e2f75757e7ec216a33b3d63c76f12708a27ad3db3bed568be719f9bc8e90479"} Nov 27 07:12:01 crc kubenswrapper[4971]: I1127 07:12:01.632652 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-q47ql" event={"ID":"8e16feb0-cfc3-43ba-9411-00788a5c42dd","Type":"ContainerStarted","Data":"953646b4ddf67008021133033274c95bb3298322634138228514ac0863e000c3"} Nov 27 07:12:01 crc kubenswrapper[4971]: E1127 07:12:01.709086 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4nsnm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7b64f4fb85-bp29n_openstack-operators(9be13ef4-b6e0-434f-aea4-a0b374facb97): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 27 07:12:01 crc kubenswrapper[4971]: E1127 07:12:01.712572 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-bp29n" 
podUID="9be13ef4-b6e0-434f-aea4-a0b374facb97" Nov 27 07:12:01 crc kubenswrapper[4971]: I1127 07:12:01.728858 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-webhook-certs\") pod \"openstack-operator-controller-manager-66f75ddbcc-fpsdk\" (UID: \"03c0f055-04c8-42da-b1bf-a35ba598aeb3\") " pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:12:01 crc kubenswrapper[4971]: I1127 07:12:01.728911 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-metrics-certs\") pod \"openstack-operator-controller-manager-66f75ddbcc-fpsdk\" (UID: \"03c0f055-04c8-42da-b1bf-a35ba598aeb3\") " pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:12:01 crc kubenswrapper[4971]: I1127 07:12:01.758865 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-metrics-certs\") pod \"openstack-operator-controller-manager-66f75ddbcc-fpsdk\" (UID: \"03c0f055-04c8-42da-b1bf-a35ba598aeb3\") " pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:12:01 crc kubenswrapper[4971]: I1127 07:12:01.759128 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/03c0f055-04c8-42da-b1bf-a35ba598aeb3-webhook-certs\") pod \"openstack-operator-controller-manager-66f75ddbcc-fpsdk\" (UID: \"03c0f055-04c8-42da-b1bf-a35ba598aeb3\") " pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:12:01 crc kubenswrapper[4971]: I1127 07:12:01.895846 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp"] Nov 27 07:12:02 crc kubenswrapper[4971]: I1127 07:12:02.025916 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:12:02 crc kubenswrapper[4971]: I1127 07:12:02.249121 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww"] Nov 27 07:12:02 crc kubenswrapper[4971]: I1127 07:12:02.647772 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-bp29n" event={"ID":"9be13ef4-b6e0-434f-aea4-a0b374facb97","Type":"ContainerStarted","Data":"0869fe9ea7e4f4203d4e059477c51f14dd5999133ef7af35ef927922907514d3"} Nov 27 07:12:02 crc kubenswrapper[4971]: I1127 07:12:02.648980 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-bp29n" Nov 27 07:12:02 crc kubenswrapper[4971]: E1127 07:12:02.658104 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-bp29n" podUID="9be13ef4-b6e0-434f-aea4-a0b374facb97" Nov 27 07:12:02 crc kubenswrapper[4971]: I1127 07:12:02.659999 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" event={"ID":"eee345d4-5e02-4a96-a204-383fd410e564","Type":"ContainerStarted","Data":"db6c8a6119a61bad41e35b7e470670d500f71e3e504cd86de955317fd2e531ca"} Nov 27 07:12:02 crc kubenswrapper[4971]: I1127 07:12:02.668091 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-7928r" event={"ID":"4f041cb0-6235-484c-829e-3bf4b6a3e5e6","Type":"ContainerStarted","Data":"685ac2dc27d0aa5ccb45e722697429c62d9fde777b25586f9ac2bfaed4ab8c28"} Nov 27 07:12:02 crc kubenswrapper[4971]: I1127 07:12:02.686267 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-mg4bw" event={"ID":"e9ec811b-a5ea-4068-abe7-fdfbdaba39b3","Type":"ContainerStarted","Data":"eb750e72a4a9c3569d7cfb8942f65ce99acd2a912712057630b72841f3d76467"} Nov 27 07:12:02 crc kubenswrapper[4971]: I1127 07:12:02.689110 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-t8rr8" event={"ID":"a5e429ee-33ed-40b1-878c-c4cf36ffd5ff","Type":"ContainerStarted","Data":"cbd32a751db4e7a72f50334b496e4812a356a85ea0731ec80f75cf5682ce42b7"} Nov 27 07:12:02 crc kubenswrapper[4971]: I1127 07:12:02.693041 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-955677c94-rm5cw" event={"ID":"d4125c07-9503-4a93-a635-5863fdf26632","Type":"ContainerStarted","Data":"ac3a64d25365ba5ff99420bd78ec8d2b9a9ffeb97a2a1a1ae54a61dec0a8f8f6"} Nov 27 07:12:02 crc kubenswrapper[4971]: I1127 07:12:02.712377 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5b77f656f-5m76m" event={"ID":"2e4648d4-38e3-4e9b-a68d-7db912f67470","Type":"ContainerStarted","Data":"a9ecc6a1a650520176a9553f741121745fb8b4698f1f73936065446b19dae5f3"} Nov 27 07:12:02 crc kubenswrapper[4971]: I1127 07:12:02.715501 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-7c6m7" event={"ID":"bc2fe762-04b6-4c5a-bf42-ce67dd839609","Type":"ContainerStarted","Data":"5723ff5c1ecb5573329bc68c9b3bfe14aaeb3e18fb5b11725467fc4a5dd8302c"} Nov 27 07:12:02 crc kubenswrapper[4971]: I1127 07:12:02.716832 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-q7w5w" event={"ID":"784d6231-82a4-4f58-8de7-0a3f6378e2b0","Type":"ContainerStarted","Data":"7b9e53bf84317b4037caef3b4989a90988dc79d3be20b46046f3c8219c2ec28d"} Nov 27 07:12:02 crc kubenswrapper[4971]: I1127 07:12:02.720825 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ssqrc" event={"ID":"e539f873-39f9-493b-be0b-af897894466e","Type":"ContainerStarted","Data":"234e9a8e565ccd4716b1e25f0800312e9d46fa3717c3957f59b2d0e67a271fcd"} Nov 27 07:12:02 crc kubenswrapper[4971]: I1127 07:12:02.722803 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-2zx72" event={"ID":"f3763945-b889-4f5b-bd9c-2f5bfa93e6d6","Type":"ContainerStarted","Data":"31206fda27489f541b6b15eed0e4d59f44460f5d81b0618ab4fb9120ed58d70e"} Nov 27 07:12:02 crc kubenswrapper[4971]: I1127 07:12:02.741665 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ssqrc" podStartSLOduration=3.725097511 podStartE2EDuration="17.741639436s" podCreationTimestamp="2025-11-27 07:11:45 +0000 UTC" firstStartedPulling="2025-11-27 07:11:46.865240093 +0000 UTC m=+1145.057284011" lastFinishedPulling="2025-11-27 07:12:00.881782018 +0000 UTC m=+1159.073825936" observedRunningTime="2025-11-27 07:12:02.737101562 +0000 UTC m=+1160.929145480" watchObservedRunningTime="2025-11-27 07:12:02.741639436 +0000 UTC m=+1160.933683354" Nov 27 07:12:03 crc kubenswrapper[4971]: I1127 07:12:03.766803 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" event={"ID":"d1299562-0e0b-4dc5-916a-a72c2a79993d","Type":"ContainerStarted","Data":"eb22f89764f1047f0f67b50b180ca1c1ccc4fd84dfa1278ff83b86a1a8d11fab"} Nov 27 07:12:03 crc kubenswrapper[4971]: E1127 07:12:03.769451 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-bp29n" podUID="9be13ef4-b6e0-434f-aea4-a0b374facb97" Nov 27 07:12:12 crc kubenswrapper[4971]: I1127 07:12:12.208035 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk"] Nov 27 07:12:12 crc kubenswrapper[4971]: W1127 07:12:12.525593 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03c0f055_04c8_42da_b1bf_a35ba598aeb3.slice/crio-7f38a5cde3fe86d2fe951fff4dcf0c13eafe1a9cdc979a9786886f581c52ac60 WatchSource:0}: Error finding container 7f38a5cde3fe86d2fe951fff4dcf0c13eafe1a9cdc979a9786886f581c52ac60: Status 404 returned error can't find the container with id 7f38a5cde3fe86d2fe951fff4dcf0c13eafe1a9cdc979a9786886f581c52ac60 Nov 27 07:12:12 crc kubenswrapper[4971]: I1127 07:12:12.865873 4971 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" event={"ID":"03c0f055-04c8-42da-b1bf-a35ba598aeb3","Type":"ContainerStarted","Data":"7f38a5cde3fe86d2fe951fff4dcf0c13eafe1a9cdc979a9786886f581c52ac60"} Nov 27 07:12:13 crc kubenswrapper[4971]: E1127 07:12:13.129228 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385" Nov 27 07:12:13 crc kubenswrapper[4971]: E1127 07:12:13.129482 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xtqm4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-p4brb_openstack-operators(13bda70c-c880-4a51-aee8-6de35bcf8a35): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 27 07:12:14 crc kubenswrapper[4971]: I1127 07:12:14.942184 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-bp29n" Nov 27 07:12:16 crc kubenswrapper[4971]: E1127 07:12:16.929026 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-gn2tw" podUID="6488635b-4233-4898-b540-8c7433b25d0f" Nov 27 07:12:16 crc kubenswrapper[4971]: E1127 07:12:16.966246 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb" podUID="13bda70c-c880-4a51-aee8-6de35bcf8a35" Nov 27 07:12:16 crc kubenswrapper[4971]: I1127 07:12:16.977514 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-bp29n" event={"ID":"9be13ef4-b6e0-434f-aea4-a0b374facb97","Type":"ContainerStarted","Data":"9a656b1a82a5af297665bb807bfa7a67e0704f5dd2dec1ac900546ea52052966"} Nov 27 07:12:16 crc kubenswrapper[4971]: I1127 07:12:16.982140 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" event={"ID":"d1299562-0e0b-4dc5-916a-a72c2a79993d","Type":"ContainerStarted","Data":"8c8a6f01ae993ca4849484df0c896460a74f364b24b811169bcf82165302cb1c"} Nov 27 07:12:16 crc kubenswrapper[4971]: I1127 07:12:16.999408 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-kglvq" event={"ID":"5a3f31c6-891f-4fc3-9979-ada42facf791","Type":"ContainerStarted","Data":"41ecfb07cc88fc8cb9076835d6c5528176a2c52bf325fc01d694a8279c432c94"} Nov 27 07:12:17 crc kubenswrapper[4971]: I1127 07:12:17.005480 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-ctgpd" event={"ID":"f6b1aca7-dda9-4f85-b305-ecc1f7e40c36","Type":"ContainerStarted","Data":"13d269f25a9c973f3817d341a2745ffa33479cd1487b61d15d6ea70714421456"} Nov 27 07:12:17 crc kubenswrapper[4971]: I1127 07:12:17.032007 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7b64f4fb85-bp29n" podStartSLOduration=2.4782016049999998 podStartE2EDuration="33.031981448s" podCreationTimestamp="2025-11-27 07:11:44 +0000 UTC" firstStartedPulling="2025-11-27 07:11:45.843515939 +0000 UTC m=+1144.035559857" lastFinishedPulling="2025-11-27 07:12:16.397295782 +0000 UTC m=+1174.589339700" observedRunningTime="2025-11-27 07:12:17.022983563 +0000 UTC m=+1175.215027481" watchObservedRunningTime="2025-11-27 07:12:17.031981448 +0000 UTC m=+1175.224025366" Nov 27 07:12:17 crc kubenswrapper[4971]: I1127 07:12:17.042233 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-jl2db" event={"ID":"defd6a4c-7400-4f6b-8258-1c70426bcb85","Type":"ContainerStarted","Data":"a7442873dbdd71e0ae1157177d1f24766c479d134d04eff048f64803db87d3b6"} Nov 27 07:12:17 crc kubenswrapper[4971]: I1127 07:12:17.046689 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-7c6m7" Nov 27 07:12:17 crc kubenswrapper[4971]: I1127 07:12:17.062128 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-7c6m7" Nov 27 07:12:17 crc kubenswrapper[4971]: I1127 07:12:17.076504 4971 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-q7w5w" event={"ID":"784d6231-82a4-4f58-8de7-0a3f6378e2b0","Type":"ContainerStarted","Data":"89b67ec810ad6994538d974d9fb1b12eec8686ffee34233871007ad82e6d0d94"} Nov 27 07:12:17 crc kubenswrapper[4971]: I1127 07:12:17.077771 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-q7w5w" Nov 27 07:12:17 crc kubenswrapper[4971]: I1127 07:12:17.090905 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-q7w5w" Nov 27 07:12:17 crc kubenswrapper[4971]: I1127 07:12:17.102130 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-56897c768d-2zcq7" event={"ID":"bd9bf1a7-8218-4ea2-b68a-4f5961835bf3","Type":"ContainerStarted","Data":"b18bf80854be9c0793a0a611c52a7cb68cc9779a69e780111c342122296acfac"} Nov 27 07:12:17 crc kubenswrapper[4971]: I1127 07:12:17.115900 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" event={"ID":"eee345d4-5e02-4a96-a204-383fd410e564","Type":"ContainerStarted","Data":"f39d3b0fdd6e2bed5bf0bfdc9e79eb22f33c0e45ef7ff574898a3f3475dcecc9"} Nov 27 07:12:17 crc kubenswrapper[4971]: I1127 07:12:17.132164 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-7c6m7" podStartSLOduration=3.202153787 podStartE2EDuration="33.132145455s" podCreationTimestamp="2025-11-27 07:11:44 +0000 UTC" firstStartedPulling="2025-11-27 07:11:46.522601491 +0000 UTC m=+1144.714645409" lastFinishedPulling="2025-11-27 07:12:16.452593159 +0000 UTC m=+1174.644637077" observedRunningTime="2025-11-27 07:12:17.124065408 +0000 UTC m=+1175.316109326" watchObservedRunningTime="2025-11-27 07:12:17.132145455 +0000 UTC m=+1175.324189373" Nov 27 07:12:17 crc kubenswrapper[4971]: I1127 07:12:17.154336 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" event={"ID":"03c0f055-04c8-42da-b1bf-a35ba598aeb3","Type":"ContainerStarted","Data":"eea5df763004be4d92b145635f691c2736cacb6178c77a681307b79bc136ef4d"} Nov 27 07:12:17 crc kubenswrapper[4971]: I1127 07:12:17.155208 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:12:17 crc kubenswrapper[4971]: I1127 07:12:17.175948 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-5d494799bf-q7w5w" podStartSLOduration=2.94972388 podStartE2EDuration="33.175919284s" podCreationTimestamp="2025-11-27 07:11:44 +0000 UTC" firstStartedPulling="2025-11-27 07:11:46.19086484 +0000 UTC m=+1144.382908758" lastFinishedPulling="2025-11-27 07:12:16.417060234 +0000 UTC m=+1174.609104162" observedRunningTime="2025-11-27 07:12:17.17511052 +0000 UTC m=+1175.367154428" watchObservedRunningTime="2025-11-27 07:12:17.175919284 +0000 UTC m=+1175.367963202" Nov 27 07:12:17 crc kubenswrapper[4971]: I1127 07:12:17.211589 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-p5tg5" 
event={"ID":"f9f71029-8412-4967-acf0-ad2d2e7c31f4","Type":"ContainerStarted","Data":"64a44a41c60ddede60d7b00612c07a9a56c2ec8d042ec4a72b1bbaa1e3bbafac"} Nov 27 07:12:17 crc kubenswrapper[4971]: I1127 07:12:17.217479 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-p5tg5" Nov 27 07:12:17 crc kubenswrapper[4971]: I1127 07:12:17.249738 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-p5tg5" Nov 27 07:12:17 crc kubenswrapper[4971]: I1127 07:12:17.360437 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" podStartSLOduration=32.360409692 podStartE2EDuration="32.360409692s" podCreationTimestamp="2025-11-27 07:11:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:12:17.3372155 +0000 UTC m=+1175.529259428" watchObservedRunningTime="2025-11-27 07:12:17.360409692 +0000 UTC m=+1175.552453610" Nov 27 07:12:17 crc kubenswrapper[4971]: I1127 07:12:17.478633 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-6b7f75547b-p5tg5" podStartSLOduration=3.195195024 podStartE2EDuration="33.47858085s" podCreationTimestamp="2025-11-27 07:11:44 +0000 UTC" firstStartedPulling="2025-11-27 07:11:46.109464835 +0000 UTC m=+1144.301508753" lastFinishedPulling="2025-11-27 07:12:16.392850661 +0000 UTC m=+1174.584894579" observedRunningTime="2025-11-27 07:12:17.464000081 +0000 UTC m=+1175.656043999" watchObservedRunningTime="2025-11-27 07:12:17.47858085 +0000 UTC m=+1175.670624768" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.253674 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-t8rr8" event={"ID":"a5e429ee-33ed-40b1-878c-c4cf36ffd5ff","Type":"ContainerStarted","Data":"89eebe351e2e92ac6d65fa60287a5c7ae0f9e475d57e55b5182240ac085d2144"} Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.254093 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-t8rr8" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.256027 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-67cb4dc6d4-7c6m7" event={"ID":"bc2fe762-04b6-4c5a-bf42-ce67dd839609","Type":"ContainerStarted","Data":"8e64fda22eb6eb1ef6fef23e54f5812cb9b1641ef85b08a2fdbc60a2d900af96"} Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.257559 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-56897c768d-2zcq7" event={"ID":"bd9bf1a7-8218-4ea2-b68a-4f5961835bf3","Type":"ContainerStarted","Data":"64c7b8a10343a65fb036e4675b115cf30249760b326cab1eef8002024c031d35"} Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.257682 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-56897c768d-2zcq7" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.259033 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb" 
event={"ID":"13bda70c-c880-4a51-aee8-6de35bcf8a35","Type":"ContainerStarted","Data":"bfd72566ebf1711a7697b2a4ff582c94139172bc578089857ee8ba4b419c19d7"} Nov 27 07:12:18 crc kubenswrapper[4971]: E1127 07:12:18.260447 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb" podUID="13bda70c-c880-4a51-aee8-6de35bcf8a35" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.262119 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" event={"ID":"d1299562-0e0b-4dc5-916a-a72c2a79993d","Type":"ContainerStarted","Data":"f1e76989c7324c99fb2c32c94873e53ac434c130ba1f2959b664cc8ead229d81"} Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.262705 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.268836 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-mg4bw" event={"ID":"e9ec811b-a5ea-4068-abe7-fdfbdaba39b3","Type":"ContainerStarted","Data":"82117d3c7f96e07c598bea9460962f3a97f52e5f6af6709a440c695eb89f758b"} Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.269082 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-mg4bw" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.271149 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-gn2tw" event={"ID":"6488635b-4233-4898-b540-8c7433b25d0f","Type":"ContainerStarted","Data":"0f944f7ced7b01a7549f008a882c5c0d69aa5e54feff6dcc3d79be6da687ba7f"} Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.273190 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-t8rr8" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.281807 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-mg4bw" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.284060 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-kglvq" event={"ID":"5a3f31c6-891f-4fc3-9979-ada42facf791","Type":"ContainerStarted","Data":"07bd545a1ee574df144f4863dc019f13fe7fdf1819c54c4abd13abdf6860df56"} Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.284176 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-kglvq" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.284676 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-5d499bf58b-t8rr8" podStartSLOduration=3.656475526 podStartE2EDuration="34.284650899s" podCreationTimestamp="2025-11-27 07:11:44 +0000 UTC" firstStartedPulling="2025-11-27 07:11:46.295189909 +0000 UTC m=+1144.487233827" 
lastFinishedPulling="2025-11-27 07:12:16.923365282 +0000 UTC m=+1175.115409200" observedRunningTime="2025-11-27 07:12:18.281987101 +0000 UTC m=+1176.474031039" watchObservedRunningTime="2025-11-27 07:12:18.284650899 +0000 UTC m=+1176.476694817" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.289493 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-xscln" event={"ID":"ccd00428-c0de-472f-a9a3-19ec6524d4c8","Type":"ContainerStarted","Data":"8b9514b5ed8fa791db4f01e93f0aae25dc55e7fb79cd709137f35a2b849c3ec2"} Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.289690 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-xscln" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.292226 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-955677c94-rm5cw" event={"ID":"d4125c07-9503-4a93-a635-5863fdf26632","Type":"ContainerStarted","Data":"43564a2004c5150a72a5af59d12c4266816cd9718f37b32f76973c033c56ea66"} Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.292690 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-955677c94-rm5cw" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.297606 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-xscln" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.297801 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-jl2db" event={"ID":"defd6a4c-7400-4f6b-8258-1c70426bcb85","Type":"ContainerStarted","Data":"6f0f67a6ef4831d1b33d8afbb2185c6c5d9db64d728973e6a919b18b736b7333"} Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.297962 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-jl2db" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.301116 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" event={"ID":"eee345d4-5e02-4a96-a204-383fd410e564","Type":"ContainerStarted","Data":"4278e25744ee70a17e9fc6e8fc18b40acde7fac21a9125f4a6f09251d42320b0"} Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.301274 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.303400 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-7928r" event={"ID":"4f041cb0-6235-484c-829e-3bf4b6a3e5e6","Type":"ContainerStarted","Data":"77c071426a960ab928b7778f70b02ffa2ab5add0819e9a6c62723f7ef76f42ba"} Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.303712 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-7928r" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.304470 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-955677c94-rm5cw" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.306650 4971 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-7928r" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.306940 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-ctgpd" event={"ID":"f6b1aca7-dda9-4f85-b305-ecc1f7e40c36","Type":"ContainerStarted","Data":"189a609dc6693561ffe114292c7916c731afcc304aaa2adceb9ef4c24a3ec090"} Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.307042 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-ctgpd" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.316892 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-56897c768d-2zcq7" podStartSLOduration=5.302830171 podStartE2EDuration="34.316865647s" podCreationTimestamp="2025-11-27 07:11:44 +0000 UTC" firstStartedPulling="2025-11-27 07:11:46.655838271 +0000 UTC m=+1144.847882189" lastFinishedPulling="2025-11-27 07:12:15.669873747 +0000 UTC m=+1173.861917665" observedRunningTime="2025-11-27 07:12:18.310907682 +0000 UTC m=+1176.502951600" watchObservedRunningTime="2025-11-27 07:12:18.316865647 +0000 UTC m=+1176.508909575" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.324483 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-d77b94747-zkgrb" event={"ID":"8f49b4e8-d2ef-4a8b-8751-f480782f970f","Type":"ContainerStarted","Data":"1ec14abd23fccfb7ebc4c78c745b1ddf24cbe04d37b869ee3f42743fd5c2cf45"} Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.325374 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-d77b94747-zkgrb" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.329108 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-d77b94747-zkgrb" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.330900 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-q47ql" event={"ID":"8e16feb0-cfc3-43ba-9411-00788a5c42dd","Type":"ContainerStarted","Data":"f3f5ada088f92b89258b0cbf944e431a312a87bb6f16d9d93331db52263baf8f"} Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.331170 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-q47ql" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.333230 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-q47ql" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.333597 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-2zx72" event={"ID":"f3763945-b889-4f5b-bd9c-2f5bfa93e6d6","Type":"ContainerStarted","Data":"4b2cf6961cafd006230fc0dc034a8b1de6cb0a1a531596685f8201f1601be12a"} Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.334112 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-2zx72" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.336132 4971 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-2zx72" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.336805 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5b77f656f-5m76m" event={"ID":"2e4648d4-38e3-4e9b-a68d-7db912f67470","Type":"ContainerStarted","Data":"a9594547d976da7e56084bd0af88009e3609a9960be88b7a4e975b1b89e2ff37"} Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.357998 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" podStartSLOduration=21.252450029 podStartE2EDuration="34.357967837s" podCreationTimestamp="2025-11-27 07:11:44 +0000 UTC" firstStartedPulling="2025-11-27 07:12:03.170867736 +0000 UTC m=+1161.362911654" lastFinishedPulling="2025-11-27 07:12:16.276385544 +0000 UTC m=+1174.468429462" observedRunningTime="2025-11-27 07:12:18.342132061 +0000 UTC m=+1176.534175979" watchObservedRunningTime="2025-11-27 07:12:18.357967837 +0000 UTC m=+1176.550011755" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.491450 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-589cbd6b5b-mg4bw" podStartSLOduration=3.202145167 podStartE2EDuration="34.491430573s" podCreationTimestamp="2025-11-27 07:11:44 +0000 UTC" firstStartedPulling="2025-11-27 07:11:45.985762365 +0000 UTC m=+1144.177806283" lastFinishedPulling="2025-11-27 07:12:17.275047781 +0000 UTC m=+1175.467091689" observedRunningTime="2025-11-27 07:12:18.490152595 +0000 UTC m=+1176.682196543" watchObservedRunningTime="2025-11-27 07:12:18.491430573 +0000 UTC m=+1176.683474491" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.541778 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5b77f656f-5m76m" podStartSLOduration=4.091782665 podStartE2EDuration="34.541744413s" podCreationTimestamp="2025-11-27 07:11:44 +0000 UTC" firstStartedPulling="2025-11-27 07:11:46.152057108 +0000 UTC m=+1144.344101026" lastFinishedPulling="2025-11-27 07:12:16.602018856 +0000 UTC m=+1174.794062774" observedRunningTime="2025-11-27 07:12:18.521699024 +0000 UTC m=+1176.713742942" watchObservedRunningTime="2025-11-27 07:12:18.541744413 +0000 UTC m=+1176.733788321" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.578856 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-kglvq" podStartSLOduration=4.145402199 podStartE2EDuration="33.578833315s" podCreationTimestamp="2025-11-27 07:11:45 +0000 UTC" firstStartedPulling="2025-11-27 07:11:46.813359687 +0000 UTC m=+1145.005403615" lastFinishedPulling="2025-11-27 07:12:16.246790813 +0000 UTC m=+1174.438834731" observedRunningTime="2025-11-27 07:12:18.560181346 +0000 UTC m=+1176.752225284" watchObservedRunningTime="2025-11-27 07:12:18.578833315 +0000 UTC m=+1176.770877233" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.619582 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-6fdcddb789-q47ql" podStartSLOduration=4.193334663 podStartE2EDuration="34.619557733s" podCreationTimestamp="2025-11-27 07:11:44 +0000 UTC" firstStartedPulling="2025-11-27 07:11:46.489871898 +0000 UTC m=+1144.681915816" lastFinishedPulling="2025-11-27 
07:12:16.916094968 +0000 UTC m=+1175.108138886" observedRunningTime="2025-11-27 07:12:18.589238581 +0000 UTC m=+1176.781282509" watchObservedRunningTime="2025-11-27 07:12:18.619557733 +0000 UTC m=+1176.811601661" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.621502 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-ctgpd" podStartSLOduration=4.225020101 podStartE2EDuration="33.62149571s" podCreationTimestamp="2025-11-27 07:11:45 +0000 UTC" firstStartedPulling="2025-11-27 07:11:46.874893437 +0000 UTC m=+1145.066937355" lastFinishedPulling="2025-11-27 07:12:16.271369046 +0000 UTC m=+1174.463412964" observedRunningTime="2025-11-27 07:12:18.618189823 +0000 UTC m=+1176.810233741" watchObservedRunningTime="2025-11-27 07:12:18.62149571 +0000 UTC m=+1176.813539628" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.684643 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-66f4dd4bc7-xscln" podStartSLOduration=4.591735766 podStartE2EDuration="34.684617178s" podCreationTimestamp="2025-11-27 07:11:44 +0000 UTC" firstStartedPulling="2025-11-27 07:11:46.504032164 +0000 UTC m=+1144.696076082" lastFinishedPulling="2025-11-27 07:12:16.596913576 +0000 UTC m=+1174.788957494" observedRunningTime="2025-11-27 07:12:18.652967466 +0000 UTC m=+1176.845011414" watchObservedRunningTime="2025-11-27 07:12:18.684617178 +0000 UTC m=+1176.876661096" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.690664 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-jl2db" podStartSLOduration=4.711026372 podStartE2EDuration="33.690646195s" podCreationTimestamp="2025-11-27 07:11:45 +0000 UTC" firstStartedPulling="2025-11-27 07:11:46.791861074 +0000 UTC m=+1144.983904992" lastFinishedPulling="2025-11-27 07:12:15.771480857 +0000 UTC m=+1173.963524815" observedRunningTime="2025-11-27 07:12:18.677770746 +0000 UTC m=+1176.869814674" watchObservedRunningTime="2025-11-27 07:12:18.690646195 +0000 UTC m=+1176.882690113" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.709168 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-64cdc6ff96-7928r" podStartSLOduration=4.238633625 podStartE2EDuration="34.709144949s" podCreationTimestamp="2025-11-27 07:11:44 +0000 UTC" firstStartedPulling="2025-11-27 07:11:46.591814117 +0000 UTC m=+1144.783858045" lastFinishedPulling="2025-11-27 07:12:17.062325461 +0000 UTC m=+1175.254369369" observedRunningTime="2025-11-27 07:12:18.705909254 +0000 UTC m=+1176.897953172" watchObservedRunningTime="2025-11-27 07:12:18.709144949 +0000 UTC m=+1176.901188867" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.730303 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" podStartSLOduration=20.4889382 podStartE2EDuration="34.730277851s" podCreationTimestamp="2025-11-27 07:11:44 +0000 UTC" firstStartedPulling="2025-11-27 07:12:02.005981218 +0000 UTC m=+1160.198025136" lastFinishedPulling="2025-11-27 07:12:16.247320869 +0000 UTC m=+1174.439364787" observedRunningTime="2025-11-27 07:12:18.726477279 +0000 UTC m=+1176.918521197" watchObservedRunningTime="2025-11-27 07:12:18.730277851 +0000 UTC m=+1176.922321769" Nov 27 07:12:18 crc kubenswrapper[4971]: 
I1127 07:12:18.768772 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-955677c94-rm5cw" podStartSLOduration=3.9978977909999998 podStartE2EDuration="34.768752613s" podCreationTimestamp="2025-11-27 07:11:44 +0000 UTC" firstStartedPulling="2025-11-27 07:11:46.321240105 +0000 UTC m=+1144.513284023" lastFinishedPulling="2025-11-27 07:12:17.092094927 +0000 UTC m=+1175.284138845" observedRunningTime="2025-11-27 07:12:18.763942912 +0000 UTC m=+1176.955986830" watchObservedRunningTime="2025-11-27 07:12:18.768752613 +0000 UTC m=+1176.960796531" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.785674 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-79556f57fc-2zx72" podStartSLOduration=3.671376304 podStartE2EDuration="34.785647671s" podCreationTimestamp="2025-11-27 07:11:44 +0000 UTC" firstStartedPulling="2025-11-27 07:11:46.481102299 +0000 UTC m=+1144.673146217" lastFinishedPulling="2025-11-27 07:12:17.595373666 +0000 UTC m=+1175.787417584" observedRunningTime="2025-11-27 07:12:18.781748606 +0000 UTC m=+1176.973792534" watchObservedRunningTime="2025-11-27 07:12:18.785647671 +0000 UTC m=+1176.977691589" Nov 27 07:12:18 crc kubenswrapper[4971]: I1127 07:12:18.814658 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-d77b94747-zkgrb" podStartSLOduration=3.41587963 podStartE2EDuration="33.814633873s" podCreationTimestamp="2025-11-27 07:11:45 +0000 UTC" firstStartedPulling="2025-11-27 07:11:46.650066011 +0000 UTC m=+1144.842109929" lastFinishedPulling="2025-11-27 07:12:17.048820254 +0000 UTC m=+1175.240864172" observedRunningTime="2025-11-27 07:12:18.810150291 +0000 UTC m=+1177.002194209" watchObservedRunningTime="2025-11-27 07:12:18.814633873 +0000 UTC m=+1177.006677791" Nov 27 07:12:19 crc kubenswrapper[4971]: I1127 07:12:19.352555 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-gn2tw" event={"ID":"6488635b-4233-4898-b540-8c7433b25d0f","Type":"ContainerStarted","Data":"1c3daadfff9a2c15d531e290b2470eff11e3f6531b74f30e3cf41f09cb54a540"} Nov 27 07:12:19 crc kubenswrapper[4971]: I1127 07:12:19.357036 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5b77f656f-5m76m" Nov 27 07:12:19 crc kubenswrapper[4971]: I1127 07:12:19.357417 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-gn2tw" Nov 27 07:12:19 crc kubenswrapper[4971]: I1127 07:12:19.359476 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5b77f656f-5m76m" Nov 27 07:12:19 crc kubenswrapper[4971]: I1127 07:12:19.388693 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-gn2tw" podStartSLOduration=3.261196344 podStartE2EDuration="35.388656875s" podCreationTimestamp="2025-11-27 07:11:44 +0000 UTC" firstStartedPulling="2025-11-27 07:11:46.239220862 +0000 UTC m=+1144.431264780" lastFinishedPulling="2025-11-27 07:12:18.366681403 +0000 UTC m=+1176.558725311" observedRunningTime="2025-11-27 07:12:19.371978854 +0000 UTC m=+1177.564022782" watchObservedRunningTime="2025-11-27 
07:12:19.388656875 +0000 UTC m=+1177.580700853" Nov 27 07:12:21 crc kubenswrapper[4971]: I1127 07:12:21.129223 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" Nov 27 07:12:21 crc kubenswrapper[4971]: I1127 07:12:21.354787 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5d9f9695db7tlww" Nov 27 07:12:22 crc kubenswrapper[4971]: I1127 07:12:22.038017 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-66f75ddbcc-fpsdk" Nov 27 07:12:25 crc kubenswrapper[4971]: I1127 07:12:25.326265 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7b4567c7cf-gn2tw" Nov 27 07:12:25 crc kubenswrapper[4971]: I1127 07:12:25.780804 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-56897c768d-2zcq7" Nov 27 07:12:25 crc kubenswrapper[4971]: I1127 07:12:25.842479 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-57988cc5b5-kglvq" Nov 27 07:12:26 crc kubenswrapper[4971]: I1127 07:12:26.005227 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5cd6c7f4c8-jl2db" Nov 27 07:12:26 crc kubenswrapper[4971]: I1127 07:12:26.073132 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-656dcb59d4-ctgpd" Nov 27 07:12:26 crc kubenswrapper[4971]: I1127 07:12:26.413671 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:12:26 crc kubenswrapper[4971]: I1127 07:12:26.413726 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:12:26 crc kubenswrapper[4971]: I1127 07:12:26.413769 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 07:12:26 crc kubenswrapper[4971]: I1127 07:12:26.414316 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"30867a48d7e31eebb704511f63aa767a3965e3de0a82008135ef08d2702adfac"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 07:12:26 crc kubenswrapper[4971]: I1127 07:12:26.414384 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://30867a48d7e31eebb704511f63aa767a3965e3de0a82008135ef08d2702adfac" 
gracePeriod=600 Nov 27 07:12:27 crc kubenswrapper[4971]: I1127 07:12:27.443047 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="30867a48d7e31eebb704511f63aa767a3965e3de0a82008135ef08d2702adfac" exitCode=0 Nov 27 07:12:27 crc kubenswrapper[4971]: I1127 07:12:27.443124 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"30867a48d7e31eebb704511f63aa767a3965e3de0a82008135ef08d2702adfac"} Nov 27 07:12:27 crc kubenswrapper[4971]: I1127 07:12:27.444104 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"5137c970e6cdfd6e2022087bc7914ffa98a593d65bc63eec37cd087c8fcec32d"} Nov 27 07:12:27 crc kubenswrapper[4971]: I1127 07:12:27.444163 4971 scope.go:117] "RemoveContainer" containerID="4b10f4e9a06fcbdd9ab8460b0527e15bbc485600c51c9ce5ae3fa173ac3281f7" Nov 27 07:12:30 crc kubenswrapper[4971]: E1127 07:12:30.553899 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb" podUID="13bda70c-c880-4a51-aee8-6de35bcf8a35" Nov 27 07:12:45 crc kubenswrapper[4971]: I1127 07:12:45.624973 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb" event={"ID":"13bda70c-c880-4a51-aee8-6de35bcf8a35","Type":"ContainerStarted","Data":"bf89cd132092282ecf585e4970a018425cd56b4bf807c5482f4e01f958f5dadd"} Nov 27 07:12:45 crc kubenswrapper[4971]: I1127 07:12:45.626081 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb" Nov 27 07:12:45 crc kubenswrapper[4971]: I1127 07:12:45.649459 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb" podStartSLOduration=2.200037245 podStartE2EDuration="1m0.649427091s" podCreationTimestamp="2025-11-27 07:11:45 +0000 UTC" firstStartedPulling="2025-11-27 07:11:46.654387539 +0000 UTC m=+1144.846431457" lastFinishedPulling="2025-11-27 07:12:45.103777345 +0000 UTC m=+1203.295821303" observedRunningTime="2025-11-27 07:12:45.643735024 +0000 UTC m=+1203.835778962" watchObservedRunningTime="2025-11-27 07:12:45.649427091 +0000 UTC m=+1203.841471019" Nov 27 07:12:55 crc kubenswrapper[4971]: I1127 07:12:55.941684 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-p4brb" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.144563 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-557f57d995-hr4xs"] Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.148463 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-557f57d995-hr4xs" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.152702 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-hr4xs"] Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.156333 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.156620 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.156829 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-hgs7c" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.158441 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.227915 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/059f6657-eda5-4f44-91fb-abd66cc1d9e9-config\") pod \"dnsmasq-dns-557f57d995-hr4xs\" (UID: \"059f6657-eda5-4f44-91fb-abd66cc1d9e9\") " pod="openstack/dnsmasq-dns-557f57d995-hr4xs" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.228065 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j78p4\" (UniqueName: \"kubernetes.io/projected/059f6657-eda5-4f44-91fb-abd66cc1d9e9-kube-api-access-j78p4\") pod \"dnsmasq-dns-557f57d995-hr4xs\" (UID: \"059f6657-eda5-4f44-91fb-abd66cc1d9e9\") " pod="openstack/dnsmasq-dns-557f57d995-hr4xs" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.263222 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-2kj78"] Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.267482 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-766fdc659c-2kj78" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.272028 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.290095 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-2kj78"] Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.330847 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/059f6657-eda5-4f44-91fb-abd66cc1d9e9-config\") pod \"dnsmasq-dns-557f57d995-hr4xs\" (UID: \"059f6657-eda5-4f44-91fb-abd66cc1d9e9\") " pod="openstack/dnsmasq-dns-557f57d995-hr4xs" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.331869 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f46735aa-eb5c-475c-a810-c60a3d192807-config\") pod \"dnsmasq-dns-766fdc659c-2kj78\" (UID: \"f46735aa-eb5c-475c-a810-c60a3d192807\") " pod="openstack/dnsmasq-dns-766fdc659c-2kj78" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.331943 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j78p4\" (UniqueName: \"kubernetes.io/projected/059f6657-eda5-4f44-91fb-abd66cc1d9e9-kube-api-access-j78p4\") pod \"dnsmasq-dns-557f57d995-hr4xs\" (UID: \"059f6657-eda5-4f44-91fb-abd66cc1d9e9\") " pod="openstack/dnsmasq-dns-557f57d995-hr4xs" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.332020 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58d2c\" (UniqueName: \"kubernetes.io/projected/f46735aa-eb5c-475c-a810-c60a3d192807-kube-api-access-58d2c\") pod \"dnsmasq-dns-766fdc659c-2kj78\" (UID: \"f46735aa-eb5c-475c-a810-c60a3d192807\") " pod="openstack/dnsmasq-dns-766fdc659c-2kj78" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.332024 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/059f6657-eda5-4f44-91fb-abd66cc1d9e9-config\") pod \"dnsmasq-dns-557f57d995-hr4xs\" (UID: \"059f6657-eda5-4f44-91fb-abd66cc1d9e9\") " pod="openstack/dnsmasq-dns-557f57d995-hr4xs" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.333041 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f46735aa-eb5c-475c-a810-c60a3d192807-dns-svc\") pod \"dnsmasq-dns-766fdc659c-2kj78\" (UID: \"f46735aa-eb5c-475c-a810-c60a3d192807\") " pod="openstack/dnsmasq-dns-766fdc659c-2kj78" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.363704 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j78p4\" (UniqueName: \"kubernetes.io/projected/059f6657-eda5-4f44-91fb-abd66cc1d9e9-kube-api-access-j78p4\") pod \"dnsmasq-dns-557f57d995-hr4xs\" (UID: \"059f6657-eda5-4f44-91fb-abd66cc1d9e9\") " pod="openstack/dnsmasq-dns-557f57d995-hr4xs" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.434924 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58d2c\" (UniqueName: \"kubernetes.io/projected/f46735aa-eb5c-475c-a810-c60a3d192807-kube-api-access-58d2c\") pod \"dnsmasq-dns-766fdc659c-2kj78\" (UID: \"f46735aa-eb5c-475c-a810-c60a3d192807\") " pod="openstack/dnsmasq-dns-766fdc659c-2kj78" Nov 27 07:13:13 
crc kubenswrapper[4971]: I1127 07:13:13.435099 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f46735aa-eb5c-475c-a810-c60a3d192807-dns-svc\") pod \"dnsmasq-dns-766fdc659c-2kj78\" (UID: \"f46735aa-eb5c-475c-a810-c60a3d192807\") " pod="openstack/dnsmasq-dns-766fdc659c-2kj78" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.435175 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f46735aa-eb5c-475c-a810-c60a3d192807-config\") pod \"dnsmasq-dns-766fdc659c-2kj78\" (UID: \"f46735aa-eb5c-475c-a810-c60a3d192807\") " pod="openstack/dnsmasq-dns-766fdc659c-2kj78" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.436422 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f46735aa-eb5c-475c-a810-c60a3d192807-dns-svc\") pod \"dnsmasq-dns-766fdc659c-2kj78\" (UID: \"f46735aa-eb5c-475c-a810-c60a3d192807\") " pod="openstack/dnsmasq-dns-766fdc659c-2kj78" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.436596 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f46735aa-eb5c-475c-a810-c60a3d192807-config\") pod \"dnsmasq-dns-766fdc659c-2kj78\" (UID: \"f46735aa-eb5c-475c-a810-c60a3d192807\") " pod="openstack/dnsmasq-dns-766fdc659c-2kj78" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.460863 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58d2c\" (UniqueName: \"kubernetes.io/projected/f46735aa-eb5c-475c-a810-c60a3d192807-kube-api-access-58d2c\") pod \"dnsmasq-dns-766fdc659c-2kj78\" (UID: \"f46735aa-eb5c-475c-a810-c60a3d192807\") " pod="openstack/dnsmasq-dns-766fdc659c-2kj78" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.486273 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-557f57d995-hr4xs" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.588496 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-766fdc659c-2kj78" Nov 27 07:13:13 crc kubenswrapper[4971]: I1127 07:13:13.910140 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-hr4xs"] Nov 27 07:13:14 crc kubenswrapper[4971]: I1127 07:13:14.107669 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-2kj78"] Nov 27 07:13:14 crc kubenswrapper[4971]: W1127 07:13:14.112109 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf46735aa_eb5c_475c_a810_c60a3d192807.slice/crio-48966907910f9a5d0bc057813b79bed057a37ed64972d96e5ccd53cc08c4da7b WatchSource:0}: Error finding container 48966907910f9a5d0bc057813b79bed057a37ed64972d96e5ccd53cc08c4da7b: Status 404 returned error can't find the container with id 48966907910f9a5d0bc057813b79bed057a37ed64972d96e5ccd53cc08c4da7b Nov 27 07:13:14 crc kubenswrapper[4971]: I1127 07:13:14.868511 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-766fdc659c-2kj78" event={"ID":"f46735aa-eb5c-475c-a810-c60a3d192807","Type":"ContainerStarted","Data":"48966907910f9a5d0bc057813b79bed057a37ed64972d96e5ccd53cc08c4da7b"} Nov 27 07:13:14 crc kubenswrapper[4971]: I1127 07:13:14.870478 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-557f57d995-hr4xs" event={"ID":"059f6657-eda5-4f44-91fb-abd66cc1d9e9","Type":"ContainerStarted","Data":"340d70f52c3b6b1d209575efb93d7bdaa4c40599c2242c232dd2ed54e2fb64e6"} Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.479244 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-hr4xs"] Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.533171 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57dc4c6697-d7m57"] Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.535093 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57dc4c6697-d7m57" Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.549133 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57dc4c6697-d7m57"] Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.690987 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e4df58e-1b19-439d-aee0-a09ee67f328e-dns-svc\") pod \"dnsmasq-dns-57dc4c6697-d7m57\" (UID: \"5e4df58e-1b19-439d-aee0-a09ee67f328e\") " pod="openstack/dnsmasq-dns-57dc4c6697-d7m57" Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.691346 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e4df58e-1b19-439d-aee0-a09ee67f328e-config\") pod \"dnsmasq-dns-57dc4c6697-d7m57\" (UID: \"5e4df58e-1b19-439d-aee0-a09ee67f328e\") " pod="openstack/dnsmasq-dns-57dc4c6697-d7m57" Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.691376 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4p6h\" (UniqueName: \"kubernetes.io/projected/5e4df58e-1b19-439d-aee0-a09ee67f328e-kube-api-access-s4p6h\") pod \"dnsmasq-dns-57dc4c6697-d7m57\" (UID: \"5e4df58e-1b19-439d-aee0-a09ee67f328e\") " pod="openstack/dnsmasq-dns-57dc4c6697-d7m57" Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.792599 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e4df58e-1b19-439d-aee0-a09ee67f328e-dns-svc\") pod \"dnsmasq-dns-57dc4c6697-d7m57\" (UID: \"5e4df58e-1b19-439d-aee0-a09ee67f328e\") " pod="openstack/dnsmasq-dns-57dc4c6697-d7m57" Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.792700 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e4df58e-1b19-439d-aee0-a09ee67f328e-config\") pod \"dnsmasq-dns-57dc4c6697-d7m57\" (UID: \"5e4df58e-1b19-439d-aee0-a09ee67f328e\") " pod="openstack/dnsmasq-dns-57dc4c6697-d7m57" Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.792734 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4p6h\" (UniqueName: \"kubernetes.io/projected/5e4df58e-1b19-439d-aee0-a09ee67f328e-kube-api-access-s4p6h\") pod \"dnsmasq-dns-57dc4c6697-d7m57\" (UID: \"5e4df58e-1b19-439d-aee0-a09ee67f328e\") " pod="openstack/dnsmasq-dns-57dc4c6697-d7m57" Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.793860 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e4df58e-1b19-439d-aee0-a09ee67f328e-dns-svc\") pod \"dnsmasq-dns-57dc4c6697-d7m57\" (UID: \"5e4df58e-1b19-439d-aee0-a09ee67f328e\") " pod="openstack/dnsmasq-dns-57dc4c6697-d7m57" Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.794577 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e4df58e-1b19-439d-aee0-a09ee67f328e-config\") pod \"dnsmasq-dns-57dc4c6697-d7m57\" (UID: \"5e4df58e-1b19-439d-aee0-a09ee67f328e\") " pod="openstack/dnsmasq-dns-57dc4c6697-d7m57" Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.832552 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4p6h\" (UniqueName: 
\"kubernetes.io/projected/5e4df58e-1b19-439d-aee0-a09ee67f328e-kube-api-access-s4p6h\") pod \"dnsmasq-dns-57dc4c6697-d7m57\" (UID: \"5e4df58e-1b19-439d-aee0-a09ee67f328e\") " pod="openstack/dnsmasq-dns-57dc4c6697-d7m57" Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.856163 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-2kj78"] Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.874903 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57dc4c6697-d7m57" Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.893628 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-z9w86"] Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.895044 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8446fd7c75-z9w86" Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.903774 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-z9w86"] Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.997561 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2tm2\" (UniqueName: \"kubernetes.io/projected/42377322-3180-4e68-94b9-601dcd82f6f4-kube-api-access-r2tm2\") pod \"dnsmasq-dns-8446fd7c75-z9w86\" (UID: \"42377322-3180-4e68-94b9-601dcd82f6f4\") " pod="openstack/dnsmasq-dns-8446fd7c75-z9w86" Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.997749 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42377322-3180-4e68-94b9-601dcd82f6f4-dns-svc\") pod \"dnsmasq-dns-8446fd7c75-z9w86\" (UID: \"42377322-3180-4e68-94b9-601dcd82f6f4\") " pod="openstack/dnsmasq-dns-8446fd7c75-z9w86" Nov 27 07:13:15 crc kubenswrapper[4971]: I1127 07:13:15.998036 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42377322-3180-4e68-94b9-601dcd82f6f4-config\") pod \"dnsmasq-dns-8446fd7c75-z9w86\" (UID: \"42377322-3180-4e68-94b9-601dcd82f6f4\") " pod="openstack/dnsmasq-dns-8446fd7c75-z9w86" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.101896 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2tm2\" (UniqueName: \"kubernetes.io/projected/42377322-3180-4e68-94b9-601dcd82f6f4-kube-api-access-r2tm2\") pod \"dnsmasq-dns-8446fd7c75-z9w86\" (UID: \"42377322-3180-4e68-94b9-601dcd82f6f4\") " pod="openstack/dnsmasq-dns-8446fd7c75-z9w86" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.101953 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42377322-3180-4e68-94b9-601dcd82f6f4-dns-svc\") pod \"dnsmasq-dns-8446fd7c75-z9w86\" (UID: \"42377322-3180-4e68-94b9-601dcd82f6f4\") " pod="openstack/dnsmasq-dns-8446fd7c75-z9w86" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.102105 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42377322-3180-4e68-94b9-601dcd82f6f4-config\") pod \"dnsmasq-dns-8446fd7c75-z9w86\" (UID: \"42377322-3180-4e68-94b9-601dcd82f6f4\") " pod="openstack/dnsmasq-dns-8446fd7c75-z9w86" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.104380 4971 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42377322-3180-4e68-94b9-601dcd82f6f4-config\") pod \"dnsmasq-dns-8446fd7c75-z9w86\" (UID: \"42377322-3180-4e68-94b9-601dcd82f6f4\") " pod="openstack/dnsmasq-dns-8446fd7c75-z9w86" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.104380 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42377322-3180-4e68-94b9-601dcd82f6f4-dns-svc\") pod \"dnsmasq-dns-8446fd7c75-z9w86\" (UID: \"42377322-3180-4e68-94b9-601dcd82f6f4\") " pod="openstack/dnsmasq-dns-8446fd7c75-z9w86" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.141384 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2tm2\" (UniqueName: \"kubernetes.io/projected/42377322-3180-4e68-94b9-601dcd82f6f4-kube-api-access-r2tm2\") pod \"dnsmasq-dns-8446fd7c75-z9w86\" (UID: \"42377322-3180-4e68-94b9-601dcd82f6f4\") " pod="openstack/dnsmasq-dns-8446fd7c75-z9w86" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.268754 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57dc4c6697-d7m57"] Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.299693 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8446fd7c75-z9w86" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.719786 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.721577 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.730716 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.730978 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.731133 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.731245 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.731365 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.731480 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-86xj7" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.731607 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.742952 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-z9w86"] Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.749072 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 27 07:13:16 crc kubenswrapper[4971]: W1127 07:13:16.793773 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod42377322_3180_4e68_94b9_601dcd82f6f4.slice/crio-b4a1263f944c2fd8d6778a72cc5629e01ac88e840a570b2445c603bd63e4af1d WatchSource:0}: Error finding container 
b4a1263f944c2fd8d6778a72cc5629e01ac88e840a570b2445c603bd63e4af1d: Status 404 returned error can't find the container with id b4a1263f944c2fd8d6778a72cc5629e01ac88e840a570b2445c603bd63e4af1d Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.831453 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.831508 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d6439a3c-ee26-467c-8e42-5abbbf390f16-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.831553 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.831620 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.831660 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-config-data\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.831689 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d6439a3c-ee26-467c-8e42-5abbbf390f16-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.831707 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.831733 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.831749 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-plugins-conf\") pod 
\"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.831768 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmnnq\" (UniqueName: \"kubernetes.io/projected/d6439a3c-ee26-467c-8e42-5abbbf390f16-kube-api-access-kmnnq\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.831796 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.923851 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8446fd7c75-z9w86" event={"ID":"42377322-3180-4e68-94b9-601dcd82f6f4","Type":"ContainerStarted","Data":"b4a1263f944c2fd8d6778a72cc5629e01ac88e840a570b2445c603bd63e4af1d"} Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.933055 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.933117 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d6439a3c-ee26-467c-8e42-5abbbf390f16-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.933149 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.933232 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.933274 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-config-data\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.933300 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d6439a3c-ee26-467c-8e42-5abbbf390f16-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.933322 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.933348 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.933366 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.933382 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmnnq\" (UniqueName: \"kubernetes.io/projected/d6439a3c-ee26-467c-8e42-5abbbf390f16-kube-api-access-kmnnq\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.933408 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.935878 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.936589 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57dc4c6697-d7m57" event={"ID":"5e4df58e-1b19-439d-aee0-a09ee67f328e","Type":"ContainerStarted","Data":"51418f03259c3784502d88fc015197915de54a579198bcbfd6bef29fa8c5f498"} Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.937073 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.937242 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.937590 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.941556 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.942621 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-config-data\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.944870 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d6439a3c-ee26-467c-8e42-5abbbf390f16-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.970083 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmnnq\" (UniqueName: \"kubernetes.io/projected/d6439a3c-ee26-467c-8e42-5abbbf390f16-kube-api-access-kmnnq\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.984382 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.984866 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:16 crc kubenswrapper[4971]: I1127 07:13:16.991316 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d6439a3c-ee26-467c-8e42-5abbbf390f16-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.043449 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.044327 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " pod="openstack/rabbitmq-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.044916 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.055019 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.055282 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.055422 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-nqrkc" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.055525 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.055670 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.055807 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.055930 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.063174 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.144819 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.144901 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/640c3829-d2e9-49e1-82e3-bd213aa992dd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.144925 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.144967 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.144996 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.145048 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.145065 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.145085 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzvhk\" (UniqueName: \"kubernetes.io/projected/640c3829-d2e9-49e1-82e3-bd213aa992dd-kube-api-access-xzvhk\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.145108 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.145130 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/640c3829-d2e9-49e1-82e3-bd213aa992dd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.145150 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.158308 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.246444 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.246544 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.246569 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " 
pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.246596 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzvhk\" (UniqueName: \"kubernetes.io/projected/640c3829-d2e9-49e1-82e3-bd213aa992dd-kube-api-access-xzvhk\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.246625 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/640c3829-d2e9-49e1-82e3-bd213aa992dd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.246648 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.246674 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.246717 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.246756 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/640c3829-d2e9-49e1-82e3-bd213aa992dd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.246779 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.246811 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.248032 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.251409 4971 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.251800 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.253312 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.253707 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.253857 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.261300 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.263869 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/640c3829-d2e9-49e1-82e3-bd213aa992dd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.264204 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/640c3829-d2e9-49e1-82e3-bd213aa992dd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.275347 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.285160 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzvhk\" (UniqueName: \"kubernetes.io/projected/640c3829-d2e9-49e1-82e3-bd213aa992dd-kube-api-access-xzvhk\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.311194 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.419812 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.566049 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 27 07:13:17 crc kubenswrapper[4971]: W1127 07:13:17.681721 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd6439a3c_ee26_467c_8e42_5abbbf390f16.slice/crio-b68c9a2e72731ae4a0d02eeb428d0b5c5cc249a3fba7835d0886b885cd7b4645 WatchSource:0}: Error finding container b68c9a2e72731ae4a0d02eeb428d0b5c5cc249a3fba7835d0886b885cd7b4645: Status 404 returned error can't find the container with id b68c9a2e72731ae4a0d02eeb428d0b5c5cc249a3fba7835d0886b885cd7b4645 Nov 27 07:13:17 crc kubenswrapper[4971]: I1127 07:13:17.987775 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d6439a3c-ee26-467c-8e42-5abbbf390f16","Type":"ContainerStarted","Data":"b68c9a2e72731ae4a0d02eeb428d0b5c5cc249a3fba7835d0886b885cd7b4645"} Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.151336 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.190025 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.191389 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.199122 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-kzxgw" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.199667 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.199953 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.200300 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.216597 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.225700 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.276513 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.276587 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-config-data-default\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.276652 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2dt8\" (UniqueName: \"kubernetes.io/projected/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-kube-api-access-v2dt8\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.276672 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-kolla-config\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.276687 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.276720 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.276747 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.276771 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.379611 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2dt8\" (UniqueName: \"kubernetes.io/projected/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-kube-api-access-v2dt8\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.383703 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-kolla-config\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.383732 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.383809 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.383866 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.383894 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.383933 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.384056 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-config-data-default\") pod 
\"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.385373 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.385525 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-config-data-default\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.385635 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.392110 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.392377 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.393073 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-kolla-config\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.398703 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.402576 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2dt8\" (UniqueName: \"kubernetes.io/projected/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-kube-api-access-v2dt8\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.494503 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " pod="openstack/openstack-galera-0" Nov 27 07:13:18 crc kubenswrapper[4971]: I1127 07:13:18.521477 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.010591 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"640c3829-d2e9-49e1-82e3-bd213aa992dd","Type":"ContainerStarted","Data":"852d5810ef0f5e31c0974f4306fef32f05d2ec8ed8b6e2ceb9309c3647318ba8"} Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.115231 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.406665 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.409112 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.411951 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.411951 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.412724 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-v45xk" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.413700 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.414722 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.628757 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bcf58afd-21c6-4c9d-8702-09bc98859732-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.628861 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bcf58afd-21c6-4c9d-8702-09bc98859732-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.628905 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcf58afd-21c6-4c9d-8702-09bc98859732-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.628937 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.628957 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/bcf58afd-21c6-4c9d-8702-09bc98859732-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.629250 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bcf58afd-21c6-4c9d-8702-09bc98859732-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.629288 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkcmh\" (UniqueName: \"kubernetes.io/projected/bcf58afd-21c6-4c9d-8702-09bc98859732-kube-api-access-kkcmh\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.629310 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcf58afd-21c6-4c9d-8702-09bc98859732-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.730676 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcf58afd-21c6-4c9d-8702-09bc98859732-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.730830 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.731379 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.763850 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcf58afd-21c6-4c9d-8702-09bc98859732-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.774170 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bcf58afd-21c6-4c9d-8702-09bc98859732-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.774293 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/bcf58afd-21c6-4c9d-8702-09bc98859732-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.774427 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkcmh\" (UniqueName: \"kubernetes.io/projected/bcf58afd-21c6-4c9d-8702-09bc98859732-kube-api-access-kkcmh\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.774478 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcf58afd-21c6-4c9d-8702-09bc98859732-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.774693 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bcf58afd-21c6-4c9d-8702-09bc98859732-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.774742 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bcf58afd-21c6-4c9d-8702-09bc98859732-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.776553 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bcf58afd-21c6-4c9d-8702-09bc98859732-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.776794 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bcf58afd-21c6-4c9d-8702-09bc98859732-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.777066 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bcf58afd-21c6-4c9d-8702-09bc98859732-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.778861 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bcf58afd-21c6-4c9d-8702-09bc98859732-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.799426 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkcmh\" (UniqueName: \"kubernetes.io/projected/bcf58afd-21c6-4c9d-8702-09bc98859732-kube-api-access-kkcmh\") pod 
\"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.802106 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcf58afd-21c6-4c9d-8702-09bc98859732-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.811837 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-cell1-galera-0\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.901947 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.903583 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.913954 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.916918 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-zkh9t" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.917117 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Nov 27 07:13:19 crc kubenswrapper[4971]: I1127 07:13:19.917248 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Nov 27 07:13:20 crc kubenswrapper[4971]: I1127 07:13:20.001438 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6833730-f034-4b5f-954a-19e993167f04-config-data\") pod \"memcached-0\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") " pod="openstack/memcached-0" Nov 27 07:13:20 crc kubenswrapper[4971]: I1127 07:13:20.001511 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6833730-f034-4b5f-954a-19e993167f04-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") " pod="openstack/memcached-0" Nov 27 07:13:20 crc kubenswrapper[4971]: I1127 07:13:20.001548 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d6833730-f034-4b5f-954a-19e993167f04-kolla-config\") pod \"memcached-0\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") " pod="openstack/memcached-0" Nov 27 07:13:20 crc kubenswrapper[4971]: I1127 07:13:20.001599 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6833730-f034-4b5f-954a-19e993167f04-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") " pod="openstack/memcached-0" Nov 27 07:13:20 crc kubenswrapper[4971]: I1127 07:13:20.001621 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvd4d\" (UniqueName: 
\"kubernetes.io/projected/d6833730-f034-4b5f-954a-19e993167f04-kube-api-access-tvd4d\") pod \"memcached-0\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") " pod="openstack/memcached-0" Nov 27 07:13:20 crc kubenswrapper[4971]: I1127 07:13:20.086111 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3","Type":"ContainerStarted","Data":"d4ba45740bd639b78d618eee5a883a210b84172a9805a27979d54b1226fa1e2f"} Nov 27 07:13:20 crc kubenswrapper[4971]: I1127 07:13:20.106139 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6833730-f034-4b5f-954a-19e993167f04-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") " pod="openstack/memcached-0" Nov 27 07:13:20 crc kubenswrapper[4971]: I1127 07:13:20.106201 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d6833730-f034-4b5f-954a-19e993167f04-kolla-config\") pod \"memcached-0\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") " pod="openstack/memcached-0" Nov 27 07:13:20 crc kubenswrapper[4971]: I1127 07:13:20.106283 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6833730-f034-4b5f-954a-19e993167f04-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") " pod="openstack/memcached-0" Nov 27 07:13:20 crc kubenswrapper[4971]: I1127 07:13:20.106320 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvd4d\" (UniqueName: \"kubernetes.io/projected/d6833730-f034-4b5f-954a-19e993167f04-kube-api-access-tvd4d\") pod \"memcached-0\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") " pod="openstack/memcached-0" Nov 27 07:13:20 crc kubenswrapper[4971]: I1127 07:13:20.106399 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6833730-f034-4b5f-954a-19e993167f04-config-data\") pod \"memcached-0\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") " pod="openstack/memcached-0" Nov 27 07:13:20 crc kubenswrapper[4971]: I1127 07:13:20.107396 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d6833730-f034-4b5f-954a-19e993167f04-kolla-config\") pod \"memcached-0\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") " pod="openstack/memcached-0" Nov 27 07:13:20 crc kubenswrapper[4971]: I1127 07:13:20.107449 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6833730-f034-4b5f-954a-19e993167f04-config-data\") pod \"memcached-0\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") " pod="openstack/memcached-0" Nov 27 07:13:20 crc kubenswrapper[4971]: I1127 07:13:20.114097 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6833730-f034-4b5f-954a-19e993167f04-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") " pod="openstack/memcached-0" Nov 27 07:13:20 crc kubenswrapper[4971]: I1127 07:13:20.116924 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 27 07:13:20 crc kubenswrapper[4971]: I1127 07:13:20.131026 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6833730-f034-4b5f-954a-19e993167f04-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") " pod="openstack/memcached-0" Nov 27 07:13:20 crc kubenswrapper[4971]: I1127 07:13:20.143513 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvd4d\" (UniqueName: \"kubernetes.io/projected/d6833730-f034-4b5f-954a-19e993167f04-kube-api-access-tvd4d\") pod \"memcached-0\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") " pod="openstack/memcached-0" Nov 27 07:13:20 crc kubenswrapper[4971]: I1127 07:13:20.275166 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Nov 27 07:13:20 crc kubenswrapper[4971]: I1127 07:13:20.640927 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 27 07:13:20 crc kubenswrapper[4971]: W1127 07:13:20.670425 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbcf58afd_21c6_4c9d_8702_09bc98859732.slice/crio-a6652ce76b9cece76b9ede712acece12b70a0cae484af7a48284ab90b1053378 WatchSource:0}: Error finding container a6652ce76b9cece76b9ede712acece12b70a0cae484af7a48284ab90b1053378: Status 404 returned error can't find the container with id a6652ce76b9cece76b9ede712acece12b70a0cae484af7a48284ab90b1053378 Nov 27 07:13:21 crc kubenswrapper[4971]: I1127 07:13:21.016028 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Nov 27 07:13:21 crc kubenswrapper[4971]: I1127 07:13:21.120337 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"bcf58afd-21c6-4c9d-8702-09bc98859732","Type":"ContainerStarted","Data":"a6652ce76b9cece76b9ede712acece12b70a0cae484af7a48284ab90b1053378"} Nov 27 07:13:21 crc kubenswrapper[4971]: I1127 07:13:21.123669 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"d6833730-f034-4b5f-954a-19e993167f04","Type":"ContainerStarted","Data":"dc8739d7e1380833df5ffbb2fd90e957d06c03cf2b39c2491034ec42e003dac6"} Nov 27 07:13:21 crc kubenswrapper[4971]: I1127 07:13:21.499616 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Nov 27 07:13:21 crc kubenswrapper[4971]: I1127 07:13:21.501007 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 27 07:13:21 crc kubenswrapper[4971]: I1127 07:13:21.503721 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-9x6zw" Nov 27 07:13:21 crc kubenswrapper[4971]: I1127 07:13:21.521523 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 27 07:13:21 crc kubenswrapper[4971]: I1127 07:13:21.540899 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6mrh\" (UniqueName: \"kubernetes.io/projected/95491a98-26dd-4fe9-95a8-09e1c3f7d3f6-kube-api-access-g6mrh\") pod \"kube-state-metrics-0\" (UID: \"95491a98-26dd-4fe9-95a8-09e1c3f7d3f6\") " pod="openstack/kube-state-metrics-0" Nov 27 07:13:21 crc kubenswrapper[4971]: I1127 07:13:21.642224 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6mrh\" (UniqueName: \"kubernetes.io/projected/95491a98-26dd-4fe9-95a8-09e1c3f7d3f6-kube-api-access-g6mrh\") pod \"kube-state-metrics-0\" (UID: \"95491a98-26dd-4fe9-95a8-09e1c3f7d3f6\") " pod="openstack/kube-state-metrics-0" Nov 27 07:13:21 crc kubenswrapper[4971]: I1127 07:13:21.677339 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6mrh\" (UniqueName: \"kubernetes.io/projected/95491a98-26dd-4fe9-95a8-09e1c3f7d3f6-kube-api-access-g6mrh\") pod \"kube-state-metrics-0\" (UID: \"95491a98-26dd-4fe9-95a8-09e1c3f7d3f6\") " pod="openstack/kube-state-metrics-0" Nov 27 07:13:21 crc kubenswrapper[4971]: I1127 07:13:21.834064 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 27 07:13:24 crc kubenswrapper[4971]: I1127 07:13:24.007159 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 27 07:13:24 crc kubenswrapper[4971]: W1127 07:13:24.048514 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95491a98_26dd_4fe9_95a8_09e1c3f7d3f6.slice/crio-bd52c95c5ffc86358bbf0fa7678fb985bf8fe272f9ce498f25ceb2043a80dd1b WatchSource:0}: Error finding container bd52c95c5ffc86358bbf0fa7678fb985bf8fe272f9ce498f25ceb2043a80dd1b: Status 404 returned error can't find the container with id bd52c95c5ffc86358bbf0fa7678fb985bf8fe272f9ce498f25ceb2043a80dd1b Nov 27 07:13:24 crc kubenswrapper[4971]: I1127 07:13:24.176345 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"95491a98-26dd-4fe9-95a8-09e1c3f7d3f6","Type":"ContainerStarted","Data":"bd52c95c5ffc86358bbf0fa7678fb985bf8fe272f9ce498f25ceb2043a80dd1b"} Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.902118 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-db2qc"] Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.903645 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-db2qc" Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.906093 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-9l9hk" Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.906565 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.912026 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.919593 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-45rt8"] Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.921791 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.953376 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-db2qc"] Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.965625 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aa6e1b6-c18b-4a02-a396-880350cde407-combined-ca-bundle\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.965691 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9wff\" (UniqueName: \"kubernetes.io/projected/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-kube-api-access-d9wff\") pod \"ovn-controller-ovs-45rt8\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.965744 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-var-run\") pod \"ovn-controller-ovs-45rt8\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.965766 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0aa6e1b6-c18b-4a02-a396-880350cde407-var-run\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.965786 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa6e1b6-c18b-4a02-a396-880350cde407-ovn-controller-tls-certs\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.965806 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0aa6e1b6-c18b-4a02-a396-880350cde407-var-run-ovn\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.965825 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-etc-ovs\") pod \"ovn-controller-ovs-45rt8\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.965844 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0aa6e1b6-c18b-4a02-a396-880350cde407-scripts\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.965866 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-scripts\") pod \"ovn-controller-ovs-45rt8\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.965889 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0aa6e1b6-c18b-4a02-a396-880350cde407-var-log-ovn\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.965912 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-var-lib\") pod \"ovn-controller-ovs-45rt8\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.965950 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfgmz\" (UniqueName: \"kubernetes.io/projected/0aa6e1b6-c18b-4a02-a396-880350cde407-kube-api-access-wfgmz\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.965974 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-var-log\") pod \"ovn-controller-ovs-45rt8\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:26 crc kubenswrapper[4971]: I1127 07:13:26.988448 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-45rt8"] Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.067354 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-scripts\") pod \"ovn-controller-ovs-45rt8\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.067440 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0aa6e1b6-c18b-4a02-a396-880350cde407-var-log-ovn\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:27 crc kubenswrapper[4971]: 
I1127 07:13:27.067462 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-var-lib\") pod \"ovn-controller-ovs-45rt8\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.067954 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0aa6e1b6-c18b-4a02-a396-880350cde407-var-log-ovn\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.068170 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-var-lib\") pod \"ovn-controller-ovs-45rt8\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.068222 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfgmz\" (UniqueName: \"kubernetes.io/projected/0aa6e1b6-c18b-4a02-a396-880350cde407-kube-api-access-wfgmz\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.068241 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-var-log\") pod \"ovn-controller-ovs-45rt8\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.068708 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-var-log\") pod \"ovn-controller-ovs-45rt8\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.068260 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aa6e1b6-c18b-4a02-a396-880350cde407-combined-ca-bundle\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.069499 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9wff\" (UniqueName: \"kubernetes.io/projected/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-kube-api-access-d9wff\") pod \"ovn-controller-ovs-45rt8\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.069800 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-var-run\") pod \"ovn-controller-ovs-45rt8\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.069829 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0aa6e1b6-c18b-4a02-a396-880350cde407-var-run\") pod 
\"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.069988 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-var-run\") pod \"ovn-controller-ovs-45rt8\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.070022 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0aa6e1b6-c18b-4a02-a396-880350cde407-var-run\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.070145 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa6e1b6-c18b-4a02-a396-880350cde407-ovn-controller-tls-certs\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.070196 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0aa6e1b6-c18b-4a02-a396-880350cde407-var-run-ovn\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.070231 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-etc-ovs\") pod \"ovn-controller-ovs-45rt8\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.070258 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0aa6e1b6-c18b-4a02-a396-880350cde407-scripts\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.070458 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-etc-ovs\") pod \"ovn-controller-ovs-45rt8\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.070512 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0aa6e1b6-c18b-4a02-a396-880350cde407-var-run-ovn\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.070997 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-scripts\") pod \"ovn-controller-ovs-45rt8\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.078011 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/0aa6e1b6-c18b-4a02-a396-880350cde407-scripts\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.083579 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa6e1b6-c18b-4a02-a396-880350cde407-ovn-controller-tls-certs\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.083754 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aa6e1b6-c18b-4a02-a396-880350cde407-combined-ca-bundle\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.087165 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9wff\" (UniqueName: \"kubernetes.io/projected/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-kube-api-access-d9wff\") pod \"ovn-controller-ovs-45rt8\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.087968 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfgmz\" (UniqueName: \"kubernetes.io/projected/0aa6e1b6-c18b-4a02-a396-880350cde407-kube-api-access-wfgmz\") pod \"ovn-controller-db2qc\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " pod="openstack/ovn-controller-db2qc" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.243372 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-db2qc" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.254084 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.736638 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.740761 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.773220 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.773409 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.773874 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.774077 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.775083 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.790857 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-wnqnk" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.892936 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6e13a581-61d0-4a1f-ad42-5f2783417c70-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.893251 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e13a581-61d0-4a1f-ad42-5f2783417c70-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.893285 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2vqk\" (UniqueName: \"kubernetes.io/projected/6e13a581-61d0-4a1f-ad42-5f2783417c70-kube-api-access-c2vqk\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.893332 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e13a581-61d0-4a1f-ad42-5f2783417c70-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.893353 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e13a581-61d0-4a1f-ad42-5f2783417c70-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.893371 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e13a581-61d0-4a1f-ad42-5f2783417c70-config\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.893400 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e13a581-61d0-4a1f-ad42-5f2783417c70-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.893459 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.995184 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e13a581-61d0-4a1f-ad42-5f2783417c70-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.995238 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e13a581-61d0-4a1f-ad42-5f2783417c70-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.995264 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e13a581-61d0-4a1f-ad42-5f2783417c70-config\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.995295 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e13a581-61d0-4a1f-ad42-5f2783417c70-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.995356 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.995373 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6e13a581-61d0-4a1f-ad42-5f2783417c70-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.995392 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e13a581-61d0-4a1f-ad42-5f2783417c70-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:27 crc kubenswrapper[4971]: I1127 07:13:27.995415 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2vqk\" (UniqueName: \"kubernetes.io/projected/6e13a581-61d0-4a1f-ad42-5f2783417c70-kube-api-access-c2vqk\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: 
I1127 07:13:27.996282 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6e13a581-61d0-4a1f-ad42-5f2783417c70-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:27.996683 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:27.997054 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e13a581-61d0-4a1f-ad42-5f2783417c70-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.003983 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e13a581-61d0-4a1f-ad42-5f2783417c70-config\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.004555 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e13a581-61d0-4a1f-ad42-5f2783417c70-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.005391 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e13a581-61d0-4a1f-ad42-5f2783417c70-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.013133 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e13a581-61d0-4a1f-ad42-5f2783417c70-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.022431 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2vqk\" (UniqueName: \"kubernetes.io/projected/6e13a581-61d0-4a1f-ad42-5f2783417c70-kube-api-access-c2vqk\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.033828 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.150070 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.741150 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.743291 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.746106 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.746224 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.747206 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-9llmv" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.747253 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.767202 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.818938 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.819005 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.819032 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.819174 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.819222 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-config\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.819251 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwljt\" (UniqueName: \"kubernetes.io/projected/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-kube-api-access-dwljt\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " 
pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.819281 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.819311 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.921154 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-config\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.921222 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwljt\" (UniqueName: \"kubernetes.io/projected/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-kube-api-access-dwljt\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.921416 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.921503 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.921586 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.921617 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.921724 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.922152 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-ovsdb-rundir\") pod 
\"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.922335 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.922460 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.922948 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-config\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.923079 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.927396 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.928242 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.933257 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.940492 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwljt\" (UniqueName: \"kubernetes.io/projected/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-kube-api-access-dwljt\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:28 crc kubenswrapper[4971]: I1127 07:13:28.945440 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:29 crc kubenswrapper[4971]: I1127 07:13:29.065729 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:39 crc kubenswrapper[4971]: E1127 07:13:39.441519 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:5526be2fd8d8cdc035078fdbcb7de6b02c081147295a13f2b1e50e281ef17f52" Nov 27 07:13:39 crc kubenswrapper[4971]: E1127 07:13:39.442237 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:5526be2fd8d8cdc035078fdbcb7de6b02c081147295a13f2b1e50e281ef17f52,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kkcmh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(bcf58afd-21c6-4c9d-8702-09bc98859732): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 27 07:13:39 crc kubenswrapper[4971]: E1127 07:13:39.443439 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="bcf58afd-21c6-4c9d-8702-09bc98859732" Nov 27 07:13:40 crc kubenswrapper[4971]: E1127 07:13:40.251867 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached@sha256:0e00f2303db35259ffcd3d034f38ab9eb4cb089e268305a4165b5f86a18fce6c" Nov 27 07:13:40 crc kubenswrapper[4971]: E1127 07:13:40.252323 4971 
kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached@sha256:0e00f2303db35259ffcd3d034f38ab9eb4cb089e268305a4165b5f86a18fce6c,Command:[/usr/bin/dumb-init -- /usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n585h586h56bh695h665h5d8hfbh89h664h654h5fbh654h7h88h57ch5fhb9h5c8h657h5dhd5h56bh547h598h545h686h65ch564h55bh579h9bh5d5q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tvd4d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(d6833730-f034-4b5f-954a-19e993167f04): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 27 07:13:40 crc kubenswrapper[4971]: E1127 07:13:40.253763 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="d6833730-f034-4b5f-954a-19e993167f04" Nov 27 07:13:40 crc kubenswrapper[4971]: E1127 07:13:40.281174 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:5526be2fd8d8cdc035078fdbcb7de6b02c081147295a13f2b1e50e281ef17f52" Nov 27 07:13:40 crc kubenswrapper[4971]: E1127 07:13:40.282069 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:5526be2fd8d8cdc035078fdbcb7de6b02c081147295a13f2b1e50e281ef17f52,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v2dt8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(4a0dcbed-5f66-4faf-83c2-1227bc05e9d3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 27 07:13:40 crc kubenswrapper[4971]: E1127 07:13:40.283456 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="4a0dcbed-5f66-4faf-83c2-1227bc05e9d3" Nov 27 07:13:40 crc kubenswrapper[4971]: E1127 07:13:40.321546 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:5526be2fd8d8cdc035078fdbcb7de6b02c081147295a13f2b1e50e281ef17f52\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="bcf58afd-21c6-4c9d-8702-09bc98859732" Nov 27 07:13:40 crc kubenswrapper[4971]: E1127 07:13:40.321586 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached@sha256:0e00f2303db35259ffcd3d034f38ab9eb4cb089e268305a4165b5f86a18fce6c\\\"\"" pod="openstack/memcached-0" podUID="d6833730-f034-4b5f-954a-19e993167f04" Nov 27 07:13:40 crc kubenswrapper[4971]: E1127 07:13:40.321684 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:5526be2fd8d8cdc035078fdbcb7de6b02c081147295a13f2b1e50e281ef17f52\\\"\"" pod="openstack/openstack-galera-0" podUID="4a0dcbed-5f66-4faf-83c2-1227bc05e9d3" Nov 27 07:13:41 crc kubenswrapper[4971]: E1127 07:13:41.271935 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:c64e18fe0ecb6900e763e6cf6be0ca8f71b5c8af9e078a543238a505cf88ae46" Nov 27 07:13:41 crc kubenswrapper[4971]: E1127 07:13:41.272134 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:c64e18fe0ecb6900e763e6cf6be0ca8f71b5c8af9e078a543238a505cf88ae46,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kmnnq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(d6439a3c-ee26-467c-8e42-5abbbf390f16): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 27 07:13:41 crc kubenswrapper[4971]: E1127 07:13:41.273357 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="d6439a3c-ee26-467c-8e42-5abbbf390f16" Nov 27 07:13:41 crc kubenswrapper[4971]: E1127 07:13:41.275917 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:c64e18fe0ecb6900e763e6cf6be0ca8f71b5c8af9e078a543238a505cf88ae46" Nov 27 07:13:41 crc kubenswrapper[4971]: E1127 07:13:41.276095 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:c64e18fe0ecb6900e763e6cf6be0ca8f71b5c8af9e078a543238a505cf88ae46,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 
0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xzvhk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(640c3829-d2e9-49e1-82e3-bd213aa992dd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 27 07:13:41 crc kubenswrapper[4971]: E1127 07:13:41.277954 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="640c3829-d2e9-49e1-82e3-bd213aa992dd" Nov 27 07:13:41 crc kubenswrapper[4971]: E1127 07:13:41.348765 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:c64e18fe0ecb6900e763e6cf6be0ca8f71b5c8af9e078a543238a505cf88ae46\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="640c3829-d2e9-49e1-82e3-bd213aa992dd" Nov 27 07:13:41 crc kubenswrapper[4971]: E1127 07:13:41.349258 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:c64e18fe0ecb6900e763e6cf6be0ca8f71b5c8af9e078a543238a505cf88ae46\\\"\"" pod="openstack/rabbitmq-server-0" podUID="d6439a3c-ee26-467c-8e42-5abbbf390f16" Nov 27 07:13:42 crc kubenswrapper[4971]: E1127 07:13:42.294232 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627" Nov 27 07:13:42 crc kubenswrapper[4971]: E1127 07:13:42.294849 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s4p6h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57dc4c6697-d7m57_openstack(5e4df58e-1b19-439d-aee0-a09ee67f328e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 27 07:13:42 crc kubenswrapper[4971]: E1127 07:13:42.296483 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57dc4c6697-d7m57" podUID="5e4df58e-1b19-439d-aee0-a09ee67f328e" Nov 27 07:13:42 crc kubenswrapper[4971]: E1127 07:13:42.297920 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627" Nov 27 07:13:42 crc kubenswrapper[4971]: E1127 07:13:42.298056 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-r2tm2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-8446fd7c75-z9w86_openstack(42377322-3180-4e68-94b9-601dcd82f6f4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 27 07:13:42 crc kubenswrapper[4971]: E1127 07:13:42.299223 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-8446fd7c75-z9w86" podUID="42377322-3180-4e68-94b9-601dcd82f6f4" Nov 27 07:13:42 crc kubenswrapper[4971]: E1127 07:13:42.313303 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627" Nov 27 07:13:42 crc kubenswrapper[4971]: E1127 07:13:42.313470 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed 
--no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j78p4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-557f57d995-hr4xs_openstack(059f6657-eda5-4f44-91fb-abd66cc1d9e9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 27 07:13:42 crc kubenswrapper[4971]: E1127 07:13:42.313785 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627" Nov 27 07:13:42 crc kubenswrapper[4971]: E1127 07:13:42.313927 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-58d2c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-766fdc659c-2kj78_openstack(f46735aa-eb5c-475c-a810-c60a3d192807): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 27 07:13:42 crc kubenswrapper[4971]: E1127 07:13:42.315095 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-557f57d995-hr4xs" podUID="059f6657-eda5-4f44-91fb-abd66cc1d9e9" Nov 27 07:13:42 crc kubenswrapper[4971]: E1127 07:13:42.315138 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-766fdc659c-2kj78" podUID="f46735aa-eb5c-475c-a810-c60a3d192807" Nov 27 07:13:42 crc kubenswrapper[4971]: E1127 07:13:42.394771 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627\\\"\"" pod="openstack/dnsmasq-dns-57dc4c6697-d7m57" podUID="5e4df58e-1b19-439d-aee0-a09ee67f328e" Nov 27 07:13:42 crc kubenswrapper[4971]: E1127 07:13:42.395921 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627\\\"\"" pod="openstack/dnsmasq-dns-8446fd7c75-z9w86" podUID="42377322-3180-4e68-94b9-601dcd82f6f4" Nov 27 07:13:42 crc kubenswrapper[4971]: I1127 07:13:42.930559 
4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-db2qc"] Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.048123 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.142073 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-45rt8"] Nov 27 07:13:43 crc kubenswrapper[4971]: W1127 07:13:43.142449 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0aa6e1b6_c18b_4a02_a396_880350cde407.slice/crio-dcd26d31f61dc261b0ae4502413881a35341b72b65ea0b54917e2204fa51658a WatchSource:0}: Error finding container dcd26d31f61dc261b0ae4502413881a35341b72b65ea0b54917e2204fa51658a: Status 404 returned error can't find the container with id dcd26d31f61dc261b0ae4502413881a35341b72b65ea0b54917e2204fa51658a Nov 27 07:13:43 crc kubenswrapper[4971]: W1127 07:13:43.268319 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e13a581_61d0_4a1f_ad42_5f2783417c70.slice/crio-2d362db6b1447b211c560391e76ed1612f2cc461550022715c739ababa1142f2 WatchSource:0}: Error finding container 2d362db6b1447b211c560391e76ed1612f2cc461550022715c739ababa1142f2: Status 404 returned error can't find the container with id 2d362db6b1447b211c560391e76ed1612f2cc461550022715c739ababa1142f2 Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.331168 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-766fdc659c-2kj78" Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.338491 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-557f57d995-hr4xs" Nov 27 07:13:43 crc kubenswrapper[4971]: W1127 07:13:43.365809 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6facf3b5_48aa_4a38_823e_6b7adbbcdfee.slice/crio-9b40984b9eca07ba195f28e6dcd857b2b4c7a263a570bb3a7e7ef5022c1c0d62 WatchSource:0}: Error finding container 9b40984b9eca07ba195f28e6dcd857b2b4c7a263a570bb3a7e7ef5022c1c0d62: Status 404 returned error can't find the container with id 9b40984b9eca07ba195f28e6dcd857b2b4c7a263a570bb3a7e7ef5022c1c0d62 Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.386066 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-45rt8" event={"ID":"6facf3b5-48aa-4a38-823e-6b7adbbcdfee","Type":"ContainerStarted","Data":"9b40984b9eca07ba195f28e6dcd857b2b4c7a263a570bb3a7e7ef5022c1c0d62"} Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.386893 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-db2qc" event={"ID":"0aa6e1b6-c18b-4a02-a396-880350cde407","Type":"ContainerStarted","Data":"dcd26d31f61dc261b0ae4502413881a35341b72b65ea0b54917e2204fa51658a"} Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.387848 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-557f57d995-hr4xs" event={"ID":"059f6657-eda5-4f44-91fb-abd66cc1d9e9","Type":"ContainerDied","Data":"340d70f52c3b6b1d209575efb93d7bdaa4c40599c2242c232dd2ed54e2fb64e6"} Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.387881 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-557f57d995-hr4xs" Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.388792 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"6e13a581-61d0-4a1f-ad42-5f2783417c70","Type":"ContainerStarted","Data":"2d362db6b1447b211c560391e76ed1612f2cc461550022715c739ababa1142f2"} Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.389744 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-766fdc659c-2kj78" event={"ID":"f46735aa-eb5c-475c-a810-c60a3d192807","Type":"ContainerDied","Data":"48966907910f9a5d0bc057813b79bed057a37ed64972d96e5ccd53cc08c4da7b"} Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.389851 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-766fdc659c-2kj78" Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.440582 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/059f6657-eda5-4f44-91fb-abd66cc1d9e9-config\") pod \"059f6657-eda5-4f44-91fb-abd66cc1d9e9\" (UID: \"059f6657-eda5-4f44-91fb-abd66cc1d9e9\") " Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.440626 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f46735aa-eb5c-475c-a810-c60a3d192807-config\") pod \"f46735aa-eb5c-475c-a810-c60a3d192807\" (UID: \"f46735aa-eb5c-475c-a810-c60a3d192807\") " Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.440684 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j78p4\" (UniqueName: \"kubernetes.io/projected/059f6657-eda5-4f44-91fb-abd66cc1d9e9-kube-api-access-j78p4\") pod \"059f6657-eda5-4f44-91fb-abd66cc1d9e9\" (UID: \"059f6657-eda5-4f44-91fb-abd66cc1d9e9\") " Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.440707 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f46735aa-eb5c-475c-a810-c60a3d192807-dns-svc\") pod \"f46735aa-eb5c-475c-a810-c60a3d192807\" (UID: \"f46735aa-eb5c-475c-a810-c60a3d192807\") " Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.440731 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58d2c\" (UniqueName: \"kubernetes.io/projected/f46735aa-eb5c-475c-a810-c60a3d192807-kube-api-access-58d2c\") pod \"f46735aa-eb5c-475c-a810-c60a3d192807\" (UID: \"f46735aa-eb5c-475c-a810-c60a3d192807\") " Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.441287 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f46735aa-eb5c-475c-a810-c60a3d192807-config" (OuterVolumeSpecName: "config") pod "f46735aa-eb5c-475c-a810-c60a3d192807" (UID: "f46735aa-eb5c-475c-a810-c60a3d192807"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.441732 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/059f6657-eda5-4f44-91fb-abd66cc1d9e9-config" (OuterVolumeSpecName: "config") pod "059f6657-eda5-4f44-91fb-abd66cc1d9e9" (UID: "059f6657-eda5-4f44-91fb-abd66cc1d9e9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.442020 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f46735aa-eb5c-475c-a810-c60a3d192807-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f46735aa-eb5c-475c-a810-c60a3d192807" (UID: "f46735aa-eb5c-475c-a810-c60a3d192807"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.446433 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/059f6657-eda5-4f44-91fb-abd66cc1d9e9-kube-api-access-j78p4" (OuterVolumeSpecName: "kube-api-access-j78p4") pod "059f6657-eda5-4f44-91fb-abd66cc1d9e9" (UID: "059f6657-eda5-4f44-91fb-abd66cc1d9e9"). InnerVolumeSpecName "kube-api-access-j78p4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.447172 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f46735aa-eb5c-475c-a810-c60a3d192807-kube-api-access-58d2c" (OuterVolumeSpecName: "kube-api-access-58d2c") pod "f46735aa-eb5c-475c-a810-c60a3d192807" (UID: "f46735aa-eb5c-475c-a810-c60a3d192807"). InnerVolumeSpecName "kube-api-access-58d2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.543129 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58d2c\" (UniqueName: \"kubernetes.io/projected/f46735aa-eb5c-475c-a810-c60a3d192807-kube-api-access-58d2c\") on node \"crc\" DevicePath \"\"" Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.543182 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/059f6657-eda5-4f44-91fb-abd66cc1d9e9-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.543196 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f46735aa-eb5c-475c-a810-c60a3d192807-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.543213 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j78p4\" (UniqueName: \"kubernetes.io/projected/059f6657-eda5-4f44-91fb-abd66cc1d9e9-kube-api-access-j78p4\") on node \"crc\" DevicePath \"\"" Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.543243 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f46735aa-eb5c-475c-a810-c60a3d192807-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.787081 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-hr4xs"] Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.796582 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-hr4xs"] Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.818807 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-2kj78"] Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.826560 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-2kj78"] Nov 27 07:13:43 crc kubenswrapper[4971]: I1127 07:13:43.833569 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 27 07:13:44 crc 
kubenswrapper[4971]: I1127 07:13:44.399248 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"95491a98-26dd-4fe9-95a8-09e1c3f7d3f6","Type":"ContainerStarted","Data":"5f69ff59df757c223b52667faed26c43c2f9179d089f63d894199a3c579b0475"} Nov 27 07:13:44 crc kubenswrapper[4971]: I1127 07:13:44.399660 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Nov 27 07:13:44 crc kubenswrapper[4971]: I1127 07:13:44.402645 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2","Type":"ContainerStarted","Data":"9bc43782eba8c4cf00dbf1e71cf92628b20a8a75f49f39680989ed8e4c815c99"} Nov 27 07:13:44 crc kubenswrapper[4971]: I1127 07:13:44.413980 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.324752338 podStartE2EDuration="23.413958187s" podCreationTimestamp="2025-11-27 07:13:21 +0000 UTC" firstStartedPulling="2025-11-27 07:13:24.058430482 +0000 UTC m=+1242.250474400" lastFinishedPulling="2025-11-27 07:13:44.147636331 +0000 UTC m=+1262.339680249" observedRunningTime="2025-11-27 07:13:44.410078487 +0000 UTC m=+1262.602122425" watchObservedRunningTime="2025-11-27 07:13:44.413958187 +0000 UTC m=+1262.606002115" Nov 27 07:13:44 crc kubenswrapper[4971]: I1127 07:13:44.583599 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="059f6657-eda5-4f44-91fb-abd66cc1d9e9" path="/var/lib/kubelet/pods/059f6657-eda5-4f44-91fb-abd66cc1d9e9/volumes" Nov 27 07:13:44 crc kubenswrapper[4971]: I1127 07:13:44.584833 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f46735aa-eb5c-475c-a810-c60a3d192807" path="/var/lib/kubelet/pods/f46735aa-eb5c-475c-a810-c60a3d192807/volumes" Nov 27 07:13:48 crc kubenswrapper[4971]: I1127 07:13:48.444753 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"6e13a581-61d0-4a1f-ad42-5f2783417c70","Type":"ContainerStarted","Data":"203479ecfee56a9441b43631622196bf609ce8fbfc30cbc6c31eb9c3e9440056"} Nov 27 07:13:48 crc kubenswrapper[4971]: I1127 07:13:48.446700 4971 generic.go:334] "Generic (PLEG): container finished" podID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerID="60f59a6e83fdc72d437d71d6a2fc4290250b75d86ff4dc231378f363a52d5f74" exitCode=0 Nov 27 07:13:48 crc kubenswrapper[4971]: I1127 07:13:48.446774 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-45rt8" event={"ID":"6facf3b5-48aa-4a38-823e-6b7adbbcdfee","Type":"ContainerDied","Data":"60f59a6e83fdc72d437d71d6a2fc4290250b75d86ff4dc231378f363a52d5f74"} Nov 27 07:13:48 crc kubenswrapper[4971]: I1127 07:13:48.448414 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2","Type":"ContainerStarted","Data":"ccb0479ba9995bc8550fbc2dbf993c3449d21c9af046183d2f478467bb11fc07"} Nov 27 07:13:48 crc kubenswrapper[4971]: I1127 07:13:48.451804 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-db2qc" event={"ID":"0aa6e1b6-c18b-4a02-a396-880350cde407","Type":"ContainerStarted","Data":"fc482f882a3b7633d1172ebad636e951956bbf0dc53007a5de81a84d031ea868"} Nov 27 07:13:48 crc kubenswrapper[4971]: I1127 07:13:48.452003 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-db2qc" Nov 27 07:13:48 crc 
kubenswrapper[4971]: I1127 07:13:48.501609 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-db2qc" podStartSLOduration=18.500149463 podStartE2EDuration="22.501555812s" podCreationTimestamp="2025-11-27 07:13:26 +0000 UTC" firstStartedPulling="2025-11-27 07:13:43.144947502 +0000 UTC m=+1261.336991420" lastFinishedPulling="2025-11-27 07:13:47.146353851 +0000 UTC m=+1265.338397769" observedRunningTime="2025-11-27 07:13:48.488970036 +0000 UTC m=+1266.681013974" watchObservedRunningTime="2025-11-27 07:13:48.501555812 +0000 UTC m=+1266.693599750" Nov 27 07:13:49 crc kubenswrapper[4971]: I1127 07:13:49.482480 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-45rt8" event={"ID":"6facf3b5-48aa-4a38-823e-6b7adbbcdfee","Type":"ContainerStarted","Data":"8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34"} Nov 27 07:13:49 crc kubenswrapper[4971]: I1127 07:13:49.483584 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-45rt8" event={"ID":"6facf3b5-48aa-4a38-823e-6b7adbbcdfee","Type":"ContainerStarted","Data":"d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2"} Nov 27 07:13:49 crc kubenswrapper[4971]: I1127 07:13:49.519752 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-45rt8" podStartSLOduration=19.759888306 podStartE2EDuration="23.519732768s" podCreationTimestamp="2025-11-27 07:13:26 +0000 UTC" firstStartedPulling="2025-11-27 07:13:43.370067455 +0000 UTC m=+1261.562111373" lastFinishedPulling="2025-11-27 07:13:47.129911917 +0000 UTC m=+1265.321955835" observedRunningTime="2025-11-27 07:13:49.513716708 +0000 UTC m=+1267.705760646" watchObservedRunningTime="2025-11-27 07:13:49.519732768 +0000 UTC m=+1267.711776696" Nov 27 07:13:50 crc kubenswrapper[4971]: I1127 07:13:50.494111 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2","Type":"ContainerStarted","Data":"7a4e158b7c7e9c61d16087892eb264fb5c50d094f187b71cab60d863a90c10b0"} Nov 27 07:13:50 crc kubenswrapper[4971]: I1127 07:13:50.497081 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"6e13a581-61d0-4a1f-ad42-5f2783417c70","Type":"ContainerStarted","Data":"35daabf30eff3474954a06c52ad28d749b52aae9fc9b781f447588cf8b408b5f"} Nov 27 07:13:50 crc kubenswrapper[4971]: I1127 07:13:50.497168 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:50 crc kubenswrapper[4971]: I1127 07:13:50.497197 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:13:50 crc kubenswrapper[4971]: I1127 07:13:50.524602 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=17.392552003 podStartE2EDuration="23.524568048s" podCreationTimestamp="2025-11-27 07:13:27 +0000 UTC" firstStartedPulling="2025-11-27 07:13:44.140432067 +0000 UTC m=+1262.332475985" lastFinishedPulling="2025-11-27 07:13:50.272448112 +0000 UTC m=+1268.464492030" observedRunningTime="2025-11-27 07:13:50.517084226 +0000 UTC m=+1268.709128154" watchObservedRunningTime="2025-11-27 07:13:50.524568048 +0000 UTC m=+1268.716612016" Nov 27 07:13:51 crc kubenswrapper[4971]: I1127 07:13:51.572002 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/ovsdbserver-sb-0" podStartSLOduration=18.556025412 podStartE2EDuration="25.571982199s" podCreationTimestamp="2025-11-27 07:13:26 +0000 UTC" firstStartedPulling="2025-11-27 07:13:43.274997248 +0000 UTC m=+1261.467041166" lastFinishedPulling="2025-11-27 07:13:50.290954035 +0000 UTC m=+1268.482997953" observedRunningTime="2025-11-27 07:13:50.537918805 +0000 UTC m=+1268.729962733" watchObservedRunningTime="2025-11-27 07:13:51.571982199 +0000 UTC m=+1269.764026117" Nov 27 07:13:51 crc kubenswrapper[4971]: I1127 07:13:51.840755 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Nov 27 07:13:52 crc kubenswrapper[4971]: I1127 07:13:52.154155 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:52 crc kubenswrapper[4971]: I1127 07:13:52.203989 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:52 crc kubenswrapper[4971]: I1127 07:13:52.516036 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"d6833730-f034-4b5f-954a-19e993167f04","Type":"ContainerStarted","Data":"de2ace92e2287e0935ebddfec859f7a90b7eade8440fecf2f0f85b2026bacd9f"} Nov 27 07:13:52 crc kubenswrapper[4971]: I1127 07:13:52.516461 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:52 crc kubenswrapper[4971]: I1127 07:13:52.516766 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Nov 27 07:13:52 crc kubenswrapper[4971]: I1127 07:13:52.546371 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.354226265 podStartE2EDuration="33.546343707s" podCreationTimestamp="2025-11-27 07:13:19 +0000 UTC" firstStartedPulling="2025-11-27 07:13:21.059964979 +0000 UTC m=+1239.252008897" lastFinishedPulling="2025-11-27 07:13:52.252082371 +0000 UTC m=+1270.444126339" observedRunningTime="2025-11-27 07:13:52.536600032 +0000 UTC m=+1270.728643970" watchObservedRunningTime="2025-11-27 07:13:52.546343707 +0000 UTC m=+1270.738387635" Nov 27 07:13:52 crc kubenswrapper[4971]: I1127 07:13:52.572340 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Nov 27 07:13:52 crc kubenswrapper[4971]: I1127 07:13:52.972703 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57dc4c6697-d7m57"] Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.032339 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-fbf586c4f-dld5n"] Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.037142 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.053959 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.066700 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.112154 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fbf586c4f-dld5n"] Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.140382 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-x4rpv"] Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.143347 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.146254 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.157810 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8023a25-e335-417c-9d79-93178ade2982-config\") pod \"dnsmasq-dns-fbf586c4f-dld5n\" (UID: \"c8023a25-e335-417c-9d79-93178ade2982\") " pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.157884 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c8023a25-e335-417c-9d79-93178ade2982-ovsdbserver-sb\") pod \"dnsmasq-dns-fbf586c4f-dld5n\" (UID: \"c8023a25-e335-417c-9d79-93178ade2982\") " pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.157918 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c8023a25-e335-417c-9d79-93178ade2982-dns-svc\") pod \"dnsmasq-dns-fbf586c4f-dld5n\" (UID: \"c8023a25-e335-417c-9d79-93178ade2982\") " pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.157950 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwz5k\" (UniqueName: \"kubernetes.io/projected/c8023a25-e335-417c-9d79-93178ade2982-kube-api-access-gwz5k\") pod \"dnsmasq-dns-fbf586c4f-dld5n\" (UID: \"c8023a25-e335-417c-9d79-93178ade2982\") " pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.196553 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.228472 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-x4rpv"] Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.259156 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/31d8afb0-f8c2-4d34-879f-260e94779de0-ovn-rundir\") pod \"ovn-controller-metrics-x4rpv\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.263688 4971 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8023a25-e335-417c-9d79-93178ade2982-config\") pod \"dnsmasq-dns-fbf586c4f-dld5n\" (UID: \"c8023a25-e335-417c-9d79-93178ade2982\") " pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.263836 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c8023a25-e335-417c-9d79-93178ade2982-ovsdbserver-sb\") pod \"dnsmasq-dns-fbf586c4f-dld5n\" (UID: \"c8023a25-e335-417c-9d79-93178ade2982\") " pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.263897 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c8023a25-e335-417c-9d79-93178ade2982-dns-svc\") pod \"dnsmasq-dns-fbf586c4f-dld5n\" (UID: \"c8023a25-e335-417c-9d79-93178ade2982\") " pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.263929 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwz5k\" (UniqueName: \"kubernetes.io/projected/c8023a25-e335-417c-9d79-93178ade2982-kube-api-access-gwz5k\") pod \"dnsmasq-dns-fbf586c4f-dld5n\" (UID: \"c8023a25-e335-417c-9d79-93178ade2982\") " pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.264104 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/31d8afb0-f8c2-4d34-879f-260e94779de0-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-x4rpv\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.264174 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48q8t\" (UniqueName: \"kubernetes.io/projected/31d8afb0-f8c2-4d34-879f-260e94779de0-kube-api-access-48q8t\") pod \"ovn-controller-metrics-x4rpv\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.264267 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31d8afb0-f8c2-4d34-879f-260e94779de0-config\") pod \"ovn-controller-metrics-x4rpv\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.264344 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31d8afb0-f8c2-4d34-879f-260e94779de0-combined-ca-bundle\") pod \"ovn-controller-metrics-x4rpv\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.264394 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/31d8afb0-f8c2-4d34-879f-260e94779de0-ovs-rundir\") pod \"ovn-controller-metrics-x4rpv\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.265998 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8023a25-e335-417c-9d79-93178ade2982-config\") pod \"dnsmasq-dns-fbf586c4f-dld5n\" (UID: \"c8023a25-e335-417c-9d79-93178ade2982\") " pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.266658 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c8023a25-e335-417c-9d79-93178ade2982-ovsdbserver-sb\") pod \"dnsmasq-dns-fbf586c4f-dld5n\" (UID: \"c8023a25-e335-417c-9d79-93178ade2982\") " pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.267203 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c8023a25-e335-417c-9d79-93178ade2982-dns-svc\") pod \"dnsmasq-dns-fbf586c4f-dld5n\" (UID: \"c8023a25-e335-417c-9d79-93178ade2982\") " pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.307322 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwz5k\" (UniqueName: \"kubernetes.io/projected/c8023a25-e335-417c-9d79-93178ade2982-kube-api-access-gwz5k\") pod \"dnsmasq-dns-fbf586c4f-dld5n\" (UID: \"c8023a25-e335-417c-9d79-93178ade2982\") " pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.365684 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31d8afb0-f8c2-4d34-879f-260e94779de0-combined-ca-bundle\") pod \"ovn-controller-metrics-x4rpv\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.365775 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/31d8afb0-f8c2-4d34-879f-260e94779de0-ovs-rundir\") pod \"ovn-controller-metrics-x4rpv\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.365837 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/31d8afb0-f8c2-4d34-879f-260e94779de0-ovn-rundir\") pod \"ovn-controller-metrics-x4rpv\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.365932 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/31d8afb0-f8c2-4d34-879f-260e94779de0-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-x4rpv\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.365966 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48q8t\" (UniqueName: \"kubernetes.io/projected/31d8afb0-f8c2-4d34-879f-260e94779de0-kube-api-access-48q8t\") pod \"ovn-controller-metrics-x4rpv\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.366009 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/31d8afb0-f8c2-4d34-879f-260e94779de0-config\") pod \"ovn-controller-metrics-x4rpv\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.366585 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/31d8afb0-f8c2-4d34-879f-260e94779de0-ovn-rundir\") pod \"ovn-controller-metrics-x4rpv\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.369713 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31d8afb0-f8c2-4d34-879f-260e94779de0-config\") pod \"ovn-controller-metrics-x4rpv\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.370130 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/31d8afb0-f8c2-4d34-879f-260e94779de0-ovs-rundir\") pod \"ovn-controller-metrics-x4rpv\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.382166 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/31d8afb0-f8c2-4d34-879f-260e94779de0-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-x4rpv\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.388759 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-z9w86"] Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.391708 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48q8t\" (UniqueName: \"kubernetes.io/projected/31d8afb0-f8c2-4d34-879f-260e94779de0-kube-api-access-48q8t\") pod \"ovn-controller-metrics-x4rpv\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.404607 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58bd875f97-nxb6t"] Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.407451 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.413953 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.427328 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31d8afb0-f8c2-4d34-879f-260e94779de0-combined-ca-bundle\") pod \"ovn-controller-metrics-x4rpv\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.427448 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.429719 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58bd875f97-nxb6t"] Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.476688 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-dns-svc\") pod \"dnsmasq-dns-58bd875f97-nxb6t\" (UID: \"677c92ea-3947-426f-8b7b-ec15116012be\") " pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.477162 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlrhv\" (UniqueName: \"kubernetes.io/projected/677c92ea-3947-426f-8b7b-ec15116012be-kube-api-access-xlrhv\") pod \"dnsmasq-dns-58bd875f97-nxb6t\" (UID: \"677c92ea-3947-426f-8b7b-ec15116012be\") " pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.477193 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-ovsdbserver-sb\") pod \"dnsmasq-dns-58bd875f97-nxb6t\" (UID: \"677c92ea-3947-426f-8b7b-ec15116012be\") " pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.477222 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-config\") pod \"dnsmasq-dns-58bd875f97-nxb6t\" (UID: \"677c92ea-3947-426f-8b7b-ec15116012be\") " pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.477345 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-ovsdbserver-nb\") pod \"dnsmasq-dns-58bd875f97-nxb6t\" (UID: \"677c92ea-3947-426f-8b7b-ec15116012be\") " pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.529994 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.549920 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57dc4c6697-d7m57" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.555419 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57dc4c6697-d7m57" event={"ID":"5e4df58e-1b19-439d-aee0-a09ee67f328e","Type":"ContainerDied","Data":"51418f03259c3784502d88fc015197915de54a579198bcbfd6bef29fa8c5f498"} Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.556207 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.578697 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-ovsdbserver-nb\") pod \"dnsmasq-dns-58bd875f97-nxb6t\" (UID: \"677c92ea-3947-426f-8b7b-ec15116012be\") " pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.578768 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-dns-svc\") pod \"dnsmasq-dns-58bd875f97-nxb6t\" (UID: \"677c92ea-3947-426f-8b7b-ec15116012be\") " pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.578824 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xlrhv\" (UniqueName: \"kubernetes.io/projected/677c92ea-3947-426f-8b7b-ec15116012be-kube-api-access-xlrhv\") pod \"dnsmasq-dns-58bd875f97-nxb6t\" (UID: \"677c92ea-3947-426f-8b7b-ec15116012be\") " pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.578844 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-ovsdbserver-sb\") pod \"dnsmasq-dns-58bd875f97-nxb6t\" (UID: \"677c92ea-3947-426f-8b7b-ec15116012be\") " pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.578875 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-config\") pod \"dnsmasq-dns-58bd875f97-nxb6t\" (UID: \"677c92ea-3947-426f-8b7b-ec15116012be\") " pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.580341 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-dns-svc\") pod \"dnsmasq-dns-58bd875f97-nxb6t\" (UID: \"677c92ea-3947-426f-8b7b-ec15116012be\") " pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.580447 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-ovsdbserver-sb\") pod \"dnsmasq-dns-58bd875f97-nxb6t\" (UID: \"677c92ea-3947-426f-8b7b-ec15116012be\") " pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.580442 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-ovsdbserver-nb\") pod \"dnsmasq-dns-58bd875f97-nxb6t\" (UID: 
\"677c92ea-3947-426f-8b7b-ec15116012be\") " pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.580837 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-config\") pod \"dnsmasq-dns-58bd875f97-nxb6t\" (UID: \"677c92ea-3947-426f-8b7b-ec15116012be\") " pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.597383 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlrhv\" (UniqueName: \"kubernetes.io/projected/677c92ea-3947-426f-8b7b-ec15116012be-kube-api-access-xlrhv\") pod \"dnsmasq-dns-58bd875f97-nxb6t\" (UID: \"677c92ea-3947-426f-8b7b-ec15116012be\") " pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.604279 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.679879 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4p6h\" (UniqueName: \"kubernetes.io/projected/5e4df58e-1b19-439d-aee0-a09ee67f328e-kube-api-access-s4p6h\") pod \"5e4df58e-1b19-439d-aee0-a09ee67f328e\" (UID: \"5e4df58e-1b19-439d-aee0-a09ee67f328e\") " Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.680296 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e4df58e-1b19-439d-aee0-a09ee67f328e-config\") pod \"5e4df58e-1b19-439d-aee0-a09ee67f328e\" (UID: \"5e4df58e-1b19-439d-aee0-a09ee67f328e\") " Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.680387 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e4df58e-1b19-439d-aee0-a09ee67f328e-dns-svc\") pod \"5e4df58e-1b19-439d-aee0-a09ee67f328e\" (UID: \"5e4df58e-1b19-439d-aee0-a09ee67f328e\") " Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.681110 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e4df58e-1b19-439d-aee0-a09ee67f328e-config" (OuterVolumeSpecName: "config") pod "5e4df58e-1b19-439d-aee0-a09ee67f328e" (UID: "5e4df58e-1b19-439d-aee0-a09ee67f328e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.682150 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e4df58e-1b19-439d-aee0-a09ee67f328e-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.682506 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e4df58e-1b19-439d-aee0-a09ee67f328e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5e4df58e-1b19-439d-aee0-a09ee67f328e" (UID: "5e4df58e-1b19-439d-aee0-a09ee67f328e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.686920 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e4df58e-1b19-439d-aee0-a09ee67f328e-kube-api-access-s4p6h" (OuterVolumeSpecName: "kube-api-access-s4p6h") pod "5e4df58e-1b19-439d-aee0-a09ee67f328e" (UID: "5e4df58e-1b19-439d-aee0-a09ee67f328e"). 
InnerVolumeSpecName "kube-api-access-s4p6h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.781365 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.784569 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4p6h\" (UniqueName: \"kubernetes.io/projected/5e4df58e-1b19-439d-aee0-a09ee67f328e-kube-api-access-s4p6h\") on node \"crc\" DevicePath \"\"" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.784596 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e4df58e-1b19-439d-aee0-a09ee67f328e-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.816941 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.818761 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.819134 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.827353 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.827612 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.827778 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-ck4hs" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.827788 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.881355 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8446fd7c75-z9w86" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.887506 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/abd4a589-1b2e-4559-852f-2c27c0d8c459-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.887603 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/abd4a589-1b2e-4559-852f-2c27c0d8c459-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.887659 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abd4a589-1b2e-4559-852f-2c27c0d8c459-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.887747 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abd4a589-1b2e-4559-852f-2c27c0d8c459-config\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.887834 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abd4a589-1b2e-4559-852f-2c27c0d8c459-scripts\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.887885 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/abd4a589-1b2e-4559-852f-2c27c0d8c459-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.887918 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dzdm\" (UniqueName: \"kubernetes.io/projected/abd4a589-1b2e-4559-852f-2c27c0d8c459-kube-api-access-4dzdm\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.989505 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42377322-3180-4e68-94b9-601dcd82f6f4-config\") pod \"42377322-3180-4e68-94b9-601dcd82f6f4\" (UID: \"42377322-3180-4e68-94b9-601dcd82f6f4\") " Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.990302 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2tm2\" (UniqueName: \"kubernetes.io/projected/42377322-3180-4e68-94b9-601dcd82f6f4-kube-api-access-r2tm2\") pod \"42377322-3180-4e68-94b9-601dcd82f6f4\" (UID: \"42377322-3180-4e68-94b9-601dcd82f6f4\") " Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.990480 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42377322-3180-4e68-94b9-601dcd82f6f4-dns-svc\") pod \"42377322-3180-4e68-94b9-601dcd82f6f4\" (UID: \"42377322-3180-4e68-94b9-601dcd82f6f4\") " Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.990580 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42377322-3180-4e68-94b9-601dcd82f6f4-config" (OuterVolumeSpecName: "config") pod "42377322-3180-4e68-94b9-601dcd82f6f4" (UID: "42377322-3180-4e68-94b9-601dcd82f6f4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.990782 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abd4a589-1b2e-4559-852f-2c27c0d8c459-scripts\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.990827 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/abd4a589-1b2e-4559-852f-2c27c0d8c459-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.990847 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dzdm\" (UniqueName: \"kubernetes.io/projected/abd4a589-1b2e-4559-852f-2c27c0d8c459-kube-api-access-4dzdm\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.990907 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/abd4a589-1b2e-4559-852f-2c27c0d8c459-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.990929 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/abd4a589-1b2e-4559-852f-2c27c0d8c459-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.990960 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abd4a589-1b2e-4559-852f-2c27c0d8c459-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.991021 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abd4a589-1b2e-4559-852f-2c27c0d8c459-config\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.991094 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42377322-3180-4e68-94b9-601dcd82f6f4-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.992129 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/abd4a589-1b2e-4559-852f-2c27c0d8c459-config\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.992164 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42377322-3180-4e68-94b9-601dcd82f6f4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "42377322-3180-4e68-94b9-601dcd82f6f4" (UID: "42377322-3180-4e68-94b9-601dcd82f6f4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.993026 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abd4a589-1b2e-4559-852f-2c27c0d8c459-scripts\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.995138 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/abd4a589-1b2e-4559-852f-2c27c0d8c459-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.995302 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42377322-3180-4e68-94b9-601dcd82f6f4-kube-api-access-r2tm2" (OuterVolumeSpecName: "kube-api-access-r2tm2") pod "42377322-3180-4e68-94b9-601dcd82f6f4" (UID: "42377322-3180-4e68-94b9-601dcd82f6f4"). InnerVolumeSpecName "kube-api-access-r2tm2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.999179 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/abd4a589-1b2e-4559-852f-2c27c0d8c459-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.999326 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abd4a589-1b2e-4559-852f-2c27c0d8c459-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:53 crc kubenswrapper[4971]: I1127 07:13:53.999603 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/abd4a589-1b2e-4559-852f-2c27c0d8c459-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.012113 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dzdm\" (UniqueName: \"kubernetes.io/projected/abd4a589-1b2e-4559-852f-2c27c0d8c459-kube-api-access-4dzdm\") pod \"ovn-northd-0\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") " pod="openstack/ovn-northd-0" Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.093022 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2tm2\" (UniqueName: \"kubernetes.io/projected/42377322-3180-4e68-94b9-601dcd82f6f4-kube-api-access-r2tm2\") on node \"crc\" DevicePath \"\"" Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.093084 4971 reconciler_common.go:293] 
"Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42377322-3180-4e68-94b9-601dcd82f6f4-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 07:13:54 crc kubenswrapper[4971]: W1127 07:13:54.125746 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31d8afb0_f8c2_4d34_879f_260e94779de0.slice/crio-8c11713125217089a5a7277a83bdc4139c332115aa50c492c205289ce5af7eed WatchSource:0}: Error finding container 8c11713125217089a5a7277a83bdc4139c332115aa50c492c205289ce5af7eed: Status 404 returned error can't find the container with id 8c11713125217089a5a7277a83bdc4139c332115aa50c492c205289ce5af7eed Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.126749 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-x4rpv"] Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.139654 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fbf586c4f-dld5n"] Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.148365 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.360764 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58bd875f97-nxb6t"] Nov 27 07:13:54 crc kubenswrapper[4971]: W1127 07:13:54.372307 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod677c92ea_3947_426f_8b7b_ec15116012be.slice/crio-ebd5d2996e1af0ec20b0b5e755ab2f616c6af92a5df0b3fe50e720f6331dce8c WatchSource:0}: Error finding container ebd5d2996e1af0ec20b0b5e755ab2f616c6af92a5df0b3fe50e720f6331dce8c: Status 404 returned error can't find the container with id ebd5d2996e1af0ec20b0b5e755ab2f616c6af92a5df0b3fe50e720f6331dce8c Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.574425 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"bcf58afd-21c6-4c9d-8702-09bc98859732","Type":"ContainerStarted","Data":"bab5a722723f54ec0f0efa57197716dd795a9b02b94e004eafa54f37ca83903e"} Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.580941 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" event={"ID":"677c92ea-3947-426f-8b7b-ec15116012be","Type":"ContainerStarted","Data":"ebd5d2996e1af0ec20b0b5e755ab2f616c6af92a5df0b3fe50e720f6331dce8c"} Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.584212 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-x4rpv" event={"ID":"31d8afb0-f8c2-4d34-879f-260e94779de0","Type":"ContainerStarted","Data":"ac55cff26fc898c9d7406993cb5d12c6b2b8a73445d04ffc7d5949cc59f486b9"} Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.584296 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-x4rpv" event={"ID":"31d8afb0-f8c2-4d34-879f-260e94779de0","Type":"ContainerStarted","Data":"8c11713125217089a5a7277a83bdc4139c332115aa50c492c205289ce5af7eed"} Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.588500 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"640c3829-d2e9-49e1-82e3-bd213aa992dd","Type":"ContainerStarted","Data":"ddb554636617ff38027bc6df7fed10cfc5e39e121a5847f244d29b6eed3b5a39"} Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.590592 4971 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" event={"ID":"c8023a25-e335-417c-9d79-93178ade2982","Type":"ContainerStarted","Data":"e8b67465bf68f2728b76cef3effa174d87dbf8197f2ae72249a8f54be22c2259"} Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.591912 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8446fd7c75-z9w86" event={"ID":"42377322-3180-4e68-94b9-601dcd82f6f4","Type":"ContainerDied","Data":"b4a1263f944c2fd8d6778a72cc5629e01ac88e840a570b2445c603bd63e4af1d"} Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.592107 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8446fd7c75-z9w86" Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.592113 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57dc4c6697-d7m57" Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.612671 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-x4rpv" podStartSLOduration=1.612654177 podStartE2EDuration="1.612654177s" podCreationTimestamp="2025-11-27 07:13:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:13:54.602626983 +0000 UTC m=+1272.794670921" watchObservedRunningTime="2025-11-27 07:13:54.612654177 +0000 UTC m=+1272.804698095" Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.752381 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-z9w86"] Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.768969 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-z9w86"] Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.782239 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.798999 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57dc4c6697-d7m57"] Nov 27 07:13:54 crc kubenswrapper[4971]: I1127 07:13:54.804480 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57dc4c6697-d7m57"] Nov 27 07:13:54 crc kubenswrapper[4971]: W1127 07:13:54.807091 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabd4a589_1b2e_4559_852f_2c27c0d8c459.slice/crio-57235c1b5dd826d09a3cbfc27f2c1cec772a45124168b6fae949112fe2038c59 WatchSource:0}: Error finding container 57235c1b5dd826d09a3cbfc27f2c1cec772a45124168b6fae949112fe2038c59: Status 404 returned error can't find the container with id 57235c1b5dd826d09a3cbfc27f2c1cec772a45124168b6fae949112fe2038c59 Nov 27 07:13:55 crc kubenswrapper[4971]: I1127 07:13:55.601602 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3","Type":"ContainerStarted","Data":"7af3d67f7f3c155ae9f2f5faa014fea2a48dc67f24c202cc5665e7b51df1c641"} Nov 27 07:13:55 crc kubenswrapper[4971]: I1127 07:13:55.605149 4971 generic.go:334] "Generic (PLEG): container finished" podID="c8023a25-e335-417c-9d79-93178ade2982" containerID="a0243d211df91684e067543ea91a9902a19e93e20fda7151a630d3fb66368220" exitCode=0 Nov 27 07:13:55 crc kubenswrapper[4971]: I1127 07:13:55.605213 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" event={"ID":"c8023a25-e335-417c-9d79-93178ade2982","Type":"ContainerDied","Data":"a0243d211df91684e067543ea91a9902a19e93e20fda7151a630d3fb66368220"} Nov 27 07:13:55 crc kubenswrapper[4971]: I1127 07:13:55.606440 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"abd4a589-1b2e-4559-852f-2c27c0d8c459","Type":"ContainerStarted","Data":"57235c1b5dd826d09a3cbfc27f2c1cec772a45124168b6fae949112fe2038c59"} Nov 27 07:13:55 crc kubenswrapper[4971]: I1127 07:13:55.608527 4971 generic.go:334] "Generic (PLEG): container finished" podID="677c92ea-3947-426f-8b7b-ec15116012be" containerID="a1bdaa2fd615824865b633f2e111abeaa788edd29d699b45c3c4467716a512ce" exitCode=0 Nov 27 07:13:55 crc kubenswrapper[4971]: I1127 07:13:55.608667 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" event={"ID":"677c92ea-3947-426f-8b7b-ec15116012be","Type":"ContainerDied","Data":"a1bdaa2fd615824865b633f2e111abeaa788edd29d699b45c3c4467716a512ce"} Nov 27 07:13:56 crc kubenswrapper[4971]: I1127 07:13:56.560022 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42377322-3180-4e68-94b9-601dcd82f6f4" path="/var/lib/kubelet/pods/42377322-3180-4e68-94b9-601dcd82f6f4/volumes" Nov 27 07:13:56 crc kubenswrapper[4971]: I1127 07:13:56.560509 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e4df58e-1b19-439d-aee0-a09ee67f328e" path="/var/lib/kubelet/pods/5e4df58e-1b19-439d-aee0-a09ee67f328e/volumes" Nov 27 07:13:56 crc kubenswrapper[4971]: I1127 07:13:56.618520 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"abd4a589-1b2e-4559-852f-2c27c0d8c459","Type":"ContainerStarted","Data":"fe9a50947e8810fe514a39d8fd3842aa695ffb13f22c248d0a3f4b1749faa50c"} Nov 27 07:13:56 crc kubenswrapper[4971]: I1127 07:13:56.619103 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"abd4a589-1b2e-4559-852f-2c27c0d8c459","Type":"ContainerStarted","Data":"c9d172c81f9f3625bed2063b71b15aa0aaee90448e7484f3536af33829d91bf8"} Nov 27 07:13:56 crc kubenswrapper[4971]: I1127 07:13:56.619159 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Nov 27 07:13:56 crc kubenswrapper[4971]: I1127 07:13:56.620921 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" event={"ID":"677c92ea-3947-426f-8b7b-ec15116012be","Type":"ContainerStarted","Data":"3ffdf08e367552c7cc1bc740052a701a1154868344088d9c742eb445669a1713"} Nov 27 07:13:56 crc kubenswrapper[4971]: I1127 07:13:56.621129 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:13:56 crc kubenswrapper[4971]: I1127 07:13:56.623010 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" event={"ID":"c8023a25-e335-417c-9d79-93178ade2982","Type":"ContainerStarted","Data":"82ef0880647fde0ab196067f1d1fe9684c32000bf7441a7e92bc7fe3ad25ab1e"} Nov 27 07:13:56 crc kubenswrapper[4971]: I1127 07:13:56.623257 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" Nov 27 07:13:56 crc kubenswrapper[4971]: I1127 07:13:56.647354 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.596371029 podStartE2EDuration="3.647335331s" 
podCreationTimestamp="2025-11-27 07:13:53 +0000 UTC" firstStartedPulling="2025-11-27 07:13:54.820142981 +0000 UTC m=+1273.012186899" lastFinishedPulling="2025-11-27 07:13:55.871107273 +0000 UTC m=+1274.063151201" observedRunningTime="2025-11-27 07:13:56.642937277 +0000 UTC m=+1274.834981215" watchObservedRunningTime="2025-11-27 07:13:56.647335331 +0000 UTC m=+1274.839379249" Nov 27 07:13:56 crc kubenswrapper[4971]: I1127 07:13:56.669279 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" podStartSLOduration=3.164145645 podStartE2EDuration="3.66924937s" podCreationTimestamp="2025-11-27 07:13:53 +0000 UTC" firstStartedPulling="2025-11-27 07:13:54.38313383 +0000 UTC m=+1272.575177748" lastFinishedPulling="2025-11-27 07:13:54.888237555 +0000 UTC m=+1273.080281473" observedRunningTime="2025-11-27 07:13:56.662736686 +0000 UTC m=+1274.854780604" watchObservedRunningTime="2025-11-27 07:13:56.66924937 +0000 UTC m=+1274.861293298" Nov 27 07:13:56 crc kubenswrapper[4971]: I1127 07:13:56.685562 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" podStartSLOduration=4.099668683 podStartE2EDuration="4.68551708s" podCreationTimestamp="2025-11-27 07:13:52 +0000 UTC" firstStartedPulling="2025-11-27 07:13:54.131417776 +0000 UTC m=+1272.323461694" lastFinishedPulling="2025-11-27 07:13:54.717266173 +0000 UTC m=+1272.909310091" observedRunningTime="2025-11-27 07:13:56.681019973 +0000 UTC m=+1274.873063921" watchObservedRunningTime="2025-11-27 07:13:56.68551708 +0000 UTC m=+1274.877561038" Nov 27 07:13:57 crc kubenswrapper[4971]: I1127 07:13:57.636721 4971 generic.go:334] "Generic (PLEG): container finished" podID="bcf58afd-21c6-4c9d-8702-09bc98859732" containerID="bab5a722723f54ec0f0efa57197716dd795a9b02b94e004eafa54f37ca83903e" exitCode=0 Nov 27 07:13:57 crc kubenswrapper[4971]: I1127 07:13:57.636846 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"bcf58afd-21c6-4c9d-8702-09bc98859732","Type":"ContainerDied","Data":"bab5a722723f54ec0f0efa57197716dd795a9b02b94e004eafa54f37ca83903e"} Nov 27 07:13:57 crc kubenswrapper[4971]: I1127 07:13:57.648750 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d6439a3c-ee26-467c-8e42-5abbbf390f16","Type":"ContainerStarted","Data":"47223c6c413b4fa18667b07a2c8e720f342e5906c3e3e926a37060f670324a52"} Nov 27 07:13:58 crc kubenswrapper[4971]: I1127 07:13:58.662414 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"bcf58afd-21c6-4c9d-8702-09bc98859732","Type":"ContainerStarted","Data":"d8cae12287c5ef396f90e407b77789281e9654588ff7861e1db29623eaae2e8c"} Nov 27 07:13:58 crc kubenswrapper[4971]: I1127 07:13:58.666498 4971 generic.go:334] "Generic (PLEG): container finished" podID="4a0dcbed-5f66-4faf-83c2-1227bc05e9d3" containerID="7af3d67f7f3c155ae9f2f5faa014fea2a48dc67f24c202cc5665e7b51df1c641" exitCode=0 Nov 27 07:13:58 crc kubenswrapper[4971]: I1127 07:13:58.666569 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3","Type":"ContainerDied","Data":"7af3d67f7f3c155ae9f2f5faa014fea2a48dc67f24c202cc5665e7b51df1c641"} Nov 27 07:13:58 crc kubenswrapper[4971]: I1127 07:13:58.719892 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" 
podStartSLOduration=8.251802862 podStartE2EDuration="40.719862755s" podCreationTimestamp="2025-11-27 07:13:18 +0000 UTC" firstStartedPulling="2025-11-27 07:13:20.691742842 +0000 UTC m=+1238.883786760" lastFinishedPulling="2025-11-27 07:13:53.159802735 +0000 UTC m=+1271.351846653" observedRunningTime="2025-11-27 07:13:58.687706786 +0000 UTC m=+1276.879750714" watchObservedRunningTime="2025-11-27 07:13:58.719862755 +0000 UTC m=+1276.911906723" Nov 27 07:13:59 crc kubenswrapper[4971]: I1127 07:13:59.689308 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3","Type":"ContainerStarted","Data":"c882ea5da9f0b4988139836dea25aa544de3122c3c530bac76a1e7264a8122e0"} Nov 27 07:13:59 crc kubenswrapper[4971]: I1127 07:13:59.711977 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=-9223371994.142826 podStartE2EDuration="42.711949524s" podCreationTimestamp="2025-11-27 07:13:17 +0000 UTC" firstStartedPulling="2025-11-27 07:13:19.177842346 +0000 UTC m=+1237.369886264" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:13:59.709133074 +0000 UTC m=+1277.901177012" watchObservedRunningTime="2025-11-27 07:13:59.711949524 +0000 UTC m=+1277.903993442" Nov 27 07:14:00 crc kubenswrapper[4971]: I1127 07:14:00.117043 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Nov 27 07:14:00 crc kubenswrapper[4971]: I1127 07:14:00.117402 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Nov 27 07:14:00 crc kubenswrapper[4971]: I1127 07:14:00.277159 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Nov 27 07:14:01 crc kubenswrapper[4971]: I1127 07:14:01.897689 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fbf586c4f-dld5n"] Nov 27 07:14:01 crc kubenswrapper[4971]: I1127 07:14:01.898030 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" podUID="c8023a25-e335-417c-9d79-93178ade2982" containerName="dnsmasq-dns" containerID="cri-o://82ef0880647fde0ab196067f1d1fe9684c32000bf7441a7e92bc7fe3ad25ab1e" gracePeriod=10 Nov 27 07:14:01 crc kubenswrapper[4971]: I1127 07:14:01.903384 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.109788 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c8cb8df65-n7qt6"] Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.120086 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.184436 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c8cb8df65-n7qt6"] Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.273652 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-dns-svc\") pod \"dnsmasq-dns-7c8cb8df65-n7qt6\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.273711 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86s6x\" (UniqueName: \"kubernetes.io/projected/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-kube-api-access-86s6x\") pod \"dnsmasq-dns-7c8cb8df65-n7qt6\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.273738 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-ovsdbserver-sb\") pod \"dnsmasq-dns-7c8cb8df65-n7qt6\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.273782 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-config\") pod \"dnsmasq-dns-7c8cb8df65-n7qt6\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.273902 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-ovsdbserver-nb\") pod \"dnsmasq-dns-7c8cb8df65-n7qt6\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.374957 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-config\") pod \"dnsmasq-dns-7c8cb8df65-n7qt6\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.375079 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-ovsdbserver-nb\") pod \"dnsmasq-dns-7c8cb8df65-n7qt6\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.375121 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-dns-svc\") pod \"dnsmasq-dns-7c8cb8df65-n7qt6\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.375148 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-86s6x\" (UniqueName: \"kubernetes.io/projected/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-kube-api-access-86s6x\") pod \"dnsmasq-dns-7c8cb8df65-n7qt6\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.375166 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-ovsdbserver-sb\") pod \"dnsmasq-dns-7c8cb8df65-n7qt6\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.376326 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-ovsdbserver-sb\") pod \"dnsmasq-dns-7c8cb8df65-n7qt6\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.376336 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-dns-svc\") pod \"dnsmasq-dns-7c8cb8df65-n7qt6\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.376402 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-ovsdbserver-nb\") pod \"dnsmasq-dns-7c8cb8df65-n7qt6\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.376411 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-config\") pod \"dnsmasq-dns-7c8cb8df65-n7qt6\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.415364 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86s6x\" (UniqueName: \"kubernetes.io/projected/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-kube-api-access-86s6x\") pod \"dnsmasq-dns-7c8cb8df65-n7qt6\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.526965 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.591100 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.681950 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c8023a25-e335-417c-9d79-93178ade2982-ovsdbserver-sb\") pod \"c8023a25-e335-417c-9d79-93178ade2982\" (UID: \"c8023a25-e335-417c-9d79-93178ade2982\") " Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.682040 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwz5k\" (UniqueName: \"kubernetes.io/projected/c8023a25-e335-417c-9d79-93178ade2982-kube-api-access-gwz5k\") pod \"c8023a25-e335-417c-9d79-93178ade2982\" (UID: \"c8023a25-e335-417c-9d79-93178ade2982\") " Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.682210 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c8023a25-e335-417c-9d79-93178ade2982-dns-svc\") pod \"c8023a25-e335-417c-9d79-93178ade2982\" (UID: \"c8023a25-e335-417c-9d79-93178ade2982\") " Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.682266 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8023a25-e335-417c-9d79-93178ade2982-config\") pod \"c8023a25-e335-417c-9d79-93178ade2982\" (UID: \"c8023a25-e335-417c-9d79-93178ade2982\") " Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.699957 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8023a25-e335-417c-9d79-93178ade2982-kube-api-access-gwz5k" (OuterVolumeSpecName: "kube-api-access-gwz5k") pod "c8023a25-e335-417c-9d79-93178ade2982" (UID: "c8023a25-e335-417c-9d79-93178ade2982"). InnerVolumeSpecName "kube-api-access-gwz5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.724301 4971 generic.go:334] "Generic (PLEG): container finished" podID="c8023a25-e335-417c-9d79-93178ade2982" containerID="82ef0880647fde0ab196067f1d1fe9684c32000bf7441a7e92bc7fe3ad25ab1e" exitCode=0 Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.724686 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" event={"ID":"c8023a25-e335-417c-9d79-93178ade2982","Type":"ContainerDied","Data":"82ef0880647fde0ab196067f1d1fe9684c32000bf7441a7e92bc7fe3ad25ab1e"} Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.724732 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" event={"ID":"c8023a25-e335-417c-9d79-93178ade2982","Type":"ContainerDied","Data":"e8b67465bf68f2728b76cef3effa174d87dbf8197f2ae72249a8f54be22c2259"} Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.724760 4971 scope.go:117] "RemoveContainer" containerID="82ef0880647fde0ab196067f1d1fe9684c32000bf7441a7e92bc7fe3ad25ab1e" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.724966 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fbf586c4f-dld5n" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.743069 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8023a25-e335-417c-9d79-93178ade2982-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c8023a25-e335-417c-9d79-93178ade2982" (UID: "c8023a25-e335-417c-9d79-93178ade2982"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.753357 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8023a25-e335-417c-9d79-93178ade2982-config" (OuterVolumeSpecName: "config") pod "c8023a25-e335-417c-9d79-93178ade2982" (UID: "c8023a25-e335-417c-9d79-93178ade2982"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.756744 4971 scope.go:117] "RemoveContainer" containerID="a0243d211df91684e067543ea91a9902a19e93e20fda7151a630d3fb66368220" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.759759 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8023a25-e335-417c-9d79-93178ade2982-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c8023a25-e335-417c-9d79-93178ade2982" (UID: "c8023a25-e335-417c-9d79-93178ade2982"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.778937 4971 scope.go:117] "RemoveContainer" containerID="82ef0880647fde0ab196067f1d1fe9684c32000bf7441a7e92bc7fe3ad25ab1e" Nov 27 07:14:02 crc kubenswrapper[4971]: E1127 07:14:02.779860 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82ef0880647fde0ab196067f1d1fe9684c32000bf7441a7e92bc7fe3ad25ab1e\": container with ID starting with 82ef0880647fde0ab196067f1d1fe9684c32000bf7441a7e92bc7fe3ad25ab1e not found: ID does not exist" containerID="82ef0880647fde0ab196067f1d1fe9684c32000bf7441a7e92bc7fe3ad25ab1e" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.779921 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82ef0880647fde0ab196067f1d1fe9684c32000bf7441a7e92bc7fe3ad25ab1e"} err="failed to get container status \"82ef0880647fde0ab196067f1d1fe9684c32000bf7441a7e92bc7fe3ad25ab1e\": rpc error: code = NotFound desc = could not find container \"82ef0880647fde0ab196067f1d1fe9684c32000bf7441a7e92bc7fe3ad25ab1e\": container with ID starting with 82ef0880647fde0ab196067f1d1fe9684c32000bf7441a7e92bc7fe3ad25ab1e not found: ID does not exist" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.779972 4971 scope.go:117] "RemoveContainer" containerID="a0243d211df91684e067543ea91a9902a19e93e20fda7151a630d3fb66368220" Nov 27 07:14:02 crc kubenswrapper[4971]: E1127 07:14:02.780346 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0243d211df91684e067543ea91a9902a19e93e20fda7151a630d3fb66368220\": container with ID starting with a0243d211df91684e067543ea91a9902a19e93e20fda7151a630d3fb66368220 not found: ID does not exist" containerID="a0243d211df91684e067543ea91a9902a19e93e20fda7151a630d3fb66368220" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.780382 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0243d211df91684e067543ea91a9902a19e93e20fda7151a630d3fb66368220"} err="failed to get container status \"a0243d211df91684e067543ea91a9902a19e93e20fda7151a630d3fb66368220\": rpc error: code = NotFound desc = could not find container \"a0243d211df91684e067543ea91a9902a19e93e20fda7151a630d3fb66368220\": container with ID starting with a0243d211df91684e067543ea91a9902a19e93e20fda7151a630d3fb66368220 not found: ID 
does not exist" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.784546 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c8023a25-e335-417c-9d79-93178ade2982-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.784607 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwz5k\" (UniqueName: \"kubernetes.io/projected/c8023a25-e335-417c-9d79-93178ade2982-kube-api-access-gwz5k\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.784624 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c8023a25-e335-417c-9d79-93178ade2982-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:02 crc kubenswrapper[4971]: I1127 07:14:02.784638 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8023a25-e335-417c-9d79-93178ade2982-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.083628 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c8cb8df65-n7qt6"] Nov 27 07:14:03 crc kubenswrapper[4971]: W1127 07:14:03.090746 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e9e2347_7f0c_4e7e_9c84_a9afaf885bdb.slice/crio-5ea9966f723aeea730e49cc7a5c3b87b631d27fe0e2d13125b22912fc38e7618 WatchSource:0}: Error finding container 5ea9966f723aeea730e49cc7a5c3b87b631d27fe0e2d13125b22912fc38e7618: Status 404 returned error can't find the container with id 5ea9966f723aeea730e49cc7a5c3b87b631d27fe0e2d13125b22912fc38e7618 Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.097672 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fbf586c4f-dld5n"] Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.103822 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-fbf586c4f-dld5n"] Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.131038 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Nov 27 07:14:03 crc kubenswrapper[4971]: E1127 07:14:03.131746 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8023a25-e335-417c-9d79-93178ade2982" containerName="dnsmasq-dns" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.131771 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8023a25-e335-417c-9d79-93178ade2982" containerName="dnsmasq-dns" Nov 27 07:14:03 crc kubenswrapper[4971]: E1127 07:14:03.131837 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8023a25-e335-417c-9d79-93178ade2982" containerName="init" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.131849 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8023a25-e335-417c-9d79-93178ade2982" containerName="init" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.132086 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8023a25-e335-417c-9d79-93178ade2982" containerName="dnsmasq-dns" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.137593 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.175974 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.176175 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-k5bl4" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.176059 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.176102 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.230397 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.297692 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b9ccc9bd-d955-4853-986f-95597f2c70e6-lock\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.298290 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.298334 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rhrd\" (UniqueName: \"kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-kube-api-access-5rhrd\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.298395 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b9ccc9bd-d955-4853-986f-95597f2c70e6-cache\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.298447 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.401066 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.401206 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rhrd\" (UniqueName: \"kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-kube-api-access-5rhrd\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.401330 4971 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b9ccc9bd-d955-4853-986f-95597f2c70e6-cache\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.401404 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.401448 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b9ccc9bd-d955-4853-986f-95597f2c70e6-lock\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.402433 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b9ccc9bd-d955-4853-986f-95597f2c70e6-lock\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.404344 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/swift-storage-0" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.404784 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b9ccc9bd-d955-4853-986f-95597f2c70e6-cache\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:03 crc kubenswrapper[4971]: E1127 07:14:03.404818 4971 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 27 07:14:03 crc kubenswrapper[4971]: E1127 07:14:03.404863 4971 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 27 07:14:03 crc kubenswrapper[4971]: E1127 07:14:03.404958 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift podName:b9ccc9bd-d955-4853-986f-95597f2c70e6 nodeName:}" failed. No retries permitted until 2025-11-27 07:14:03.904927516 +0000 UTC m=+1282.096971434 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift") pod "swift-storage-0" (UID: "b9ccc9bd-d955-4853-986f-95597f2c70e6") : configmap "swift-ring-files" not found Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.427763 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rhrd\" (UniqueName: \"kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-kube-api-access-5rhrd\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.439143 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.735400 4971 generic.go:334] "Generic (PLEG): container finished" podID="8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb" containerID="1503ad59fe2f7a731dc4ddb7d6b70443caede2d1e9949aeb41cab028d1b729a9" exitCode=0 Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.735468 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" event={"ID":"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb","Type":"ContainerDied","Data":"1503ad59fe2f7a731dc4ddb7d6b70443caede2d1e9949aeb41cab028d1b729a9"} Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.735511 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" event={"ID":"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb","Type":"ContainerStarted","Data":"5ea9966f723aeea730e49cc7a5c3b87b631d27fe0e2d13125b22912fc38e7618"} Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.782708 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.804938 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-s6jkh"] Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.806335 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.823602 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.824394 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.838791 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.861023 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-s6jkh"] Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.924386 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6652b368-b37b-48f3-839e-a89982fec862-dispersionconf\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.924429 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6652b368-b37b-48f3-839e-a89982fec862-swiftconf\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.924466 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6652b368-b37b-48f3-839e-a89982fec862-combined-ca-bundle\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.924615 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:03 crc kubenswrapper[4971]: E1127 07:14:03.924790 4971 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 27 07:14:03 crc kubenswrapper[4971]: E1127 07:14:03.924807 4971 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 27 07:14:03 crc kubenswrapper[4971]: E1127 07:14:03.924861 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift podName:b9ccc9bd-d955-4853-986f-95597f2c70e6 nodeName:}" failed. No retries permitted until 2025-11-27 07:14:04.92484063 +0000 UTC m=+1283.116884548 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift") pod "swift-storage-0" (UID: "b9ccc9bd-d955-4853-986f-95597f2c70e6") : configmap "swift-ring-files" not found Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.924943 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6652b368-b37b-48f3-839e-a89982fec862-etc-swift\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.925179 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6652b368-b37b-48f3-839e-a89982fec862-ring-data-devices\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.925324 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6652b368-b37b-48f3-839e-a89982fec862-scripts\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:03 crc kubenswrapper[4971]: I1127 07:14:03.925467 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdpjx\" (UniqueName: \"kubernetes.io/projected/6652b368-b37b-48f3-839e-a89982fec862-kube-api-access-cdpjx\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:04 crc kubenswrapper[4971]: I1127 07:14:04.027466 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6652b368-b37b-48f3-839e-a89982fec862-scripts\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:04 crc kubenswrapper[4971]: I1127 07:14:04.027534 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdpjx\" (UniqueName: \"kubernetes.io/projected/6652b368-b37b-48f3-839e-a89982fec862-kube-api-access-cdpjx\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:04 crc kubenswrapper[4971]: I1127 07:14:04.027584 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6652b368-b37b-48f3-839e-a89982fec862-dispersionconf\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:04 crc kubenswrapper[4971]: I1127 07:14:04.027609 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6652b368-b37b-48f3-839e-a89982fec862-swiftconf\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:04 crc kubenswrapper[4971]: I1127 07:14:04.027642 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/6652b368-b37b-48f3-839e-a89982fec862-combined-ca-bundle\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:04 crc kubenswrapper[4971]: I1127 07:14:04.027732 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6652b368-b37b-48f3-839e-a89982fec862-etc-swift\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:04 crc kubenswrapper[4971]: I1127 07:14:04.027773 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6652b368-b37b-48f3-839e-a89982fec862-ring-data-devices\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:04 crc kubenswrapper[4971]: I1127 07:14:04.028512 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6652b368-b37b-48f3-839e-a89982fec862-scripts\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:04 crc kubenswrapper[4971]: I1127 07:14:04.028567 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6652b368-b37b-48f3-839e-a89982fec862-ring-data-devices\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:04 crc kubenswrapper[4971]: I1127 07:14:04.028859 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6652b368-b37b-48f3-839e-a89982fec862-etc-swift\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:04 crc kubenswrapper[4971]: I1127 07:14:04.033872 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6652b368-b37b-48f3-839e-a89982fec862-dispersionconf\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:04 crc kubenswrapper[4971]: I1127 07:14:04.034221 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6652b368-b37b-48f3-839e-a89982fec862-swiftconf\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:04 crc kubenswrapper[4971]: I1127 07:14:04.043468 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6652b368-b37b-48f3-839e-a89982fec862-combined-ca-bundle\") pod \"swift-ring-rebalance-s6jkh\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:04 crc kubenswrapper[4971]: I1127 07:14:04.050892 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdpjx\" (UniqueName: \"kubernetes.io/projected/6652b368-b37b-48f3-839e-a89982fec862-kube-api-access-cdpjx\") pod \"swift-ring-rebalance-s6jkh\" (UID: 
\"6652b368-b37b-48f3-839e-a89982fec862\") " pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:04 crc kubenswrapper[4971]: I1127 07:14:04.149468 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:04 crc kubenswrapper[4971]: I1127 07:14:04.562334 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8023a25-e335-417c-9d79-93178ade2982" path="/var/lib/kubelet/pods/c8023a25-e335-417c-9d79-93178ade2982/volumes" Nov 27 07:14:04 crc kubenswrapper[4971]: I1127 07:14:04.578054 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-s6jkh"] Nov 27 07:14:04 crc kubenswrapper[4971]: W1127 07:14:04.602615 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6652b368_b37b_48f3_839e_a89982fec862.slice/crio-d6c505da5c7f45453d1a3c792c56478334d88d5e0b4e98d2ec764159b1e6adb0 WatchSource:0}: Error finding container d6c505da5c7f45453d1a3c792c56478334d88d5e0b4e98d2ec764159b1e6adb0: Status 404 returned error can't find the container with id d6c505da5c7f45453d1a3c792c56478334d88d5e0b4e98d2ec764159b1e6adb0 Nov 27 07:14:04 crc kubenswrapper[4971]: I1127 07:14:04.744553 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-s6jkh" event={"ID":"6652b368-b37b-48f3-839e-a89982fec862","Type":"ContainerStarted","Data":"d6c505da5c7f45453d1a3c792c56478334d88d5e0b4e98d2ec764159b1e6adb0"} Nov 27 07:14:04 crc kubenswrapper[4971]: I1127 07:14:04.944717 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:04 crc kubenswrapper[4971]: E1127 07:14:04.944934 4971 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 27 07:14:04 crc kubenswrapper[4971]: E1127 07:14:04.944967 4971 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 27 07:14:04 crc kubenswrapper[4971]: E1127 07:14:04.945025 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift podName:b9ccc9bd-d955-4853-986f-95597f2c70e6 nodeName:}" failed. No retries permitted until 2025-11-27 07:14:06.945007823 +0000 UTC m=+1285.137051741 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift") pod "swift-storage-0" (UID: "b9ccc9bd-d955-4853-986f-95597f2c70e6") : configmap "swift-ring-files" not found Nov 27 07:14:05 crc kubenswrapper[4971]: I1127 07:14:05.756608 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" event={"ID":"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb","Type":"ContainerStarted","Data":"d089e8139b56227d8ce7e38a5d3c9d7c7321d12c9471e1e224872d3fbca70bd8"} Nov 27 07:14:05 crc kubenswrapper[4971]: I1127 07:14:05.756759 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:06 crc kubenswrapper[4971]: I1127 07:14:06.718815 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Nov 27 07:14:06 crc kubenswrapper[4971]: I1127 07:14:06.747767 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" podStartSLOduration=5.747743992 podStartE2EDuration="5.747743992s" podCreationTimestamp="2025-11-27 07:14:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:14:05.778398787 +0000 UTC m=+1283.970442715" watchObservedRunningTime="2025-11-27 07:14:06.747743992 +0000 UTC m=+1284.939787910" Nov 27 07:14:06 crc kubenswrapper[4971]: I1127 07:14:06.801103 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Nov 27 07:14:06 crc kubenswrapper[4971]: I1127 07:14:06.983620 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:06 crc kubenswrapper[4971]: E1127 07:14:06.983811 4971 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 27 07:14:06 crc kubenswrapper[4971]: E1127 07:14:06.983921 4971 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 27 07:14:06 crc kubenswrapper[4971]: E1127 07:14:06.983984 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift podName:b9ccc9bd-d955-4853-986f-95597f2c70e6 nodeName:}" failed. No retries permitted until 2025-11-27 07:14:10.983965218 +0000 UTC m=+1289.176009136 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift") pod "swift-storage-0" (UID: "b9ccc9bd-d955-4853-986f-95597f2c70e6") : configmap "swift-ring-files" not found Nov 27 07:14:08 crc kubenswrapper[4971]: I1127 07:14:08.523527 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Nov 27 07:14:08 crc kubenswrapper[4971]: I1127 07:14:08.524011 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Nov 27 07:14:08 crc kubenswrapper[4971]: I1127 07:14:08.607024 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Nov 27 07:14:08 crc kubenswrapper[4971]: I1127 07:14:08.786730 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-s6jkh" event={"ID":"6652b368-b37b-48f3-839e-a89982fec862","Type":"ContainerStarted","Data":"4ebd81db73ba88d8cfa0503889ac418a2664333c3598afa00657ff2e14de274c"} Nov 27 07:14:08 crc kubenswrapper[4971]: I1127 07:14:08.837920 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-s6jkh" podStartSLOduration=2.53858756 podStartE2EDuration="5.837895727s" podCreationTimestamp="2025-11-27 07:14:03 +0000 UTC" firstStartedPulling="2025-11-27 07:14:04.605417825 +0000 UTC m=+1282.797461743" lastFinishedPulling="2025-11-27 07:14:07.904725992 +0000 UTC m=+1286.096769910" observedRunningTime="2025-11-27 07:14:08.833652967 +0000 UTC m=+1287.025696895" watchObservedRunningTime="2025-11-27 07:14:08.837895727 +0000 UTC m=+1287.029939645" Nov 27 07:14:08 crc kubenswrapper[4971]: I1127 07:14:08.875124 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Nov 27 07:14:09 crc kubenswrapper[4971]: I1127 07:14:09.209386 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Nov 27 07:14:09 crc kubenswrapper[4971]: I1127 07:14:09.871526 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-gn9bn"] Nov 27 07:14:09 crc kubenswrapper[4971]: I1127 07:14:09.872863 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-gn9bn" Nov 27 07:14:09 crc kubenswrapper[4971]: I1127 07:14:09.882986 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-gn9bn"] Nov 27 07:14:09 crc kubenswrapper[4971]: I1127 07:14:09.954666 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lh7r7\" (UniqueName: \"kubernetes.io/projected/2c01d5be-466c-4eec-a5b3-261fe0f47d9c-kube-api-access-lh7r7\") pod \"keystone-db-create-gn9bn\" (UID: \"2c01d5be-466c-4eec-a5b3-261fe0f47d9c\") " pod="openstack/keystone-db-create-gn9bn" Nov 27 07:14:09 crc kubenswrapper[4971]: I1127 07:14:09.954771 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c01d5be-466c-4eec-a5b3-261fe0f47d9c-operator-scripts\") pod \"keystone-db-create-gn9bn\" (UID: \"2c01d5be-466c-4eec-a5b3-261fe0f47d9c\") " pod="openstack/keystone-db-create-gn9bn" Nov 27 07:14:09 crc kubenswrapper[4971]: I1127 07:14:09.986075 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-afb3-account-create-update-msgvh"] Nov 27 07:14:09 crc kubenswrapper[4971]: I1127 07:14:09.987337 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-afb3-account-create-update-msgvh" Nov 27 07:14:09 crc kubenswrapper[4971]: I1127 07:14:09.990304 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Nov 27 07:14:09 crc kubenswrapper[4971]: I1127 07:14:09.993822 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-afb3-account-create-update-msgvh"] Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.056357 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r96db\" (UniqueName: \"kubernetes.io/projected/418a63d5-afc7-450f-a99a-3c67fedbbabd-kube-api-access-r96db\") pod \"keystone-afb3-account-create-update-msgvh\" (UID: \"418a63d5-afc7-450f-a99a-3c67fedbbabd\") " pod="openstack/keystone-afb3-account-create-update-msgvh" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.056496 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lh7r7\" (UniqueName: \"kubernetes.io/projected/2c01d5be-466c-4eec-a5b3-261fe0f47d9c-kube-api-access-lh7r7\") pod \"keystone-db-create-gn9bn\" (UID: \"2c01d5be-466c-4eec-a5b3-261fe0f47d9c\") " pod="openstack/keystone-db-create-gn9bn" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.056535 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c01d5be-466c-4eec-a5b3-261fe0f47d9c-operator-scripts\") pod \"keystone-db-create-gn9bn\" (UID: \"2c01d5be-466c-4eec-a5b3-261fe0f47d9c\") " pod="openstack/keystone-db-create-gn9bn" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.056715 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/418a63d5-afc7-450f-a99a-3c67fedbbabd-operator-scripts\") pod \"keystone-afb3-account-create-update-msgvh\" (UID: \"418a63d5-afc7-450f-a99a-3c67fedbbabd\") " pod="openstack/keystone-afb3-account-create-update-msgvh" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.057641 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c01d5be-466c-4eec-a5b3-261fe0f47d9c-operator-scripts\") pod \"keystone-db-create-gn9bn\" (UID: \"2c01d5be-466c-4eec-a5b3-261fe0f47d9c\") " pod="openstack/keystone-db-create-gn9bn" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.090126 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lh7r7\" (UniqueName: \"kubernetes.io/projected/2c01d5be-466c-4eec-a5b3-261fe0f47d9c-kube-api-access-lh7r7\") pod \"keystone-db-create-gn9bn\" (UID: \"2c01d5be-466c-4eec-a5b3-261fe0f47d9c\") " pod="openstack/keystone-db-create-gn9bn" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.102237 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-cxhr4"] Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.103658 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-cxhr4" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.111184 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-cxhr4"] Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.159105 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/418a63d5-afc7-450f-a99a-3c67fedbbabd-operator-scripts\") pod \"keystone-afb3-account-create-update-msgvh\" (UID: \"418a63d5-afc7-450f-a99a-3c67fedbbabd\") " pod="openstack/keystone-afb3-account-create-update-msgvh" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.160808 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/418a63d5-afc7-450f-a99a-3c67fedbbabd-operator-scripts\") pod \"keystone-afb3-account-create-update-msgvh\" (UID: \"418a63d5-afc7-450f-a99a-3c67fedbbabd\") " pod="openstack/keystone-afb3-account-create-update-msgvh" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.160882 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r96db\" (UniqueName: \"kubernetes.io/projected/418a63d5-afc7-450f-a99a-3c67fedbbabd-kube-api-access-r96db\") pod \"keystone-afb3-account-create-update-msgvh\" (UID: \"418a63d5-afc7-450f-a99a-3c67fedbbabd\") " pod="openstack/keystone-afb3-account-create-update-msgvh" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.185276 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r96db\" (UniqueName: \"kubernetes.io/projected/418a63d5-afc7-450f-a99a-3c67fedbbabd-kube-api-access-r96db\") pod \"keystone-afb3-account-create-update-msgvh\" (UID: \"418a63d5-afc7-450f-a99a-3c67fedbbabd\") " pod="openstack/keystone-afb3-account-create-update-msgvh" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.205203 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-gn9bn" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.217001 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-0147-account-create-update-gtcn7"] Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.220623 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-0147-account-create-update-gtcn7" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.230215 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.252375 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-0147-account-create-update-gtcn7"] Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.262724 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzcfl\" (UniqueName: \"kubernetes.io/projected/1349dfd7-4e8c-458d-a932-c6a77029f735-kube-api-access-gzcfl\") pod \"placement-db-create-cxhr4\" (UID: \"1349dfd7-4e8c-458d-a932-c6a77029f735\") " pod="openstack/placement-db-create-cxhr4" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.262834 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1349dfd7-4e8c-458d-a932-c6a77029f735-operator-scripts\") pod \"placement-db-create-cxhr4\" (UID: \"1349dfd7-4e8c-458d-a932-c6a77029f735\") " pod="openstack/placement-db-create-cxhr4" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.339585 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-afb3-account-create-update-msgvh" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.364014 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzcfl\" (UniqueName: \"kubernetes.io/projected/1349dfd7-4e8c-458d-a932-c6a77029f735-kube-api-access-gzcfl\") pod \"placement-db-create-cxhr4\" (UID: \"1349dfd7-4e8c-458d-a932-c6a77029f735\") " pod="openstack/placement-db-create-cxhr4" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.364063 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96f81740-91f7-431a-be63-94b4fd8fc096-operator-scripts\") pod \"placement-0147-account-create-update-gtcn7\" (UID: \"96f81740-91f7-431a-be63-94b4fd8fc096\") " pod="openstack/placement-0147-account-create-update-gtcn7" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.364165 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsdx6\" (UniqueName: \"kubernetes.io/projected/96f81740-91f7-431a-be63-94b4fd8fc096-kube-api-access-lsdx6\") pod \"placement-0147-account-create-update-gtcn7\" (UID: \"96f81740-91f7-431a-be63-94b4fd8fc096\") " pod="openstack/placement-0147-account-create-update-gtcn7" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.364283 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1349dfd7-4e8c-458d-a932-c6a77029f735-operator-scripts\") pod \"placement-db-create-cxhr4\" (UID: \"1349dfd7-4e8c-458d-a932-c6a77029f735\") " pod="openstack/placement-db-create-cxhr4" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.365204 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1349dfd7-4e8c-458d-a932-c6a77029f735-operator-scripts\") pod \"placement-db-create-cxhr4\" (UID: \"1349dfd7-4e8c-458d-a932-c6a77029f735\") " pod="openstack/placement-db-create-cxhr4" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 
07:14:10.384472 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzcfl\" (UniqueName: \"kubernetes.io/projected/1349dfd7-4e8c-458d-a932-c6a77029f735-kube-api-access-gzcfl\") pod \"placement-db-create-cxhr4\" (UID: \"1349dfd7-4e8c-458d-a932-c6a77029f735\") " pod="openstack/placement-db-create-cxhr4" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.443028 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-cxhr4" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.468884 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsdx6\" (UniqueName: \"kubernetes.io/projected/96f81740-91f7-431a-be63-94b4fd8fc096-kube-api-access-lsdx6\") pod \"placement-0147-account-create-update-gtcn7\" (UID: \"96f81740-91f7-431a-be63-94b4fd8fc096\") " pod="openstack/placement-0147-account-create-update-gtcn7" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.469062 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96f81740-91f7-431a-be63-94b4fd8fc096-operator-scripts\") pod \"placement-0147-account-create-update-gtcn7\" (UID: \"96f81740-91f7-431a-be63-94b4fd8fc096\") " pod="openstack/placement-0147-account-create-update-gtcn7" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.471111 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96f81740-91f7-431a-be63-94b4fd8fc096-operator-scripts\") pod \"placement-0147-account-create-update-gtcn7\" (UID: \"96f81740-91f7-431a-be63-94b4fd8fc096\") " pod="openstack/placement-0147-account-create-update-gtcn7" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.494458 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-8769s"] Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.496223 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-8769s" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.503383 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-8769s"] Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.504449 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsdx6\" (UniqueName: \"kubernetes.io/projected/96f81740-91f7-431a-be63-94b4fd8fc096-kube-api-access-lsdx6\") pod \"placement-0147-account-create-update-gtcn7\" (UID: \"96f81740-91f7-431a-be63-94b4fd8fc096\") " pod="openstack/placement-0147-account-create-update-gtcn7" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.561055 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-0147-account-create-update-gtcn7" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.571971 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/38e2d59c-6699-4df0-aa89-e837dd02993a-operator-scripts\") pod \"glance-db-create-8769s\" (UID: \"38e2d59c-6699-4df0-aa89-e837dd02993a\") " pod="openstack/glance-db-create-8769s" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.572082 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tb2h\" (UniqueName: \"kubernetes.io/projected/38e2d59c-6699-4df0-aa89-e837dd02993a-kube-api-access-6tb2h\") pod \"glance-db-create-8769s\" (UID: \"38e2d59c-6699-4df0-aa89-e837dd02993a\") " pod="openstack/glance-db-create-8769s" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.610521 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-7577-account-create-update-v4drs"] Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.611672 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-7577-account-create-update-v4drs" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.616732 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.617917 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-7577-account-create-update-v4drs"] Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.675040 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/38e2d59c-6699-4df0-aa89-e837dd02993a-operator-scripts\") pod \"glance-db-create-8769s\" (UID: \"38e2d59c-6699-4df0-aa89-e837dd02993a\") " pod="openstack/glance-db-create-8769s" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.675138 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tb2h\" (UniqueName: \"kubernetes.io/projected/38e2d59c-6699-4df0-aa89-e837dd02993a-kube-api-access-6tb2h\") pod \"glance-db-create-8769s\" (UID: \"38e2d59c-6699-4df0-aa89-e837dd02993a\") " pod="openstack/glance-db-create-8769s" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.677384 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/38e2d59c-6699-4df0-aa89-e837dd02993a-operator-scripts\") pod \"glance-db-create-8769s\" (UID: \"38e2d59c-6699-4df0-aa89-e837dd02993a\") " pod="openstack/glance-db-create-8769s" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.692419 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-gn9bn"] Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.699327 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tb2h\" (UniqueName: \"kubernetes.io/projected/38e2d59c-6699-4df0-aa89-e837dd02993a-kube-api-access-6tb2h\") pod \"glance-db-create-8769s\" (UID: \"38e2d59c-6699-4df0-aa89-e837dd02993a\") " pod="openstack/glance-db-create-8769s" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.777081 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lv9s4\" (UniqueName: 
\"kubernetes.io/projected/ad8e41a8-cb48-4f48-82d3-2293e640bf1e-kube-api-access-lv9s4\") pod \"glance-7577-account-create-update-v4drs\" (UID: \"ad8e41a8-cb48-4f48-82d3-2293e640bf1e\") " pod="openstack/glance-7577-account-create-update-v4drs" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.777153 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad8e41a8-cb48-4f48-82d3-2293e640bf1e-operator-scripts\") pod \"glance-7577-account-create-update-v4drs\" (UID: \"ad8e41a8-cb48-4f48-82d3-2293e640bf1e\") " pod="openstack/glance-7577-account-create-update-v4drs" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.812463 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-gn9bn" event={"ID":"2c01d5be-466c-4eec-a5b3-261fe0f47d9c","Type":"ContainerStarted","Data":"1173153e43cd4c18d6b35436605874fbe512e4b845ac19237ee70271be7395cf"} Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.838585 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-8769s" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.879229 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lv9s4\" (UniqueName: \"kubernetes.io/projected/ad8e41a8-cb48-4f48-82d3-2293e640bf1e-kube-api-access-lv9s4\") pod \"glance-7577-account-create-update-v4drs\" (UID: \"ad8e41a8-cb48-4f48-82d3-2293e640bf1e\") " pod="openstack/glance-7577-account-create-update-v4drs" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.879301 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad8e41a8-cb48-4f48-82d3-2293e640bf1e-operator-scripts\") pod \"glance-7577-account-create-update-v4drs\" (UID: \"ad8e41a8-cb48-4f48-82d3-2293e640bf1e\") " pod="openstack/glance-7577-account-create-update-v4drs" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.880262 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad8e41a8-cb48-4f48-82d3-2293e640bf1e-operator-scripts\") pod \"glance-7577-account-create-update-v4drs\" (UID: \"ad8e41a8-cb48-4f48-82d3-2293e640bf1e\") " pod="openstack/glance-7577-account-create-update-v4drs" Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.896903 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-afb3-account-create-update-msgvh"] Nov 27 07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.906199 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lv9s4\" (UniqueName: \"kubernetes.io/projected/ad8e41a8-cb48-4f48-82d3-2293e640bf1e-kube-api-access-lv9s4\") pod \"glance-7577-account-create-update-v4drs\" (UID: \"ad8e41a8-cb48-4f48-82d3-2293e640bf1e\") " pod="openstack/glance-7577-account-create-update-v4drs" Nov 27 07:14:10 crc kubenswrapper[4971]: W1127 07:14:10.907023 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod418a63d5_afc7_450f_a99a_3c67fedbbabd.slice/crio-748cbba54b55ae27457dca2a954529a063a4a43136c0f6ea375ec751b048464a WatchSource:0}: Error finding container 748cbba54b55ae27457dca2a954529a063a4a43136c0f6ea375ec751b048464a: Status 404 returned error can't find the container with id 748cbba54b55ae27457dca2a954529a063a4a43136c0f6ea375ec751b048464a Nov 27 
07:14:10 crc kubenswrapper[4971]: I1127 07:14:10.944411 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-7577-account-create-update-v4drs" Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.039877 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-cxhr4"] Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.082909 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:11 crc kubenswrapper[4971]: E1127 07:14:11.083113 4971 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 27 07:14:11 crc kubenswrapper[4971]: E1127 07:14:11.083128 4971 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 27 07:14:11 crc kubenswrapper[4971]: E1127 07:14:11.083172 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift podName:b9ccc9bd-d955-4853-986f-95597f2c70e6 nodeName:}" failed. No retries permitted until 2025-11-27 07:14:19.083156992 +0000 UTC m=+1297.275200910 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift") pod "swift-storage-0" (UID: "b9ccc9bd-d955-4853-986f-95597f2c70e6") : configmap "swift-ring-files" not found Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.183347 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-0147-account-create-update-gtcn7"] Nov 27 07:14:11 crc kubenswrapper[4971]: W1127 07:14:11.213507 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96f81740_91f7_431a_be63_94b4fd8fc096.slice/crio-c83fbef1998bae8f801743153eba41e664a0f3c24d23d285c1e54327c439e011 WatchSource:0}: Error finding container c83fbef1998bae8f801743153eba41e664a0f3c24d23d285c1e54327c439e011: Status 404 returned error can't find the container with id c83fbef1998bae8f801743153eba41e664a0f3c24d23d285c1e54327c439e011 Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.371508 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-8769s"] Nov 27 07:14:11 crc kubenswrapper[4971]: W1127 07:14:11.412499 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38e2d59c_6699_4df0_aa89_e837dd02993a.slice/crio-a87756e4ab5586b3f616ef34eea64002dd13d2a812848d1b3a5b500acc9d0290 WatchSource:0}: Error finding container a87756e4ab5586b3f616ef34eea64002dd13d2a812848d1b3a5b500acc9d0290: Status 404 returned error can't find the container with id a87756e4ab5586b3f616ef34eea64002dd13d2a812848d1b3a5b500acc9d0290 Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.646559 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-7577-account-create-update-v4drs"] Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.823150 4971 generic.go:334] "Generic (PLEG): container finished" podID="96f81740-91f7-431a-be63-94b4fd8fc096" containerID="12983e11ebb77e9c91fe21064f9cdc861aef03f33d2b3124e97e962ac8d19cdc" 
exitCode=0 Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.823241 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-0147-account-create-update-gtcn7" event={"ID":"96f81740-91f7-431a-be63-94b4fd8fc096","Type":"ContainerDied","Data":"12983e11ebb77e9c91fe21064f9cdc861aef03f33d2b3124e97e962ac8d19cdc"} Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.823500 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-0147-account-create-update-gtcn7" event={"ID":"96f81740-91f7-431a-be63-94b4fd8fc096","Type":"ContainerStarted","Data":"c83fbef1998bae8f801743153eba41e664a0f3c24d23d285c1e54327c439e011"} Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.827844 4971 generic.go:334] "Generic (PLEG): container finished" podID="1349dfd7-4e8c-458d-a932-c6a77029f735" containerID="e0a4a62c81fb15153f18b5b439de92b61bd92c1f4a77409cd9393e9468d90fe3" exitCode=0 Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.827924 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-cxhr4" event={"ID":"1349dfd7-4e8c-458d-a932-c6a77029f735","Type":"ContainerDied","Data":"e0a4a62c81fb15153f18b5b439de92b61bd92c1f4a77409cd9393e9468d90fe3"} Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.827978 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-cxhr4" event={"ID":"1349dfd7-4e8c-458d-a932-c6a77029f735","Type":"ContainerStarted","Data":"3a05fa4a0e623c688e838974fab3370a7715df0afcc262c8d51756dca9882f16"} Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.829704 4971 generic.go:334] "Generic (PLEG): container finished" podID="418a63d5-afc7-450f-a99a-3c67fedbbabd" containerID="5362530fcd9fdea86c97351d6e4b2d531937c2c216938c513ebbdfd364fb5d3e" exitCode=0 Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.829778 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-afb3-account-create-update-msgvh" event={"ID":"418a63d5-afc7-450f-a99a-3c67fedbbabd","Type":"ContainerDied","Data":"5362530fcd9fdea86c97351d6e4b2d531937c2c216938c513ebbdfd364fb5d3e"} Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.829806 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-afb3-account-create-update-msgvh" event={"ID":"418a63d5-afc7-450f-a99a-3c67fedbbabd","Type":"ContainerStarted","Data":"748cbba54b55ae27457dca2a954529a063a4a43136c0f6ea375ec751b048464a"} Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.831078 4971 generic.go:334] "Generic (PLEG): container finished" podID="38e2d59c-6699-4df0-aa89-e837dd02993a" containerID="6e1b7a107fa94bae5e4292a3dbefb4ddb70263f27da9c532afc2ecf7f858188d" exitCode=0 Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.831116 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-8769s" event={"ID":"38e2d59c-6699-4df0-aa89-e837dd02993a","Type":"ContainerDied","Data":"6e1b7a107fa94bae5e4292a3dbefb4ddb70263f27da9c532afc2ecf7f858188d"} Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.831133 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-8769s" event={"ID":"38e2d59c-6699-4df0-aa89-e837dd02993a","Type":"ContainerStarted","Data":"a87756e4ab5586b3f616ef34eea64002dd13d2a812848d1b3a5b500acc9d0290"} Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.832221 4971 generic.go:334] "Generic (PLEG): container finished" podID="2c01d5be-466c-4eec-a5b3-261fe0f47d9c" 
containerID="4e798883832bc5d5b16ad0dd76ee1cfdfe53f43556b353147ffab3c5a19ab82e" exitCode=0 Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.832253 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-gn9bn" event={"ID":"2c01d5be-466c-4eec-a5b3-261fe0f47d9c","Type":"ContainerDied","Data":"4e798883832bc5d5b16ad0dd76ee1cfdfe53f43556b353147ffab3c5a19ab82e"} Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.833384 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-7577-account-create-update-v4drs" event={"ID":"ad8e41a8-cb48-4f48-82d3-2293e640bf1e","Type":"ContainerStarted","Data":"5e05a21a7103d6f887778bd82285b294951fce661aa5d2dd8a1def2dbabc1652"} Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.833405 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-7577-account-create-update-v4drs" event={"ID":"ad8e41a8-cb48-4f48-82d3-2293e640bf1e","Type":"ContainerStarted","Data":"ba7072c54d9244dbf8358bc6cbf6b57d3a60ea15cf865ff08d24818ef29f3f91"} Nov 27 07:14:11 crc kubenswrapper[4971]: I1127 07:14:11.899898 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-7577-account-create-update-v4drs" podStartSLOduration=1.8998786349999999 podStartE2EDuration="1.899878635s" podCreationTimestamp="2025-11-27 07:14:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:14:11.893438873 +0000 UTC m=+1290.085482811" watchObservedRunningTime="2025-11-27 07:14:11.899878635 +0000 UTC m=+1290.091922553" Nov 27 07:14:12 crc kubenswrapper[4971]: I1127 07:14:12.529287 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:12 crc kubenswrapper[4971]: I1127 07:14:12.620830 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58bd875f97-nxb6t"] Nov 27 07:14:12 crc kubenswrapper[4971]: I1127 07:14:12.621151 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" podUID="677c92ea-3947-426f-8b7b-ec15116012be" containerName="dnsmasq-dns" containerID="cri-o://3ffdf08e367552c7cc1bc740052a701a1154868344088d9c742eb445669a1713" gracePeriod=10 Nov 27 07:14:12 crc kubenswrapper[4971]: I1127 07:14:12.851414 4971 generic.go:334] "Generic (PLEG): container finished" podID="ad8e41a8-cb48-4f48-82d3-2293e640bf1e" containerID="5e05a21a7103d6f887778bd82285b294951fce661aa5d2dd8a1def2dbabc1652" exitCode=0 Nov 27 07:14:12 crc kubenswrapper[4971]: I1127 07:14:12.851744 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-7577-account-create-update-v4drs" event={"ID":"ad8e41a8-cb48-4f48-82d3-2293e640bf1e","Type":"ContainerDied","Data":"5e05a21a7103d6f887778bd82285b294951fce661aa5d2dd8a1def2dbabc1652"} Nov 27 07:14:12 crc kubenswrapper[4971]: I1127 07:14:12.853972 4971 generic.go:334] "Generic (PLEG): container finished" podID="677c92ea-3947-426f-8b7b-ec15116012be" containerID="3ffdf08e367552c7cc1bc740052a701a1154868344088d9c742eb445669a1713" exitCode=0 Nov 27 07:14:12 crc kubenswrapper[4971]: I1127 07:14:12.854001 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" event={"ID":"677c92ea-3947-426f-8b7b-ec15116012be","Type":"ContainerDied","Data":"3ffdf08e367552c7cc1bc740052a701a1154868344088d9c742eb445669a1713"} Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.203191 
4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.342730 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-ovsdbserver-nb\") pod \"677c92ea-3947-426f-8b7b-ec15116012be\" (UID: \"677c92ea-3947-426f-8b7b-ec15116012be\") " Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.342823 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xlrhv\" (UniqueName: \"kubernetes.io/projected/677c92ea-3947-426f-8b7b-ec15116012be-kube-api-access-xlrhv\") pod \"677c92ea-3947-426f-8b7b-ec15116012be\" (UID: \"677c92ea-3947-426f-8b7b-ec15116012be\") " Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.342904 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-ovsdbserver-sb\") pod \"677c92ea-3947-426f-8b7b-ec15116012be\" (UID: \"677c92ea-3947-426f-8b7b-ec15116012be\") " Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.342962 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-dns-svc\") pod \"677c92ea-3947-426f-8b7b-ec15116012be\" (UID: \"677c92ea-3947-426f-8b7b-ec15116012be\") " Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.343005 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-config\") pod \"677c92ea-3947-426f-8b7b-ec15116012be\" (UID: \"677c92ea-3947-426f-8b7b-ec15116012be\") " Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.352688 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/677c92ea-3947-426f-8b7b-ec15116012be-kube-api-access-xlrhv" (OuterVolumeSpecName: "kube-api-access-xlrhv") pod "677c92ea-3947-426f-8b7b-ec15116012be" (UID: "677c92ea-3947-426f-8b7b-ec15116012be"). InnerVolumeSpecName "kube-api-access-xlrhv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.434123 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-config" (OuterVolumeSpecName: "config") pod "677c92ea-3947-426f-8b7b-ec15116012be" (UID: "677c92ea-3947-426f-8b7b-ec15116012be"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.453398 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.453455 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xlrhv\" (UniqueName: \"kubernetes.io/projected/677c92ea-3947-426f-8b7b-ec15116012be-kube-api-access-xlrhv\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.476900 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "677c92ea-3947-426f-8b7b-ec15116012be" (UID: "677c92ea-3947-426f-8b7b-ec15116012be"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.477097 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "677c92ea-3947-426f-8b7b-ec15116012be" (UID: "677c92ea-3947-426f-8b7b-ec15116012be"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.499504 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-cxhr4" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.507277 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-gn9bn" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.523441 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "677c92ea-3947-426f-8b7b-ec15116012be" (UID: "677c92ea-3947-426f-8b7b-ec15116012be"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.544385 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-afb3-account-create-update-msgvh" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.556887 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.556936 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.556948 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/677c92ea-3947-426f-8b7b-ec15116012be-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.571612 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-8769s" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.576571 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-0147-account-create-update-gtcn7" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.658491 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzcfl\" (UniqueName: \"kubernetes.io/projected/1349dfd7-4e8c-458d-a932-c6a77029f735-kube-api-access-gzcfl\") pod \"1349dfd7-4e8c-458d-a932-c6a77029f735\" (UID: \"1349dfd7-4e8c-458d-a932-c6a77029f735\") " Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.658608 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lh7r7\" (UniqueName: \"kubernetes.io/projected/2c01d5be-466c-4eec-a5b3-261fe0f47d9c-kube-api-access-lh7r7\") pod \"2c01d5be-466c-4eec-a5b3-261fe0f47d9c\" (UID: \"2c01d5be-466c-4eec-a5b3-261fe0f47d9c\") " Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.658709 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c01d5be-466c-4eec-a5b3-261fe0f47d9c-operator-scripts\") pod \"2c01d5be-466c-4eec-a5b3-261fe0f47d9c\" (UID: \"2c01d5be-466c-4eec-a5b3-261fe0f47d9c\") " Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.658765 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/418a63d5-afc7-450f-a99a-3c67fedbbabd-operator-scripts\") pod \"418a63d5-afc7-450f-a99a-3c67fedbbabd\" (UID: \"418a63d5-afc7-450f-a99a-3c67fedbbabd\") " Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.658848 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r96db\" (UniqueName: \"kubernetes.io/projected/418a63d5-afc7-450f-a99a-3c67fedbbabd-kube-api-access-r96db\") pod \"418a63d5-afc7-450f-a99a-3c67fedbbabd\" (UID: \"418a63d5-afc7-450f-a99a-3c67fedbbabd\") " Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.659234 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1349dfd7-4e8c-458d-a932-c6a77029f735-operator-scripts\") pod \"1349dfd7-4e8c-458d-a932-c6a77029f735\" (UID: \"1349dfd7-4e8c-458d-a932-c6a77029f735\") " Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.659589 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/418a63d5-afc7-450f-a99a-3c67fedbbabd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "418a63d5-afc7-450f-a99a-3c67fedbbabd" (UID: "418a63d5-afc7-450f-a99a-3c67fedbbabd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.659804 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c01d5be-466c-4eec-a5b3-261fe0f47d9c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2c01d5be-466c-4eec-a5b3-261fe0f47d9c" (UID: "2c01d5be-466c-4eec-a5b3-261fe0f47d9c"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.659847 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/418a63d5-afc7-450f-a99a-3c67fedbbabd-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.659913 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1349dfd7-4e8c-458d-a932-c6a77029f735-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1349dfd7-4e8c-458d-a932-c6a77029f735" (UID: "1349dfd7-4e8c-458d-a932-c6a77029f735"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.663029 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1349dfd7-4e8c-458d-a932-c6a77029f735-kube-api-access-gzcfl" (OuterVolumeSpecName: "kube-api-access-gzcfl") pod "1349dfd7-4e8c-458d-a932-c6a77029f735" (UID: "1349dfd7-4e8c-458d-a932-c6a77029f735"). InnerVolumeSpecName "kube-api-access-gzcfl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.663279 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/418a63d5-afc7-450f-a99a-3c67fedbbabd-kube-api-access-r96db" (OuterVolumeSpecName: "kube-api-access-r96db") pod "418a63d5-afc7-450f-a99a-3c67fedbbabd" (UID: "418a63d5-afc7-450f-a99a-3c67fedbbabd"). InnerVolumeSpecName "kube-api-access-r96db". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.664026 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c01d5be-466c-4eec-a5b3-261fe0f47d9c-kube-api-access-lh7r7" (OuterVolumeSpecName: "kube-api-access-lh7r7") pod "2c01d5be-466c-4eec-a5b3-261fe0f47d9c" (UID: "2c01d5be-466c-4eec-a5b3-261fe0f47d9c"). InnerVolumeSpecName "kube-api-access-lh7r7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.760652 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lsdx6\" (UniqueName: \"kubernetes.io/projected/96f81740-91f7-431a-be63-94b4fd8fc096-kube-api-access-lsdx6\") pod \"96f81740-91f7-431a-be63-94b4fd8fc096\" (UID: \"96f81740-91f7-431a-be63-94b4fd8fc096\") " Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.760721 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/38e2d59c-6699-4df0-aa89-e837dd02993a-operator-scripts\") pod \"38e2d59c-6699-4df0-aa89-e837dd02993a\" (UID: \"38e2d59c-6699-4df0-aa89-e837dd02993a\") " Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.761038 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6tb2h\" (UniqueName: \"kubernetes.io/projected/38e2d59c-6699-4df0-aa89-e837dd02993a-kube-api-access-6tb2h\") pod \"38e2d59c-6699-4df0-aa89-e837dd02993a\" (UID: \"38e2d59c-6699-4df0-aa89-e837dd02993a\") " Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.761128 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38e2d59c-6699-4df0-aa89-e837dd02993a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "38e2d59c-6699-4df0-aa89-e837dd02993a" (UID: "38e2d59c-6699-4df0-aa89-e837dd02993a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.761161 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96f81740-91f7-431a-be63-94b4fd8fc096-operator-scripts\") pod \"96f81740-91f7-431a-be63-94b4fd8fc096\" (UID: \"96f81740-91f7-431a-be63-94b4fd8fc096\") " Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.761627 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1349dfd7-4e8c-458d-a932-c6a77029f735-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.761645 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/38e2d59c-6699-4df0-aa89-e837dd02993a-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.761655 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzcfl\" (UniqueName: \"kubernetes.io/projected/1349dfd7-4e8c-458d-a932-c6a77029f735-kube-api-access-gzcfl\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.761666 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lh7r7\" (UniqueName: \"kubernetes.io/projected/2c01d5be-466c-4eec-a5b3-261fe0f47d9c-kube-api-access-lh7r7\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.761675 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c01d5be-466c-4eec-a5b3-261fe0f47d9c-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.761671 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/96f81740-91f7-431a-be63-94b4fd8fc096-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "96f81740-91f7-431a-be63-94b4fd8fc096" (UID: "96f81740-91f7-431a-be63-94b4fd8fc096"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.761685 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r96db\" (UniqueName: \"kubernetes.io/projected/418a63d5-afc7-450f-a99a-3c67fedbbabd-kube-api-access-r96db\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.764240 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96f81740-91f7-431a-be63-94b4fd8fc096-kube-api-access-lsdx6" (OuterVolumeSpecName: "kube-api-access-lsdx6") pod "96f81740-91f7-431a-be63-94b4fd8fc096" (UID: "96f81740-91f7-431a-be63-94b4fd8fc096"). InnerVolumeSpecName "kube-api-access-lsdx6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.764334 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38e2d59c-6699-4df0-aa89-e837dd02993a-kube-api-access-6tb2h" (OuterVolumeSpecName: "kube-api-access-6tb2h") pod "38e2d59c-6699-4df0-aa89-e837dd02993a" (UID: "38e2d59c-6699-4df0-aa89-e837dd02993a"). InnerVolumeSpecName "kube-api-access-6tb2h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.862897 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6tb2h\" (UniqueName: \"kubernetes.io/projected/38e2d59c-6699-4df0-aa89-e837dd02993a-kube-api-access-6tb2h\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.862923 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96f81740-91f7-431a-be63-94b4fd8fc096-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.862908 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" event={"ID":"677c92ea-3947-426f-8b7b-ec15116012be","Type":"ContainerDied","Data":"ebd5d2996e1af0ec20b0b5e755ab2f616c6af92a5df0b3fe50e720f6331dce8c"} Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.862940 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58bd875f97-nxb6t" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.862935 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lsdx6\" (UniqueName: \"kubernetes.io/projected/96f81740-91f7-431a-be63-94b4fd8fc096-kube-api-access-lsdx6\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.862980 4971 scope.go:117] "RemoveContainer" containerID="3ffdf08e367552c7cc1bc740052a701a1154868344088d9c742eb445669a1713" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.864693 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-0147-account-create-update-gtcn7" event={"ID":"96f81740-91f7-431a-be63-94b4fd8fc096","Type":"ContainerDied","Data":"c83fbef1998bae8f801743153eba41e664a0f3c24d23d285c1e54327c439e011"} Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.864724 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c83fbef1998bae8f801743153eba41e664a0f3c24d23d285c1e54327c439e011" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.864802 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-0147-account-create-update-gtcn7" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.868180 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-cxhr4" event={"ID":"1349dfd7-4e8c-458d-a932-c6a77029f735","Type":"ContainerDied","Data":"3a05fa4a0e623c688e838974fab3370a7715df0afcc262c8d51756dca9882f16"} Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.868232 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a05fa4a0e623c688e838974fab3370a7715df0afcc262c8d51756dca9882f16" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.868297 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-cxhr4" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.870959 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-afb3-account-create-update-msgvh" event={"ID":"418a63d5-afc7-450f-a99a-3c67fedbbabd","Type":"ContainerDied","Data":"748cbba54b55ae27457dca2a954529a063a4a43136c0f6ea375ec751b048464a"} Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.870997 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="748cbba54b55ae27457dca2a954529a063a4a43136c0f6ea375ec751b048464a" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.871053 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-afb3-account-create-update-msgvh" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.877296 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-8769s" event={"ID":"38e2d59c-6699-4df0-aa89-e837dd02993a","Type":"ContainerDied","Data":"a87756e4ab5586b3f616ef34eea64002dd13d2a812848d1b3a5b500acc9d0290"} Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.877329 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a87756e4ab5586b3f616ef34eea64002dd13d2a812848d1b3a5b500acc9d0290" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.877345 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-8769s" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.879842 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-gn9bn" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.879833 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-gn9bn" event={"ID":"2c01d5be-466c-4eec-a5b3-261fe0f47d9c","Type":"ContainerDied","Data":"1173153e43cd4c18d6b35436605874fbe512e4b845ac19237ee70271be7395cf"} Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.879986 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1173153e43cd4c18d6b35436605874fbe512e4b845ac19237ee70271be7395cf" Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.905449 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58bd875f97-nxb6t"] Nov 27 07:14:13 crc kubenswrapper[4971]: I1127 07:14:13.918310 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58bd875f97-nxb6t"] Nov 27 07:14:14 crc kubenswrapper[4971]: I1127 07:14:14.563099 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="677c92ea-3947-426f-8b7b-ec15116012be" path="/var/lib/kubelet/pods/677c92ea-3947-426f-8b7b-ec15116012be/volumes" Nov 27 07:14:16 crc kubenswrapper[4971]: I1127 07:14:16.906654 4971 generic.go:334] "Generic (PLEG): container finished" podID="6652b368-b37b-48f3-839e-a89982fec862" containerID="4ebd81db73ba88d8cfa0503889ac418a2664333c3598afa00657ff2e14de274c" exitCode=0 Nov 27 07:14:16 crc kubenswrapper[4971]: I1127 07:14:16.906742 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-s6jkh" event={"ID":"6652b368-b37b-48f3-839e-a89982fec862","Type":"ContainerDied","Data":"4ebd81db73ba88d8cfa0503889ac418a2664333c3598afa00657ff2e14de274c"} Nov 27 07:14:17 crc kubenswrapper[4971]: I1127 07:14:17.277981 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-db2qc" podUID="0aa6e1b6-c18b-4a02-a396-880350cde407" containerName="ovn-controller" probeResult="failure" output=< Nov 27 07:14:17 crc kubenswrapper[4971]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Nov 27 07:14:17 crc kubenswrapper[4971]: > Nov 27 07:14:17 crc kubenswrapper[4971]: I1127 07:14:17.520781 4971 scope.go:117] "RemoveContainer" containerID="a1bdaa2fd615824865b633f2e111abeaa788edd29d699b45c3c4467716a512ce" Nov 27 07:14:17 crc kubenswrapper[4971]: I1127 07:14:17.630927 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-7577-account-create-update-v4drs" Nov 27 07:14:17 crc kubenswrapper[4971]: I1127 07:14:17.737158 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad8e41a8-cb48-4f48-82d3-2293e640bf1e-operator-scripts\") pod \"ad8e41a8-cb48-4f48-82d3-2293e640bf1e\" (UID: \"ad8e41a8-cb48-4f48-82d3-2293e640bf1e\") " Nov 27 07:14:17 crc kubenswrapper[4971]: I1127 07:14:17.737387 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lv9s4\" (UniqueName: \"kubernetes.io/projected/ad8e41a8-cb48-4f48-82d3-2293e640bf1e-kube-api-access-lv9s4\") pod \"ad8e41a8-cb48-4f48-82d3-2293e640bf1e\" (UID: \"ad8e41a8-cb48-4f48-82d3-2293e640bf1e\") " Nov 27 07:14:17 crc kubenswrapper[4971]: I1127 07:14:17.738210 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad8e41a8-cb48-4f48-82d3-2293e640bf1e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ad8e41a8-cb48-4f48-82d3-2293e640bf1e" (UID: "ad8e41a8-cb48-4f48-82d3-2293e640bf1e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:17 crc kubenswrapper[4971]: I1127 07:14:17.744493 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad8e41a8-cb48-4f48-82d3-2293e640bf1e-kube-api-access-lv9s4" (OuterVolumeSpecName: "kube-api-access-lv9s4") pod "ad8e41a8-cb48-4f48-82d3-2293e640bf1e" (UID: "ad8e41a8-cb48-4f48-82d3-2293e640bf1e"). InnerVolumeSpecName "kube-api-access-lv9s4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:17 crc kubenswrapper[4971]: I1127 07:14:17.840427 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad8e41a8-cb48-4f48-82d3-2293e640bf1e-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:17 crc kubenswrapper[4971]: I1127 07:14:17.840924 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lv9s4\" (UniqueName: \"kubernetes.io/projected/ad8e41a8-cb48-4f48-82d3-2293e640bf1e-kube-api-access-lv9s4\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:17 crc kubenswrapper[4971]: I1127 07:14:17.923244 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-7577-account-create-update-v4drs" event={"ID":"ad8e41a8-cb48-4f48-82d3-2293e640bf1e","Type":"ContainerDied","Data":"ba7072c54d9244dbf8358bc6cbf6b57d3a60ea15cf865ff08d24818ef29f3f91"} Nov 27 07:14:17 crc kubenswrapper[4971]: I1127 07:14:17.924566 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba7072c54d9244dbf8358bc6cbf6b57d3a60ea15cf865ff08d24818ef29f3f91" Nov 27 07:14:17 crc kubenswrapper[4971]: I1127 07:14:17.923327 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-7577-account-create-update-v4drs" Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.241424 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.350947 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6652b368-b37b-48f3-839e-a89982fec862-combined-ca-bundle\") pod \"6652b368-b37b-48f3-839e-a89982fec862\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.351041 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdpjx\" (UniqueName: \"kubernetes.io/projected/6652b368-b37b-48f3-839e-a89982fec862-kube-api-access-cdpjx\") pod \"6652b368-b37b-48f3-839e-a89982fec862\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.351115 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6652b368-b37b-48f3-839e-a89982fec862-dispersionconf\") pod \"6652b368-b37b-48f3-839e-a89982fec862\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.351173 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6652b368-b37b-48f3-839e-a89982fec862-etc-swift\") pod \"6652b368-b37b-48f3-839e-a89982fec862\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.351337 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6652b368-b37b-48f3-839e-a89982fec862-scripts\") pod \"6652b368-b37b-48f3-839e-a89982fec862\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.351409 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6652b368-b37b-48f3-839e-a89982fec862-ring-data-devices\") pod \"6652b368-b37b-48f3-839e-a89982fec862\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.351449 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6652b368-b37b-48f3-839e-a89982fec862-swiftconf\") pod \"6652b368-b37b-48f3-839e-a89982fec862\" (UID: \"6652b368-b37b-48f3-839e-a89982fec862\") " Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.352636 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6652b368-b37b-48f3-839e-a89982fec862-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "6652b368-b37b-48f3-839e-a89982fec862" (UID: "6652b368-b37b-48f3-839e-a89982fec862"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.352630 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6652b368-b37b-48f3-839e-a89982fec862-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "6652b368-b37b-48f3-839e-a89982fec862" (UID: "6652b368-b37b-48f3-839e-a89982fec862"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.356849 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6652b368-b37b-48f3-839e-a89982fec862-kube-api-access-cdpjx" (OuterVolumeSpecName: "kube-api-access-cdpjx") pod "6652b368-b37b-48f3-839e-a89982fec862" (UID: "6652b368-b37b-48f3-839e-a89982fec862"). InnerVolumeSpecName "kube-api-access-cdpjx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.361767 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6652b368-b37b-48f3-839e-a89982fec862-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "6652b368-b37b-48f3-839e-a89982fec862" (UID: "6652b368-b37b-48f3-839e-a89982fec862"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.375292 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6652b368-b37b-48f3-839e-a89982fec862-scripts" (OuterVolumeSpecName: "scripts") pod "6652b368-b37b-48f3-839e-a89982fec862" (UID: "6652b368-b37b-48f3-839e-a89982fec862"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.377046 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6652b368-b37b-48f3-839e-a89982fec862-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6652b368-b37b-48f3-839e-a89982fec862" (UID: "6652b368-b37b-48f3-839e-a89982fec862"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.378791 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6652b368-b37b-48f3-839e-a89982fec862-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "6652b368-b37b-48f3-839e-a89982fec862" (UID: "6652b368-b37b-48f3-839e-a89982fec862"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.453708 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6652b368-b37b-48f3-839e-a89982fec862-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.454003 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdpjx\" (UniqueName: \"kubernetes.io/projected/6652b368-b37b-48f3-839e-a89982fec862-kube-api-access-cdpjx\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.454082 4971 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6652b368-b37b-48f3-839e-a89982fec862-dispersionconf\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.454153 4971 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6652b368-b37b-48f3-839e-a89982fec862-etc-swift\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.454274 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6652b368-b37b-48f3-839e-a89982fec862-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.454355 4971 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6652b368-b37b-48f3-839e-a89982fec862-ring-data-devices\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.454446 4971 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6652b368-b37b-48f3-839e-a89982fec862-swiftconf\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.934193 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-s6jkh" event={"ID":"6652b368-b37b-48f3-839e-a89982fec862","Type":"ContainerDied","Data":"d6c505da5c7f45453d1a3c792c56478334d88d5e0b4e98d2ec764159b1e6adb0"} Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.934243 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6c505da5c7f45453d1a3c792c56478334d88d5e0b4e98d2ec764159b1e6adb0" Nov 27 07:14:18 crc kubenswrapper[4971]: I1127 07:14:18.935605 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-s6jkh" Nov 27 07:14:19 crc kubenswrapper[4971]: I1127 07:14:19.169888 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:19 crc kubenswrapper[4971]: I1127 07:14:19.184410 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift\") pod \"swift-storage-0\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " pod="openstack/swift-storage-0" Nov 27 07:14:19 crc kubenswrapper[4971]: I1127 07:14:19.434984 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Nov 27 07:14:19 crc kubenswrapper[4971]: I1127 07:14:19.949559 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Nov 27 07:14:19 crc kubenswrapper[4971]: W1127 07:14:19.958719 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9ccc9bd_d955_4853_986f_95597f2c70e6.slice/crio-e0c53a5599de41f3e42da5a20ab59718975f8b80a1f6f15d525a576f75869862 WatchSource:0}: Error finding container e0c53a5599de41f3e42da5a20ab59718975f8b80a1f6f15d525a576f75869862: Status 404 returned error can't find the container with id e0c53a5599de41f3e42da5a20ab59718975f8b80a1f6f15d525a576f75869862 Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.733919 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-z4gfl"] Nov 27 07:14:20 crc kubenswrapper[4971]: E1127 07:14:20.734891 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="418a63d5-afc7-450f-a99a-3c67fedbbabd" containerName="mariadb-account-create-update" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.734919 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="418a63d5-afc7-450f-a99a-3c67fedbbabd" containerName="mariadb-account-create-update" Nov 27 07:14:20 crc kubenswrapper[4971]: E1127 07:14:20.734936 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="677c92ea-3947-426f-8b7b-ec15116012be" containerName="init" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.735092 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="677c92ea-3947-426f-8b7b-ec15116012be" containerName="init" Nov 27 07:14:20 crc kubenswrapper[4971]: E1127 07:14:20.735110 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c01d5be-466c-4eec-a5b3-261fe0f47d9c" containerName="mariadb-database-create" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.735121 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c01d5be-466c-4eec-a5b3-261fe0f47d9c" containerName="mariadb-database-create" Nov 27 07:14:20 crc kubenswrapper[4971]: E1127 07:14:20.735137 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6652b368-b37b-48f3-839e-a89982fec862" containerName="swift-ring-rebalance" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.735142 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6652b368-b37b-48f3-839e-a89982fec862" containerName="swift-ring-rebalance" Nov 27 07:14:20 crc kubenswrapper[4971]: E1127 07:14:20.735157 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38e2d59c-6699-4df0-aa89-e837dd02993a" containerName="mariadb-database-create" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.735162 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="38e2d59c-6699-4df0-aa89-e837dd02993a" containerName="mariadb-database-create" Nov 27 07:14:20 crc kubenswrapper[4971]: E1127 07:14:20.735172 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="677c92ea-3947-426f-8b7b-ec15116012be" containerName="dnsmasq-dns" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.735213 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="677c92ea-3947-426f-8b7b-ec15116012be" containerName="dnsmasq-dns" Nov 27 07:14:20 crc kubenswrapper[4971]: E1127 07:14:20.735222 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96f81740-91f7-431a-be63-94b4fd8fc096" containerName="mariadb-account-create-update" Nov 27 
07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.735228 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="96f81740-91f7-431a-be63-94b4fd8fc096" containerName="mariadb-account-create-update" Nov 27 07:14:20 crc kubenswrapper[4971]: E1127 07:14:20.735240 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad8e41a8-cb48-4f48-82d3-2293e640bf1e" containerName="mariadb-account-create-update" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.735332 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad8e41a8-cb48-4f48-82d3-2293e640bf1e" containerName="mariadb-account-create-update" Nov 27 07:14:20 crc kubenswrapper[4971]: E1127 07:14:20.735344 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1349dfd7-4e8c-458d-a932-c6a77029f735" containerName="mariadb-database-create" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.735350 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="1349dfd7-4e8c-458d-a932-c6a77029f735" containerName="mariadb-database-create" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.736738 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="1349dfd7-4e8c-458d-a932-c6a77029f735" containerName="mariadb-database-create" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.736763 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c01d5be-466c-4eec-a5b3-261fe0f47d9c" containerName="mariadb-database-create" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.736773 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="96f81740-91f7-431a-be63-94b4fd8fc096" containerName="mariadb-account-create-update" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.736785 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="677c92ea-3947-426f-8b7b-ec15116012be" containerName="dnsmasq-dns" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.736791 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="6652b368-b37b-48f3-839e-a89982fec862" containerName="swift-ring-rebalance" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.736801 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad8e41a8-cb48-4f48-82d3-2293e640bf1e" containerName="mariadb-account-create-update" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.736809 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="38e2d59c-6699-4df0-aa89-e837dd02993a" containerName="mariadb-database-create" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.736823 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="418a63d5-afc7-450f-a99a-3c67fedbbabd" containerName="mariadb-account-create-update" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.737459 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-z4gfl" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.746274 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-z4gfl"] Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.779295 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.779908 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-tzwn4" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.900352 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c5689fc8-154a-4710-a2d6-9bc41e927f77-db-sync-config-data\") pod \"glance-db-sync-z4gfl\" (UID: \"c5689fc8-154a-4710-a2d6-9bc41e927f77\") " pod="openstack/glance-db-sync-z4gfl" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.900430 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cg4h\" (UniqueName: \"kubernetes.io/projected/c5689fc8-154a-4710-a2d6-9bc41e927f77-kube-api-access-8cg4h\") pod \"glance-db-sync-z4gfl\" (UID: \"c5689fc8-154a-4710-a2d6-9bc41e927f77\") " pod="openstack/glance-db-sync-z4gfl" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.900464 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5689fc8-154a-4710-a2d6-9bc41e927f77-combined-ca-bundle\") pod \"glance-db-sync-z4gfl\" (UID: \"c5689fc8-154a-4710-a2d6-9bc41e927f77\") " pod="openstack/glance-db-sync-z4gfl" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.900511 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5689fc8-154a-4710-a2d6-9bc41e927f77-config-data\") pod \"glance-db-sync-z4gfl\" (UID: \"c5689fc8-154a-4710-a2d6-9bc41e927f77\") " pod="openstack/glance-db-sync-z4gfl" Nov 27 07:14:20 crc kubenswrapper[4971]: I1127 07:14:20.953957 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerStarted","Data":"e0c53a5599de41f3e42da5a20ab59718975f8b80a1f6f15d525a576f75869862"} Nov 27 07:14:21 crc kubenswrapper[4971]: I1127 07:14:21.001627 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c5689fc8-154a-4710-a2d6-9bc41e927f77-db-sync-config-data\") pod \"glance-db-sync-z4gfl\" (UID: \"c5689fc8-154a-4710-a2d6-9bc41e927f77\") " pod="openstack/glance-db-sync-z4gfl" Nov 27 07:14:21 crc kubenswrapper[4971]: I1127 07:14:21.001823 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cg4h\" (UniqueName: \"kubernetes.io/projected/c5689fc8-154a-4710-a2d6-9bc41e927f77-kube-api-access-8cg4h\") pod \"glance-db-sync-z4gfl\" (UID: \"c5689fc8-154a-4710-a2d6-9bc41e927f77\") " pod="openstack/glance-db-sync-z4gfl" Nov 27 07:14:21 crc kubenswrapper[4971]: I1127 07:14:21.001879 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5689fc8-154a-4710-a2d6-9bc41e927f77-combined-ca-bundle\") pod \"glance-db-sync-z4gfl\" (UID: \"c5689fc8-154a-4710-a2d6-9bc41e927f77\") " 
pod="openstack/glance-db-sync-z4gfl" Nov 27 07:14:21 crc kubenswrapper[4971]: I1127 07:14:21.002064 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5689fc8-154a-4710-a2d6-9bc41e927f77-config-data\") pod \"glance-db-sync-z4gfl\" (UID: \"c5689fc8-154a-4710-a2d6-9bc41e927f77\") " pod="openstack/glance-db-sync-z4gfl" Nov 27 07:14:21 crc kubenswrapper[4971]: I1127 07:14:21.009408 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c5689fc8-154a-4710-a2d6-9bc41e927f77-db-sync-config-data\") pod \"glance-db-sync-z4gfl\" (UID: \"c5689fc8-154a-4710-a2d6-9bc41e927f77\") " pod="openstack/glance-db-sync-z4gfl" Nov 27 07:14:21 crc kubenswrapper[4971]: I1127 07:14:21.013190 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5689fc8-154a-4710-a2d6-9bc41e927f77-combined-ca-bundle\") pod \"glance-db-sync-z4gfl\" (UID: \"c5689fc8-154a-4710-a2d6-9bc41e927f77\") " pod="openstack/glance-db-sync-z4gfl" Nov 27 07:14:21 crc kubenswrapper[4971]: I1127 07:14:21.013317 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5689fc8-154a-4710-a2d6-9bc41e927f77-config-data\") pod \"glance-db-sync-z4gfl\" (UID: \"c5689fc8-154a-4710-a2d6-9bc41e927f77\") " pod="openstack/glance-db-sync-z4gfl" Nov 27 07:14:21 crc kubenswrapper[4971]: I1127 07:14:21.023607 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cg4h\" (UniqueName: \"kubernetes.io/projected/c5689fc8-154a-4710-a2d6-9bc41e927f77-kube-api-access-8cg4h\") pod \"glance-db-sync-z4gfl\" (UID: \"c5689fc8-154a-4710-a2d6-9bc41e927f77\") " pod="openstack/glance-db-sync-z4gfl" Nov 27 07:14:21 crc kubenswrapper[4971]: I1127 07:14:21.108464 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-z4gfl" Nov 27 07:14:21 crc kubenswrapper[4971]: I1127 07:14:21.725049 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-z4gfl"] Nov 27 07:14:21 crc kubenswrapper[4971]: W1127 07:14:21.738859 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc5689fc8_154a_4710_a2d6_9bc41e927f77.slice/crio-7fdabaa7fcb0627c6583d1b96a6d549a0391d9e1b6ac02c4d067b4a152a4f2e5 WatchSource:0}: Error finding container 7fdabaa7fcb0627c6583d1b96a6d549a0391d9e1b6ac02c4d067b4a152a4f2e5: Status 404 returned error can't find the container with id 7fdabaa7fcb0627c6583d1b96a6d549a0391d9e1b6ac02c4d067b4a152a4f2e5 Nov 27 07:14:21 crc kubenswrapper[4971]: I1127 07:14:21.963105 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerStarted","Data":"d4737f67b50d857fb0c555964409bd7354a856636790cbe70eea9e16bffacc6e"} Nov 27 07:14:21 crc kubenswrapper[4971]: I1127 07:14:21.963153 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerStarted","Data":"6c4925ec24ba366b0b0454d48d8855400c5d48953e95c1f7428155fd84127936"} Nov 27 07:14:21 crc kubenswrapper[4971]: I1127 07:14:21.963166 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerStarted","Data":"b7eb7df2e485a291e4bff098a5b8b04d0f94dc8a687ed53e0061a899159d7b70"} Nov 27 07:14:21 crc kubenswrapper[4971]: I1127 07:14:21.963174 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerStarted","Data":"dc6a6bc3b9ba7cb3360f786ead574e2d92d8ae060c201dc5a7136ed40e797c3d"} Nov 27 07:14:21 crc kubenswrapper[4971]: I1127 07:14:21.964173 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-z4gfl" event={"ID":"c5689fc8-154a-4710-a2d6-9bc41e927f77","Type":"ContainerStarted","Data":"7fdabaa7fcb0627c6583d1b96a6d549a0391d9e1b6ac02c4d067b4a152a4f2e5"} Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.275430 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-db2qc" podUID="0aa6e1b6-c18b-4a02-a396-880350cde407" containerName="ovn-controller" probeResult="failure" output=< Nov 27 07:14:22 crc kubenswrapper[4971]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Nov 27 07:14:22 crc kubenswrapper[4971]: > Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.293662 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.297846 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.620720 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-db2qc-config-462hw"] Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.622270 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.625255 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.632626 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-db2qc-config-462hw"] Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.739699 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b83e25d7-c01c-47ee-9395-13fa76e54bc9-var-run-ovn\") pod \"ovn-controller-db2qc-config-462hw\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.739773 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b83e25d7-c01c-47ee-9395-13fa76e54bc9-additional-scripts\") pod \"ovn-controller-db2qc-config-462hw\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.739940 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b83e25d7-c01c-47ee-9395-13fa76e54bc9-var-log-ovn\") pod \"ovn-controller-db2qc-config-462hw\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.739986 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sh8cs\" (UniqueName: \"kubernetes.io/projected/b83e25d7-c01c-47ee-9395-13fa76e54bc9-kube-api-access-sh8cs\") pod \"ovn-controller-db2qc-config-462hw\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.741225 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b83e25d7-c01c-47ee-9395-13fa76e54bc9-var-run\") pod \"ovn-controller-db2qc-config-462hw\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.741286 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b83e25d7-c01c-47ee-9395-13fa76e54bc9-scripts\") pod \"ovn-controller-db2qc-config-462hw\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.844115 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sh8cs\" (UniqueName: \"kubernetes.io/projected/b83e25d7-c01c-47ee-9395-13fa76e54bc9-kube-api-access-sh8cs\") pod \"ovn-controller-db2qc-config-462hw\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.844220 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: 
\"kubernetes.io/host-path/b83e25d7-c01c-47ee-9395-13fa76e54bc9-var-run\") pod \"ovn-controller-db2qc-config-462hw\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.844255 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b83e25d7-c01c-47ee-9395-13fa76e54bc9-scripts\") pod \"ovn-controller-db2qc-config-462hw\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.844281 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b83e25d7-c01c-47ee-9395-13fa76e54bc9-var-run-ovn\") pod \"ovn-controller-db2qc-config-462hw\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.844326 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b83e25d7-c01c-47ee-9395-13fa76e54bc9-additional-scripts\") pod \"ovn-controller-db2qc-config-462hw\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.844583 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b83e25d7-c01c-47ee-9395-13fa76e54bc9-var-run-ovn\") pod \"ovn-controller-db2qc-config-462hw\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.844601 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b83e25d7-c01c-47ee-9395-13fa76e54bc9-var-run\") pod \"ovn-controller-db2qc-config-462hw\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.845257 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b83e25d7-c01c-47ee-9395-13fa76e54bc9-additional-scripts\") pod \"ovn-controller-db2qc-config-462hw\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.845402 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b83e25d7-c01c-47ee-9395-13fa76e54bc9-var-log-ovn\") pod \"ovn-controller-db2qc-config-462hw\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.845496 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b83e25d7-c01c-47ee-9395-13fa76e54bc9-var-log-ovn\") pod \"ovn-controller-db2qc-config-462hw\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.846321 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/b83e25d7-c01c-47ee-9395-13fa76e54bc9-scripts\") pod \"ovn-controller-db2qc-config-462hw\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.863321 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sh8cs\" (UniqueName: \"kubernetes.io/projected/b83e25d7-c01c-47ee-9395-13fa76e54bc9-kube-api-access-sh8cs\") pod \"ovn-controller-db2qc-config-462hw\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:22 crc kubenswrapper[4971]: I1127 07:14:22.947046 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:23 crc kubenswrapper[4971]: I1127 07:14:23.404030 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-db2qc-config-462hw"] Nov 27 07:14:23 crc kubenswrapper[4971]: I1127 07:14:23.993564 4971 generic.go:334] "Generic (PLEG): container finished" podID="b83e25d7-c01c-47ee-9395-13fa76e54bc9" containerID="62fd3284f1d80c7ec42f4da6f1463c1ef338c9ed77298a1ace876099604e1ab1" exitCode=0 Nov 27 07:14:23 crc kubenswrapper[4971]: I1127 07:14:23.993632 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-db2qc-config-462hw" event={"ID":"b83e25d7-c01c-47ee-9395-13fa76e54bc9","Type":"ContainerDied","Data":"62fd3284f1d80c7ec42f4da6f1463c1ef338c9ed77298a1ace876099604e1ab1"} Nov 27 07:14:23 crc kubenswrapper[4971]: I1127 07:14:23.993879 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-db2qc-config-462hw" event={"ID":"b83e25d7-c01c-47ee-9395-13fa76e54bc9","Type":"ContainerStarted","Data":"6045be9780fd3083b10b302f652889103bb8923d10ef7ac08f0a5a5fc25c8b78"} Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.012950 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerStarted","Data":"fad54635f828db66a56bd717dd26946c6194a973dd020bda60d63ac8508944b7"} Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.013261 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerStarted","Data":"3b8d32d89ca168e8803a3c1d366cfdd72575c01e7332b5f3d73f865eaf632a07"} Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.013273 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerStarted","Data":"f747a88650d47a0b28abe01bd9c86b1ce37125bcbf5d4510f59880020f4c6436"} Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.313032 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.494047 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b83e25d7-c01c-47ee-9395-13fa76e54bc9-additional-scripts\") pod \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.494114 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b83e25d7-c01c-47ee-9395-13fa76e54bc9-var-run\") pod \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.494159 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b83e25d7-c01c-47ee-9395-13fa76e54bc9-var-run-ovn\") pod \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.494238 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b83e25d7-c01c-47ee-9395-13fa76e54bc9-scripts\") pod \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.494260 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sh8cs\" (UniqueName: \"kubernetes.io/projected/b83e25d7-c01c-47ee-9395-13fa76e54bc9-kube-api-access-sh8cs\") pod \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.494274 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b83e25d7-c01c-47ee-9395-13fa76e54bc9-var-run" (OuterVolumeSpecName: "var-run") pod "b83e25d7-c01c-47ee-9395-13fa76e54bc9" (UID: "b83e25d7-c01c-47ee-9395-13fa76e54bc9"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.494339 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b83e25d7-c01c-47ee-9395-13fa76e54bc9-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "b83e25d7-c01c-47ee-9395-13fa76e54bc9" (UID: "b83e25d7-c01c-47ee-9395-13fa76e54bc9"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.494291 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b83e25d7-c01c-47ee-9395-13fa76e54bc9-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "b83e25d7-c01c-47ee-9395-13fa76e54bc9" (UID: "b83e25d7-c01c-47ee-9395-13fa76e54bc9"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.494295 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b83e25d7-c01c-47ee-9395-13fa76e54bc9-var-log-ovn\") pod \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\" (UID: \"b83e25d7-c01c-47ee-9395-13fa76e54bc9\") " Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.494925 4971 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b83e25d7-c01c-47ee-9395-13fa76e54bc9-var-run\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.494939 4971 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b83e25d7-c01c-47ee-9395-13fa76e54bc9-var-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.494947 4971 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b83e25d7-c01c-47ee-9395-13fa76e54bc9-var-log-ovn\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.495458 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b83e25d7-c01c-47ee-9395-13fa76e54bc9-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "b83e25d7-c01c-47ee-9395-13fa76e54bc9" (UID: "b83e25d7-c01c-47ee-9395-13fa76e54bc9"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.495672 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b83e25d7-c01c-47ee-9395-13fa76e54bc9-scripts" (OuterVolumeSpecName: "scripts") pod "b83e25d7-c01c-47ee-9395-13fa76e54bc9" (UID: "b83e25d7-c01c-47ee-9395-13fa76e54bc9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.500794 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b83e25d7-c01c-47ee-9395-13fa76e54bc9-kube-api-access-sh8cs" (OuterVolumeSpecName: "kube-api-access-sh8cs") pod "b83e25d7-c01c-47ee-9395-13fa76e54bc9" (UID: "b83e25d7-c01c-47ee-9395-13fa76e54bc9"). InnerVolumeSpecName "kube-api-access-sh8cs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.596760 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b83e25d7-c01c-47ee-9395-13fa76e54bc9-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.596805 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sh8cs\" (UniqueName: \"kubernetes.io/projected/b83e25d7-c01c-47ee-9395-13fa76e54bc9-kube-api-access-sh8cs\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:25 crc kubenswrapper[4971]: I1127 07:14:25.596818 4971 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b83e25d7-c01c-47ee-9395-13fa76e54bc9-additional-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.022452 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-db2qc-config-462hw" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.022539 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-db2qc-config-462hw" event={"ID":"b83e25d7-c01c-47ee-9395-13fa76e54bc9","Type":"ContainerDied","Data":"6045be9780fd3083b10b302f652889103bb8923d10ef7ac08f0a5a5fc25c8b78"} Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.022589 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6045be9780fd3083b10b302f652889103bb8923d10ef7ac08f0a5a5fc25c8b78" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.026874 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerStarted","Data":"7170edadd0854106871150c55e357e6c6f8c393bb654d550d3f39d6435d67112"} Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.404522 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-db2qc-config-462hw"] Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.412029 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-db2qc-config-462hw"] Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.414256 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.414305 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.451675 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-db2qc-config-4mv2z"] Nov 27 07:14:26 crc kubenswrapper[4971]: E1127 07:14:26.452067 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b83e25d7-c01c-47ee-9395-13fa76e54bc9" containerName="ovn-config" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.452080 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b83e25d7-c01c-47ee-9395-13fa76e54bc9" containerName="ovn-config" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.452286 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b83e25d7-c01c-47ee-9395-13fa76e54bc9" containerName="ovn-config" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.453683 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.455372 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.462509 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-db2qc-config-4mv2z"] Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.564957 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b83e25d7-c01c-47ee-9395-13fa76e54bc9" path="/var/lib/kubelet/pods/b83e25d7-c01c-47ee-9395-13fa76e54bc9/volumes" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.614568 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svjfb\" (UniqueName: \"kubernetes.io/projected/9cd1cdff-852f-4635-851f-6c16a8204a54-kube-api-access-svjfb\") pod \"ovn-controller-db2qc-config-4mv2z\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.614637 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9cd1cdff-852f-4635-851f-6c16a8204a54-additional-scripts\") pod \"ovn-controller-db2qc-config-4mv2z\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.614780 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9cd1cdff-852f-4635-851f-6c16a8204a54-var-run\") pod \"ovn-controller-db2qc-config-4mv2z\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.615115 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9cd1cdff-852f-4635-851f-6c16a8204a54-var-run-ovn\") pod \"ovn-controller-db2qc-config-4mv2z\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.615180 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9cd1cdff-852f-4635-851f-6c16a8204a54-var-log-ovn\") pod \"ovn-controller-db2qc-config-4mv2z\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.615213 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9cd1cdff-852f-4635-851f-6c16a8204a54-scripts\") pod \"ovn-controller-db2qc-config-4mv2z\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.717211 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9cd1cdff-852f-4635-851f-6c16a8204a54-additional-scripts\") pod \"ovn-controller-db2qc-config-4mv2z\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " 
pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.717285 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9cd1cdff-852f-4635-851f-6c16a8204a54-var-run\") pod \"ovn-controller-db2qc-config-4mv2z\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.717352 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9cd1cdff-852f-4635-851f-6c16a8204a54-var-run-ovn\") pod \"ovn-controller-db2qc-config-4mv2z\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.717381 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9cd1cdff-852f-4635-851f-6c16a8204a54-var-log-ovn\") pod \"ovn-controller-db2qc-config-4mv2z\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.717402 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9cd1cdff-852f-4635-851f-6c16a8204a54-scripts\") pod \"ovn-controller-db2qc-config-4mv2z\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.717444 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svjfb\" (UniqueName: \"kubernetes.io/projected/9cd1cdff-852f-4635-851f-6c16a8204a54-kube-api-access-svjfb\") pod \"ovn-controller-db2qc-config-4mv2z\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.717572 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9cd1cdff-852f-4635-851f-6c16a8204a54-var-run\") pod \"ovn-controller-db2qc-config-4mv2z\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.717649 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9cd1cdff-852f-4635-851f-6c16a8204a54-var-run-ovn\") pod \"ovn-controller-db2qc-config-4mv2z\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.717713 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9cd1cdff-852f-4635-851f-6c16a8204a54-var-log-ovn\") pod \"ovn-controller-db2qc-config-4mv2z\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.718501 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9cd1cdff-852f-4635-851f-6c16a8204a54-additional-scripts\") pod \"ovn-controller-db2qc-config-4mv2z\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " 
pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.719920 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9cd1cdff-852f-4635-851f-6c16a8204a54-scripts\") pod \"ovn-controller-db2qc-config-4mv2z\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.733555 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svjfb\" (UniqueName: \"kubernetes.io/projected/9cd1cdff-852f-4635-851f-6c16a8204a54-kube-api-access-svjfb\") pod \"ovn-controller-db2qc-config-4mv2z\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:26 crc kubenswrapper[4971]: I1127 07:14:26.780498 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:27 crc kubenswrapper[4971]: I1127 07:14:27.040922 4971 generic.go:334] "Generic (PLEG): container finished" podID="640c3829-d2e9-49e1-82e3-bd213aa992dd" containerID="ddb554636617ff38027bc6df7fed10cfc5e39e121a5847f244d29b6eed3b5a39" exitCode=0 Nov 27 07:14:27 crc kubenswrapper[4971]: I1127 07:14:27.040964 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"640c3829-d2e9-49e1-82e3-bd213aa992dd","Type":"ContainerDied","Data":"ddb554636617ff38027bc6df7fed10cfc5e39e121a5847f244d29b6eed3b5a39"} Nov 27 07:14:27 crc kubenswrapper[4971]: I1127 07:14:27.281523 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-db2qc" Nov 27 07:14:30 crc kubenswrapper[4971]: I1127 07:14:30.066473 4971 generic.go:334] "Generic (PLEG): container finished" podID="d6439a3c-ee26-467c-8e42-5abbbf390f16" containerID="47223c6c413b4fa18667b07a2c8e720f342e5906c3e3e926a37060f670324a52" exitCode=0 Nov 27 07:14:30 crc kubenswrapper[4971]: I1127 07:14:30.066575 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d6439a3c-ee26-467c-8e42-5abbbf390f16","Type":"ContainerDied","Data":"47223c6c413b4fa18667b07a2c8e720f342e5906c3e3e926a37060f670324a52"} Nov 27 07:14:33 crc kubenswrapper[4971]: I1127 07:14:33.786372 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-db2qc-config-4mv2z"] Nov 27 07:14:33 crc kubenswrapper[4971]: W1127 07:14:33.800997 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9cd1cdff_852f_4635_851f_6c16a8204a54.slice/crio-49cd890efa651c94eee2179ac6c3bae33f7bd451853eab11166a8bfcc9396617 WatchSource:0}: Error finding container 49cd890efa651c94eee2179ac6c3bae33f7bd451853eab11166a8bfcc9396617: Status 404 returned error can't find the container with id 49cd890efa651c94eee2179ac6c3bae33f7bd451853eab11166a8bfcc9396617 Nov 27 07:14:34 crc kubenswrapper[4971]: I1127 07:14:34.166040 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-db2qc-config-4mv2z" event={"ID":"9cd1cdff-852f-4635-851f-6c16a8204a54","Type":"ContainerStarted","Data":"e5ed27503d1f4f17194f847b1b73b48f6fe49bdc0adabb45103b24c97ddb67c7"} Nov 27 07:14:34 crc kubenswrapper[4971]: I1127 07:14:34.166390 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-db2qc-config-4mv2z" 
event={"ID":"9cd1cdff-852f-4635-851f-6c16a8204a54","Type":"ContainerStarted","Data":"49cd890efa651c94eee2179ac6c3bae33f7bd451853eab11166a8bfcc9396617"} Nov 27 07:14:34 crc kubenswrapper[4971]: I1127 07:14:34.172276 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"640c3829-d2e9-49e1-82e3-bd213aa992dd","Type":"ContainerStarted","Data":"f5b92bc920247db85a57df75b23c5a3d6d3ba39c4b4024014d369972c4b8162a"} Nov 27 07:14:34 crc kubenswrapper[4971]: I1127 07:14:34.172672 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:14:34 crc kubenswrapper[4971]: I1127 07:14:34.191943 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-db2qc-config-4mv2z" podStartSLOduration=8.1919204 podStartE2EDuration="8.1919204s" podCreationTimestamp="2025-11-27 07:14:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:14:34.184085708 +0000 UTC m=+1312.376129626" watchObservedRunningTime="2025-11-27 07:14:34.1919204 +0000 UTC m=+1312.383964338" Nov 27 07:14:34 crc kubenswrapper[4971]: I1127 07:14:34.208849 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=43.402055258 podStartE2EDuration="1m18.208829618s" podCreationTimestamp="2025-11-27 07:13:16 +0000 UTC" firstStartedPulling="2025-11-27 07:13:18.174900321 +0000 UTC m=+1236.366944239" lastFinishedPulling="2025-11-27 07:13:52.981674681 +0000 UTC m=+1271.173718599" observedRunningTime="2025-11-27 07:14:34.207943182 +0000 UTC m=+1312.399987100" watchObservedRunningTime="2025-11-27 07:14:34.208829618 +0000 UTC m=+1312.400873556" Nov 27 07:14:34 crc kubenswrapper[4971]: I1127 07:14:34.228838 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerStarted","Data":"d156d1511b10631f281c42115007fa1a0c13d1d792aafd4fd1d7868813992990"} Nov 27 07:14:34 crc kubenswrapper[4971]: I1127 07:14:34.228902 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerStarted","Data":"61eee222331bc4cf51d73a2109e6aaf653d19b6ab409e2c0ab5b7ddfc9466313"} Nov 27 07:14:34 crc kubenswrapper[4971]: I1127 07:14:34.228915 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerStarted","Data":"9d10d81588ceda578cec676b35a37e455caa807351436843b76133d0dfb2603b"} Nov 27 07:14:34 crc kubenswrapper[4971]: I1127 07:14:34.231847 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d6439a3c-ee26-467c-8e42-5abbbf390f16","Type":"ContainerStarted","Data":"77882cbee517c91fc3e1f3a6a066be0183a0d5dff26c18bb82c636aa29cd5b4c"} Nov 27 07:14:34 crc kubenswrapper[4971]: I1127 07:14:34.233456 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Nov 27 07:14:34 crc kubenswrapper[4971]: I1127 07:14:34.274896 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371957.579905 podStartE2EDuration="1m19.274871364s" podCreationTimestamp="2025-11-27 07:13:15 +0000 UTC" firstStartedPulling="2025-11-27 07:13:17.689733109 
+0000 UTC m=+1235.881777027" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:14:34.271728665 +0000 UTC m=+1312.463772603" watchObservedRunningTime="2025-11-27 07:14:34.274871364 +0000 UTC m=+1312.466915302" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.263086 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerStarted","Data":"85a6791fcdc604b55a12b82702e281ab125eb3d1d4235331866d471d20e24d57"} Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.265365 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerStarted","Data":"c21be005b4f8466d61ef2da25c8b9ce2faf1be2408be9a01a2cad03ede7d5772"} Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.265503 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerStarted","Data":"4a867f795f4a9c59b6d95924698a7ae6fcb777907d413b4d91002386f13f43ce"} Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.265701 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerStarted","Data":"91caa3649b14fd1b0674535f37443068dd6301ac20d525ca2334e554de92f5ce"} Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.267044 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-z4gfl" event={"ID":"c5689fc8-154a-4710-a2d6-9bc41e927f77","Type":"ContainerStarted","Data":"c878cae761f2b1c9abbbb40694a91fe64be7c38a2b289d4333edaeb94b60ad38"} Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.271630 4971 generic.go:334] "Generic (PLEG): container finished" podID="9cd1cdff-852f-4635-851f-6c16a8204a54" containerID="e5ed27503d1f4f17194f847b1b73b48f6fe49bdc0adabb45103b24c97ddb67c7" exitCode=0 Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.271674 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-db2qc-config-4mv2z" event={"ID":"9cd1cdff-852f-4635-851f-6c16a8204a54","Type":"ContainerDied","Data":"e5ed27503d1f4f17194f847b1b73b48f6fe49bdc0adabb45103b24c97ddb67c7"} Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.305903 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=19.722817133 podStartE2EDuration="33.305884932s" podCreationTimestamp="2025-11-27 07:14:02 +0000 UTC" firstStartedPulling="2025-11-27 07:14:19.960330322 +0000 UTC m=+1298.152374240" lastFinishedPulling="2025-11-27 07:14:33.543398121 +0000 UTC m=+1311.735442039" observedRunningTime="2025-11-27 07:14:35.303222487 +0000 UTC m=+1313.495266435" watchObservedRunningTime="2025-11-27 07:14:35.305884932 +0000 UTC m=+1313.497928870" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.344356 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-z4gfl" podStartSLOduration=3.496645395 podStartE2EDuration="15.344321248s" podCreationTimestamp="2025-11-27 07:14:20 +0000 UTC" firstStartedPulling="2025-11-27 07:14:21.741404319 +0000 UTC m=+1299.933448237" lastFinishedPulling="2025-11-27 07:14:33.589080172 +0000 UTC m=+1311.781124090" observedRunningTime="2025-11-27 07:14:35.3319882 +0000 UTC m=+1313.524032128" watchObservedRunningTime="2025-11-27 07:14:35.344321248 +0000 UTC 
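
The pod_startup_latency_tracker lines deserve a note, because rabbitmq-server-0 reports podStartSLOduration=-9223371957.579905. podStartE2EDuration is observedRunningTime minus podCreationTimestamp; the SLO duration then subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling). When lastFinishedPulling is the zero time, as printed above, time.Time.Sub clamps the pull window to math.MinInt64 nanoseconds and the subsequent int64 subtraction wraps around. A self-contained reproduction with the timestamps copied from the log entry (the formula is inferred from how the printed fields line up, not quoted from kubelet source):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        created, _ := time.Parse(time.RFC3339Nano, "2025-11-27T07:13:15Z")
        observed, _ := time.Parse(time.RFC3339Nano, "2025-11-27T07:14:34.274871364Z")
        firstPull, _ := time.Parse(time.RFC3339Nano, "2025-11-27T07:13:17.689733109Z")
        var lastPull time.Time // zero value: lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC"

        e2e := observed.Sub(created)    // 1m19.274871364s, the printed podStartE2EDuration
        pull := lastPull.Sub(firstPull) // about -2024 years: Sub clamps to math.MinInt64 ns
        slo := e2e - pull               // int64 wrap-around, hence the huge negative number

        fmt.Println("podStartE2EDuration =", e2e)
        fmt.Printf("podStartSLOduration = %.6fs\n", slo.Seconds()) // about -9223371957.579905s, matching the log
    }
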
m=+1313.536365186" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.602870 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-766546d8bf-fkncq"] Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.604564 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.607665 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.617979 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-766546d8bf-fkncq"] Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.689845 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-dns-swift-storage-0\") pod \"dnsmasq-dns-766546d8bf-fkncq\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.690275 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-dns-svc\") pod \"dnsmasq-dns-766546d8bf-fkncq\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.690574 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqpgl\" (UniqueName: \"kubernetes.io/projected/989bb210-5253-492e-bd34-8a2edca6a62d-kube-api-access-rqpgl\") pod \"dnsmasq-dns-766546d8bf-fkncq\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.690742 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-ovsdbserver-sb\") pod \"dnsmasq-dns-766546d8bf-fkncq\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.691059 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-config\") pod \"dnsmasq-dns-766546d8bf-fkncq\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.691147 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-ovsdbserver-nb\") pod \"dnsmasq-dns-766546d8bf-fkncq\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.792523 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqpgl\" (UniqueName: \"kubernetes.io/projected/989bb210-5253-492e-bd34-8a2edca6a62d-kube-api-access-rqpgl\") pod \"dnsmasq-dns-766546d8bf-fkncq\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 
07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.792821 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-ovsdbserver-sb\") pod \"dnsmasq-dns-766546d8bf-fkncq\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.792949 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-config\") pod \"dnsmasq-dns-766546d8bf-fkncq\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.793066 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-ovsdbserver-nb\") pod \"dnsmasq-dns-766546d8bf-fkncq\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.793185 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-dns-swift-storage-0\") pod \"dnsmasq-dns-766546d8bf-fkncq\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.793309 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-dns-svc\") pod \"dnsmasq-dns-766546d8bf-fkncq\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.794116 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-dns-svc\") pod \"dnsmasq-dns-766546d8bf-fkncq\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.795009 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-ovsdbserver-sb\") pod \"dnsmasq-dns-766546d8bf-fkncq\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.795640 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-config\") pod \"dnsmasq-dns-766546d8bf-fkncq\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.796646 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-dns-swift-storage-0\") pod \"dnsmasq-dns-766546d8bf-fkncq\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.796764 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-ovsdbserver-nb\") pod \"dnsmasq-dns-766546d8bf-fkncq\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.820371 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqpgl\" (UniqueName: \"kubernetes.io/projected/989bb210-5253-492e-bd34-8a2edca6a62d-kube-api-access-rqpgl\") pod \"dnsmasq-dns-766546d8bf-fkncq\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:35 crc kubenswrapper[4971]: I1127 07:14:35.922307 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.392689 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-766546d8bf-fkncq"] Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.528614 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.610436 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9cd1cdff-852f-4635-851f-6c16a8204a54-var-run-ovn\") pod \"9cd1cdff-852f-4635-851f-6c16a8204a54\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.610516 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9cd1cdff-852f-4635-851f-6c16a8204a54-scripts\") pod \"9cd1cdff-852f-4635-851f-6c16a8204a54\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.610611 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cd1cdff-852f-4635-851f-6c16a8204a54-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "9cd1cdff-852f-4635-851f-6c16a8204a54" (UID: "9cd1cdff-852f-4635-851f-6c16a8204a54"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.610631 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9cd1cdff-852f-4635-851f-6c16a8204a54-var-run\") pod \"9cd1cdff-852f-4635-851f-6c16a8204a54\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.610664 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cd1cdff-852f-4635-851f-6c16a8204a54-var-run" (OuterVolumeSpecName: "var-run") pod "9cd1cdff-852f-4635-851f-6c16a8204a54" (UID: "9cd1cdff-852f-4635-851f-6c16a8204a54"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.610745 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9cd1cdff-852f-4635-851f-6c16a8204a54-additional-scripts\") pod \"9cd1cdff-852f-4635-851f-6c16a8204a54\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.610791 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9cd1cdff-852f-4635-851f-6c16a8204a54-var-log-ovn\") pod \"9cd1cdff-852f-4635-851f-6c16a8204a54\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.610822 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svjfb\" (UniqueName: \"kubernetes.io/projected/9cd1cdff-852f-4635-851f-6c16a8204a54-kube-api-access-svjfb\") pod \"9cd1cdff-852f-4635-851f-6c16a8204a54\" (UID: \"9cd1cdff-852f-4635-851f-6c16a8204a54\") " Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.611698 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cd1cdff-852f-4635-851f-6c16a8204a54-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "9cd1cdff-852f-4635-851f-6c16a8204a54" (UID: "9cd1cdff-852f-4635-851f-6c16a8204a54"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.611789 4971 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9cd1cdff-852f-4635-851f-6c16a8204a54-var-run\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.611806 4971 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9cd1cdff-852f-4635-851f-6c16a8204a54-var-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.611839 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cd1cdff-852f-4635-851f-6c16a8204a54-scripts" (OuterVolumeSpecName: "scripts") pod "9cd1cdff-852f-4635-851f-6c16a8204a54" (UID: "9cd1cdff-852f-4635-851f-6c16a8204a54"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.612374 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cd1cdff-852f-4635-851f-6c16a8204a54-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "9cd1cdff-852f-4635-851f-6c16a8204a54" (UID: "9cd1cdff-852f-4635-851f-6c16a8204a54"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.616366 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cd1cdff-852f-4635-851f-6c16a8204a54-kube-api-access-svjfb" (OuterVolumeSpecName: "kube-api-access-svjfb") pod "9cd1cdff-852f-4635-851f-6c16a8204a54" (UID: "9cd1cdff-852f-4635-851f-6c16a8204a54"). InnerVolumeSpecName "kube-api-access-svjfb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.713486 4971 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9cd1cdff-852f-4635-851f-6c16a8204a54-additional-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.713517 4971 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9cd1cdff-852f-4635-851f-6c16a8204a54-var-log-ovn\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.713525 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svjfb\" (UniqueName: \"kubernetes.io/projected/9cd1cdff-852f-4635-851f-6c16a8204a54-kube-api-access-svjfb\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.713548 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9cd1cdff-852f-4635-851f-6c16a8204a54-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.900505 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-db2qc-config-4mv2z"] Nov 27 07:14:36 crc kubenswrapper[4971]: I1127 07:14:36.909081 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-db2qc-config-4mv2z"] Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.022840 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-db2qc-config-r9h9q"] Nov 27 07:14:37 crc kubenswrapper[4971]: E1127 07:14:37.023283 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cd1cdff-852f-4635-851f-6c16a8204a54" containerName="ovn-config" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.023303 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cd1cdff-852f-4635-851f-6c16a8204a54" containerName="ovn-config" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.023486 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cd1cdff-852f-4635-851f-6c16a8204a54" containerName="ovn-config" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.024078 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.035459 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-db2qc-config-r9h9q"] Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.120956 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x85nc\" (UniqueName: \"kubernetes.io/projected/d9a6c250-755b-4f6a-8a66-e7af5aab678b-kube-api-access-x85nc\") pod \"ovn-controller-db2qc-config-r9h9q\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.121091 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d9a6c250-755b-4f6a-8a66-e7af5aab678b-var-run-ovn\") pod \"ovn-controller-db2qc-config-r9h9q\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.121171 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d9a6c250-755b-4f6a-8a66-e7af5aab678b-additional-scripts\") pod \"ovn-controller-db2qc-config-r9h9q\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.121260 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d9a6c250-755b-4f6a-8a66-e7af5aab678b-var-run\") pod \"ovn-controller-db2qc-config-r9h9q\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.121450 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d9a6c250-755b-4f6a-8a66-e7af5aab678b-var-log-ovn\") pod \"ovn-controller-db2qc-config-r9h9q\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.121568 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9a6c250-755b-4f6a-8a66-e7af5aab678b-scripts\") pod \"ovn-controller-db2qc-config-r9h9q\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.223867 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d9a6c250-755b-4f6a-8a66-e7af5aab678b-var-run\") pod \"ovn-controller-db2qc-config-r9h9q\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.223975 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d9a6c250-755b-4f6a-8a66-e7af5aab678b-var-log-ovn\") pod \"ovn-controller-db2qc-config-r9h9q\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: 
I1127 07:14:37.224017 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9a6c250-755b-4f6a-8a66-e7af5aab678b-scripts\") pod \"ovn-controller-db2qc-config-r9h9q\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.224071 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x85nc\" (UniqueName: \"kubernetes.io/projected/d9a6c250-755b-4f6a-8a66-e7af5aab678b-kube-api-access-x85nc\") pod \"ovn-controller-db2qc-config-r9h9q\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.224121 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d9a6c250-755b-4f6a-8a66-e7af5aab678b-var-run-ovn\") pod \"ovn-controller-db2qc-config-r9h9q\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.224150 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d9a6c250-755b-4f6a-8a66-e7af5aab678b-additional-scripts\") pod \"ovn-controller-db2qc-config-r9h9q\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.224223 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d9a6c250-755b-4f6a-8a66-e7af5aab678b-var-run\") pod \"ovn-controller-db2qc-config-r9h9q\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.224827 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d9a6c250-755b-4f6a-8a66-e7af5aab678b-var-log-ovn\") pod \"ovn-controller-db2qc-config-r9h9q\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.224879 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d9a6c250-755b-4f6a-8a66-e7af5aab678b-var-run-ovn\") pod \"ovn-controller-db2qc-config-r9h9q\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.224918 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d9a6c250-755b-4f6a-8a66-e7af5aab678b-additional-scripts\") pod \"ovn-controller-db2qc-config-r9h9q\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.226248 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9a6c250-755b-4f6a-8a66-e7af5aab678b-scripts\") pod \"ovn-controller-db2qc-config-r9h9q\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.244556 4971 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x85nc\" (UniqueName: \"kubernetes.io/projected/d9a6c250-755b-4f6a-8a66-e7af5aab678b-kube-api-access-x85nc\") pod \"ovn-controller-db2qc-config-r9h9q\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.308824 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49cd890efa651c94eee2179ac6c3bae33f7bd451853eab11166a8bfcc9396617" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.308936 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-db2qc-config-4mv2z" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.310804 4971 generic.go:334] "Generic (PLEG): container finished" podID="989bb210-5253-492e-bd34-8a2edca6a62d" containerID="db1427ff2182b41785d769d9289e5a7bd0820aa4252aa9168d8b7ccc0cc9af49" exitCode=0 Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.310866 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-766546d8bf-fkncq" event={"ID":"989bb210-5253-492e-bd34-8a2edca6a62d","Type":"ContainerDied","Data":"db1427ff2182b41785d769d9289e5a7bd0820aa4252aa9168d8b7ccc0cc9af49"} Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.310913 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-766546d8bf-fkncq" event={"ID":"989bb210-5253-492e-bd34-8a2edca6a62d","Type":"ContainerStarted","Data":"f6b6b50187447aa82503546d504cea3ddb6e64b027f721e60632a0450522172f"} Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.348516 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:37 crc kubenswrapper[4971]: I1127 07:14:37.902595 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-db2qc-config-r9h9q"] Nov 27 07:14:38 crc kubenswrapper[4971]: I1127 07:14:38.321825 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-766546d8bf-fkncq" event={"ID":"989bb210-5253-492e-bd34-8a2edca6a62d","Type":"ContainerStarted","Data":"7d98f4d78c01ef4858eb32cf5dcd31847b47938f1d243beee43d7ddbe6fdf54f"} Nov 27 07:14:38 crc kubenswrapper[4971]: I1127 07:14:38.322171 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:38 crc kubenswrapper[4971]: I1127 07:14:38.323167 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-db2qc-config-r9h9q" event={"ID":"d9a6c250-755b-4f6a-8a66-e7af5aab678b","Type":"ContainerStarted","Data":"19d72d074c4d1f42fcf35b5590a9d84eff4ace3cd7f8dfc492efd1e086be618e"} Nov 27 07:14:38 crc kubenswrapper[4971]: I1127 07:14:38.323217 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-db2qc-config-r9h9q" event={"ID":"d9a6c250-755b-4f6a-8a66-e7af5aab678b","Type":"ContainerStarted","Data":"0f3b10024838b11e348b61388d9206a92ce5ba45cab76ec8ba02f7098e7b02b7"} Nov 27 07:14:38 crc kubenswrapper[4971]: I1127 07:14:38.353651 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-766546d8bf-fkncq" podStartSLOduration=3.353628448 podStartE2EDuration="3.353628448s" podCreationTimestamp="2025-11-27 07:14:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-11-27 07:14:38.348713269 +0000 UTC m=+1316.540757197" watchObservedRunningTime="2025-11-27 07:14:38.353628448 +0000 UTC m=+1316.545672366" Nov 27 07:14:38 crc kubenswrapper[4971]: I1127 07:14:38.369093 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-db2qc-config-r9h9q" podStartSLOduration=1.369070085 podStartE2EDuration="1.369070085s" podCreationTimestamp="2025-11-27 07:14:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:14:38.368940361 +0000 UTC m=+1316.560984279" watchObservedRunningTime="2025-11-27 07:14:38.369070085 +0000 UTC m=+1316.561114003" Nov 27 07:14:38 crc kubenswrapper[4971]: I1127 07:14:38.562200 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cd1cdff-852f-4635-851f-6c16a8204a54" path="/var/lib/kubelet/pods/9cd1cdff-852f-4635-851f-6c16a8204a54/volumes" Nov 27 07:14:39 crc kubenswrapper[4971]: I1127 07:14:39.332612 4971 generic.go:334] "Generic (PLEG): container finished" podID="d9a6c250-755b-4f6a-8a66-e7af5aab678b" containerID="19d72d074c4d1f42fcf35b5590a9d84eff4ace3cd7f8dfc492efd1e086be618e" exitCode=0 Nov 27 07:14:39 crc kubenswrapper[4971]: I1127 07:14:39.332688 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-db2qc-config-r9h9q" event={"ID":"d9a6c250-755b-4f6a-8a66-e7af5aab678b","Type":"ContainerDied","Data":"19d72d074c4d1f42fcf35b5590a9d84eff4ace3cd7f8dfc492efd1e086be618e"} Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.746909 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-db2qc-config-r9h9q" Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.788767 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d9a6c250-755b-4f6a-8a66-e7af5aab678b-var-run-ovn\") pod \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.788905 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d9a6c250-755b-4f6a-8a66-e7af5aab678b-additional-scripts\") pod \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.788931 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9a6c250-755b-4f6a-8a66-e7af5aab678b-scripts\") pod \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.788956 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d9a6c250-755b-4f6a-8a66-e7af5aab678b-var-log-ovn\") pod \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.788982 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d9a6c250-755b-4f6a-8a66-e7af5aab678b-var-run\") pod \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") " Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 
Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.789102 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x85nc\" (UniqueName: \"kubernetes.io/projected/d9a6c250-755b-4f6a-8a66-e7af5aab678b-kube-api-access-x85nc\") pod \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\" (UID: \"d9a6c250-755b-4f6a-8a66-e7af5aab678b\") "
Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.789792 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9a6c250-755b-4f6a-8a66-e7af5aab678b-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "d9a6c250-755b-4f6a-8a66-e7af5aab678b" (UID: "d9a6c250-755b-4f6a-8a66-e7af5aab678b"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.789856 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d9a6c250-755b-4f6a-8a66-e7af5aab678b-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "d9a6c250-755b-4f6a-8a66-e7af5aab678b" (UID: "d9a6c250-755b-4f6a-8a66-e7af5aab678b"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.789879 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d9a6c250-755b-4f6a-8a66-e7af5aab678b-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "d9a6c250-755b-4f6a-8a66-e7af5aab678b" (UID: "d9a6c250-755b-4f6a-8a66-e7af5aab678b"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.790778 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9a6c250-755b-4f6a-8a66-e7af5aab678b-scripts" (OuterVolumeSpecName: "scripts") pod "d9a6c250-755b-4f6a-8a66-e7af5aab678b" (UID: "d9a6c250-755b-4f6a-8a66-e7af5aab678b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.790837 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d9a6c250-755b-4f6a-8a66-e7af5aab678b-var-run" (OuterVolumeSpecName: "var-run") pod "d9a6c250-755b-4f6a-8a66-e7af5aab678b" (UID: "d9a6c250-755b-4f6a-8a66-e7af5aab678b"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.796781 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9a6c250-755b-4f6a-8a66-e7af5aab678b-kube-api-access-x85nc" (OuterVolumeSpecName: "kube-api-access-x85nc") pod "d9a6c250-755b-4f6a-8a66-e7af5aab678b" (UID: "d9a6c250-755b-4f6a-8a66-e7af5aab678b"). InnerVolumeSpecName "kube-api-access-x85nc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.892444 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x85nc\" (UniqueName: \"kubernetes.io/projected/d9a6c250-755b-4f6a-8a66-e7af5aab678b-kube-api-access-x85nc\") on node \"crc\" DevicePath \"\""
Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.892486 4971 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d9a6c250-755b-4f6a-8a66-e7af5aab678b-var-run-ovn\") on node \"crc\" DevicePath \"\""
Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.892509 4971 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d9a6c250-755b-4f6a-8a66-e7af5aab678b-additional-scripts\") on node \"crc\" DevicePath \"\""
Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.892522 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9a6c250-755b-4f6a-8a66-e7af5aab678b-scripts\") on node \"crc\" DevicePath \"\""
Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.892550 4971 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d9a6c250-755b-4f6a-8a66-e7af5aab678b-var-log-ovn\") on node \"crc\" DevicePath \"\""
Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.892563 4971 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d9a6c250-755b-4f6a-8a66-e7af5aab678b-var-run\") on node \"crc\" DevicePath \"\""
Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.976069 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-db2qc-config-r9h9q"]
Nov 27 07:14:40 crc kubenswrapper[4971]: I1127 07:14:40.982172 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-db2qc-config-r9h9q"]
Nov 27 07:14:41 crc kubenswrapper[4971]: I1127 07:14:41.348732 4971 generic.go:334] "Generic (PLEG): container finished" podID="c5689fc8-154a-4710-a2d6-9bc41e927f77" containerID="c878cae761f2b1c9abbbb40694a91fe64be7c38a2b289d4333edaeb94b60ad38" exitCode=0
Nov 27 07:14:41 crc kubenswrapper[4971]: I1127 07:14:41.348799 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-z4gfl" event={"ID":"c5689fc8-154a-4710-a2d6-9bc41e927f77","Type":"ContainerDied","Data":"c878cae761f2b1c9abbbb40694a91fe64be7c38a2b289d4333edaeb94b60ad38"}
Nov 27 07:14:41 crc kubenswrapper[4971]: I1127 07:14:41.350792 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f3b10024838b11e348b61388d9206a92ce5ba45cab76ec8ba02f7098e7b02b7"
Nov 27 07:14:41 crc kubenswrapper[4971]: I1127 07:14:41.350894 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-db2qc-config-r9h9q"
Nov 27 07:14:42 crc kubenswrapper[4971]: I1127 07:14:42.562250 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9a6c250-755b-4f6a-8a66-e7af5aab678b" path="/var/lib/kubelet/pods/d9a6c250-755b-4f6a-8a66-e7af5aab678b/volumes"
Nov 27 07:14:42 crc kubenswrapper[4971]: I1127 07:14:42.874942 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-z4gfl"
Need to start a new one" pod="openstack/glance-db-sync-z4gfl" Nov 27 07:14:42 crc kubenswrapper[4971]: I1127 07:14:42.927054 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c5689fc8-154a-4710-a2d6-9bc41e927f77-db-sync-config-data\") pod \"c5689fc8-154a-4710-a2d6-9bc41e927f77\" (UID: \"c5689fc8-154a-4710-a2d6-9bc41e927f77\") " Nov 27 07:14:42 crc kubenswrapper[4971]: I1127 07:14:42.927139 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5689fc8-154a-4710-a2d6-9bc41e927f77-config-data\") pod \"c5689fc8-154a-4710-a2d6-9bc41e927f77\" (UID: \"c5689fc8-154a-4710-a2d6-9bc41e927f77\") " Nov 27 07:14:42 crc kubenswrapper[4971]: I1127 07:14:42.927241 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cg4h\" (UniqueName: \"kubernetes.io/projected/c5689fc8-154a-4710-a2d6-9bc41e927f77-kube-api-access-8cg4h\") pod \"c5689fc8-154a-4710-a2d6-9bc41e927f77\" (UID: \"c5689fc8-154a-4710-a2d6-9bc41e927f77\") " Nov 27 07:14:42 crc kubenswrapper[4971]: I1127 07:14:42.927362 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5689fc8-154a-4710-a2d6-9bc41e927f77-combined-ca-bundle\") pod \"c5689fc8-154a-4710-a2d6-9bc41e927f77\" (UID: \"c5689fc8-154a-4710-a2d6-9bc41e927f77\") " Nov 27 07:14:42 crc kubenswrapper[4971]: I1127 07:14:42.935379 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5689fc8-154a-4710-a2d6-9bc41e927f77-kube-api-access-8cg4h" (OuterVolumeSpecName: "kube-api-access-8cg4h") pod "c5689fc8-154a-4710-a2d6-9bc41e927f77" (UID: "c5689fc8-154a-4710-a2d6-9bc41e927f77"). InnerVolumeSpecName "kube-api-access-8cg4h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:42 crc kubenswrapper[4971]: I1127 07:14:42.935615 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5689fc8-154a-4710-a2d6-9bc41e927f77-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "c5689fc8-154a-4710-a2d6-9bc41e927f77" (UID: "c5689fc8-154a-4710-a2d6-9bc41e927f77"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:14:42 crc kubenswrapper[4971]: I1127 07:14:42.960851 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5689fc8-154a-4710-a2d6-9bc41e927f77-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c5689fc8-154a-4710-a2d6-9bc41e927f77" (UID: "c5689fc8-154a-4710-a2d6-9bc41e927f77"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:14:42 crc kubenswrapper[4971]: I1127 07:14:42.970161 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5689fc8-154a-4710-a2d6-9bc41e927f77-config-data" (OuterVolumeSpecName: "config-data") pod "c5689fc8-154a-4710-a2d6-9bc41e927f77" (UID: "c5689fc8-154a-4710-a2d6-9bc41e927f77"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.030135 4971 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c5689fc8-154a-4710-a2d6-9bc41e927f77-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.030180 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5689fc8-154a-4710-a2d6-9bc41e927f77-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.030194 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cg4h\" (UniqueName: \"kubernetes.io/projected/c5689fc8-154a-4710-a2d6-9bc41e927f77-kube-api-access-8cg4h\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.030209 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5689fc8-154a-4710-a2d6-9bc41e927f77-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.367832 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-z4gfl" event={"ID":"c5689fc8-154a-4710-a2d6-9bc41e927f77","Type":"ContainerDied","Data":"7fdabaa7fcb0627c6583d1b96a6d549a0391d9e1b6ac02c4d067b4a152a4f2e5"} Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.368086 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7fdabaa7fcb0627c6583d1b96a6d549a0391d9e1b6ac02c4d067b4a152a4f2e5" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.367875 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-z4gfl" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.793115 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-766546d8bf-fkncq"] Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.793693 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-766546d8bf-fkncq" podUID="989bb210-5253-492e-bd34-8a2edca6a62d" containerName="dnsmasq-dns" containerID="cri-o://7d98f4d78c01ef4858eb32cf5dcd31847b47938f1d243beee43d7ddbe6fdf54f" gracePeriod=10 Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.798956 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.818191 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-fd79dd9f9-z2dsj"] Nov 27 07:14:43 crc kubenswrapper[4971]: E1127 07:14:43.819836 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9a6c250-755b-4f6a-8a66-e7af5aab678b" containerName="ovn-config" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.819879 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9a6c250-755b-4f6a-8a66-e7af5aab678b" containerName="ovn-config" Nov 27 07:14:43 crc kubenswrapper[4971]: E1127 07:14:43.819897 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5689fc8-154a-4710-a2d6-9bc41e927f77" containerName="glance-db-sync" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.819907 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5689fc8-154a-4710-a2d6-9bc41e927f77" containerName="glance-db-sync" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.820161 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9a6c250-755b-4f6a-8a66-e7af5aab678b" containerName="ovn-config" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.820413 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5689fc8-154a-4710-a2d6-9bc41e927f77" containerName="glance-db-sync" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.823466 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.855308 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fd79dd9f9-z2dsj"] Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.856296 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-config\") pod \"dnsmasq-dns-fd79dd9f9-z2dsj\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.856343 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-ovsdbserver-nb\") pod \"dnsmasq-dns-fd79dd9f9-z2dsj\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.856388 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-ovsdbserver-sb\") pod \"dnsmasq-dns-fd79dd9f9-z2dsj\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.856436 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-dns-swift-storage-0\") pod \"dnsmasq-dns-fd79dd9f9-z2dsj\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.856456 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v682b\" (UniqueName: \"kubernetes.io/projected/46ee0072-fbf4-40fa-b9bc-40fae95462c3-kube-api-access-v682b\") pod \"dnsmasq-dns-fd79dd9f9-z2dsj\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.856482 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-dns-svc\") pod \"dnsmasq-dns-fd79dd9f9-z2dsj\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.957953 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v682b\" (UniqueName: \"kubernetes.io/projected/46ee0072-fbf4-40fa-b9bc-40fae95462c3-kube-api-access-v682b\") pod \"dnsmasq-dns-fd79dd9f9-z2dsj\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.958006 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-dns-swift-storage-0\") pod \"dnsmasq-dns-fd79dd9f9-z2dsj\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.958056 4971 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-dns-svc\") pod \"dnsmasq-dns-fd79dd9f9-z2dsj\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.958113 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-config\") pod \"dnsmasq-dns-fd79dd9f9-z2dsj\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.958163 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-ovsdbserver-nb\") pod \"dnsmasq-dns-fd79dd9f9-z2dsj\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.958226 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-ovsdbserver-sb\") pod \"dnsmasq-dns-fd79dd9f9-z2dsj\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.959285 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-dns-swift-storage-0\") pod \"dnsmasq-dns-fd79dd9f9-z2dsj\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.959892 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-ovsdbserver-sb\") pod \"dnsmasq-dns-fd79dd9f9-z2dsj\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.960264 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-dns-svc\") pod \"dnsmasq-dns-fd79dd9f9-z2dsj\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.960820 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-config\") pod \"dnsmasq-dns-fd79dd9f9-z2dsj\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.960894 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-ovsdbserver-nb\") pod \"dnsmasq-dns-fd79dd9f9-z2dsj\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:43 crc kubenswrapper[4971]: I1127 07:14:43.976773 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v682b\" (UniqueName: 
\"kubernetes.io/projected/46ee0072-fbf4-40fa-b9bc-40fae95462c3-kube-api-access-v682b\") pod \"dnsmasq-dns-fd79dd9f9-z2dsj\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:44 crc kubenswrapper[4971]: I1127 07:14:44.143026 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:44 crc kubenswrapper[4971]: I1127 07:14:44.371566 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:44 crc kubenswrapper[4971]: I1127 07:14:44.411951 4971 generic.go:334] "Generic (PLEG): container finished" podID="989bb210-5253-492e-bd34-8a2edca6a62d" containerID="7d98f4d78c01ef4858eb32cf5dcd31847b47938f1d243beee43d7ddbe6fdf54f" exitCode=0 Nov 27 07:14:44 crc kubenswrapper[4971]: I1127 07:14:44.412002 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-766546d8bf-fkncq" event={"ID":"989bb210-5253-492e-bd34-8a2edca6a62d","Type":"ContainerDied","Data":"7d98f4d78c01ef4858eb32cf5dcd31847b47938f1d243beee43d7ddbe6fdf54f"} Nov 27 07:14:44 crc kubenswrapper[4971]: I1127 07:14:44.412033 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-766546d8bf-fkncq" event={"ID":"989bb210-5253-492e-bd34-8a2edca6a62d","Type":"ContainerDied","Data":"f6b6b50187447aa82503546d504cea3ddb6e64b027f721e60632a0450522172f"} Nov 27 07:14:44 crc kubenswrapper[4971]: I1127 07:14:44.412053 4971 scope.go:117] "RemoveContainer" containerID="7d98f4d78c01ef4858eb32cf5dcd31847b47938f1d243beee43d7ddbe6fdf54f" Nov 27 07:14:44 crc kubenswrapper[4971]: I1127 07:14:44.412237 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-766546d8bf-fkncq" Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.456818 4971 scope.go:117] "RemoveContainer" containerID="db1427ff2182b41785d769d9289e5a7bd0820aa4252aa9168d8b7ccc0cc9af49" Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.488215 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-ovsdbserver-sb\") pod \"989bb210-5253-492e-bd34-8a2edca6a62d\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.488309 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-config\") pod \"989bb210-5253-492e-bd34-8a2edca6a62d\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.488367 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-dns-svc\") pod \"989bb210-5253-492e-bd34-8a2edca6a62d\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.488428 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqpgl\" (UniqueName: \"kubernetes.io/projected/989bb210-5253-492e-bd34-8a2edca6a62d-kube-api-access-rqpgl\") pod \"989bb210-5253-492e-bd34-8a2edca6a62d\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.488492 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-ovsdbserver-nb\") pod \"989bb210-5253-492e-bd34-8a2edca6a62d\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.488515 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-dns-swift-storage-0\") pod \"989bb210-5253-492e-bd34-8a2edca6a62d\" (UID: \"989bb210-5253-492e-bd34-8a2edca6a62d\") " Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.493796 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/989bb210-5253-492e-bd34-8a2edca6a62d-kube-api-access-rqpgl" (OuterVolumeSpecName: "kube-api-access-rqpgl") pod "989bb210-5253-492e-bd34-8a2edca6a62d" (UID: "989bb210-5253-492e-bd34-8a2edca6a62d"). InnerVolumeSpecName "kube-api-access-rqpgl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.505763 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqpgl\" (UniqueName: \"kubernetes.io/projected/989bb210-5253-492e-bd34-8a2edca6a62d-kube-api-access-rqpgl\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.514735 4971 scope.go:117] "RemoveContainer" containerID="7d98f4d78c01ef4858eb32cf5dcd31847b47938f1d243beee43d7ddbe6fdf54f" Nov 27 07:14:45 crc kubenswrapper[4971]: E1127 07:14:44.515382 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d98f4d78c01ef4858eb32cf5dcd31847b47938f1d243beee43d7ddbe6fdf54f\": container with ID starting with 7d98f4d78c01ef4858eb32cf5dcd31847b47938f1d243beee43d7ddbe6fdf54f not found: ID does not exist" containerID="7d98f4d78c01ef4858eb32cf5dcd31847b47938f1d243beee43d7ddbe6fdf54f" Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.515441 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d98f4d78c01ef4858eb32cf5dcd31847b47938f1d243beee43d7ddbe6fdf54f"} err="failed to get container status \"7d98f4d78c01ef4858eb32cf5dcd31847b47938f1d243beee43d7ddbe6fdf54f\": rpc error: code = NotFound desc = could not find container \"7d98f4d78c01ef4858eb32cf5dcd31847b47938f1d243beee43d7ddbe6fdf54f\": container with ID starting with 7d98f4d78c01ef4858eb32cf5dcd31847b47938f1d243beee43d7ddbe6fdf54f not found: ID does not exist" Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.515472 4971 scope.go:117] "RemoveContainer" containerID="db1427ff2182b41785d769d9289e5a7bd0820aa4252aa9168d8b7ccc0cc9af49" Nov 27 07:14:45 crc kubenswrapper[4971]: E1127 07:14:44.515956 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db1427ff2182b41785d769d9289e5a7bd0820aa4252aa9168d8b7ccc0cc9af49\": container with ID starting with db1427ff2182b41785d769d9289e5a7bd0820aa4252aa9168d8b7ccc0cc9af49 not found: ID does not exist" containerID="db1427ff2182b41785d769d9289e5a7bd0820aa4252aa9168d8b7ccc0cc9af49" Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.515997 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db1427ff2182b41785d769d9289e5a7bd0820aa4252aa9168d8b7ccc0cc9af49"} err="failed to get container status \"db1427ff2182b41785d769d9289e5a7bd0820aa4252aa9168d8b7ccc0cc9af49\": rpc 
error: code = NotFound desc = could not find container \"db1427ff2182b41785d769d9289e5a7bd0820aa4252aa9168d8b7ccc0cc9af49\": container with ID starting with db1427ff2182b41785d769d9289e5a7bd0820aa4252aa9168d8b7ccc0cc9af49 not found: ID does not exist" Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.539147 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "989bb210-5253-492e-bd34-8a2edca6a62d" (UID: "989bb210-5253-492e-bd34-8a2edca6a62d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.540248 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-config" (OuterVolumeSpecName: "config") pod "989bb210-5253-492e-bd34-8a2edca6a62d" (UID: "989bb210-5253-492e-bd34-8a2edca6a62d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.546804 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "989bb210-5253-492e-bd34-8a2edca6a62d" (UID: "989bb210-5253-492e-bd34-8a2edca6a62d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.552329 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "989bb210-5253-492e-bd34-8a2edca6a62d" (UID: "989bb210-5253-492e-bd34-8a2edca6a62d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.552472 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "989bb210-5253-492e-bd34-8a2edca6a62d" (UID: "989bb210-5253-492e-bd34-8a2edca6a62d"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.607904 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.607938 4971 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.607951 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.607962 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.607970 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/989bb210-5253-492e-bd34-8a2edca6a62d-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.740666 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-766546d8bf-fkncq"] Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.752005 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-766546d8bf-fkncq"] Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:44.771759 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fd79dd9f9-z2dsj"] Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:45.421130 4971 generic.go:334] "Generic (PLEG): container finished" podID="46ee0072-fbf4-40fa-b9bc-40fae95462c3" containerID="c6a9de26b76ffd90526d58f0c36d51422b5a7bf192b60d1eb043ebe5cedc7092" exitCode=0 Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:45.421242 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" event={"ID":"46ee0072-fbf4-40fa-b9bc-40fae95462c3","Type":"ContainerDied","Data":"c6a9de26b76ffd90526d58f0c36d51422b5a7bf192b60d1eb043ebe5cedc7092"} Nov 27 07:14:45 crc kubenswrapper[4971]: I1127 07:14:45.422025 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" event={"ID":"46ee0072-fbf4-40fa-b9bc-40fae95462c3","Type":"ContainerStarted","Data":"655c579ca0015c7ef8afb8e457b57c67be56196409feccf039f0d0efb0406200"} Nov 27 07:14:46 crc kubenswrapper[4971]: I1127 07:14:46.437326 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" event={"ID":"46ee0072-fbf4-40fa-b9bc-40fae95462c3","Type":"ContainerStarted","Data":"dff606b7b165d6268b04e3c652abf9420dd9caabe77ed3c1f0c431beda8c9de5"} Nov 27 07:14:46 crc kubenswrapper[4971]: I1127 07:14:46.437638 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:46 crc kubenswrapper[4971]: I1127 07:14:46.463010 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" podStartSLOduration=3.462992298 podStartE2EDuration="3.462992298s" podCreationTimestamp="2025-11-27 07:14:43 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:14:46.457263316 +0000 UTC m=+1324.649307234" watchObservedRunningTime="2025-11-27 07:14:46.462992298 +0000 UTC m=+1324.655036216" Nov 27 07:14:46 crc kubenswrapper[4971]: I1127 07:14:46.560211 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="989bb210-5253-492e-bd34-8a2edca6a62d" path="/var/lib/kubelet/pods/989bb210-5253-492e-bd34-8a2edca6a62d/volumes" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.067770 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.382899 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-78rv4"] Nov 27 07:14:47 crc kubenswrapper[4971]: E1127 07:14:47.383313 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="989bb210-5253-492e-bd34-8a2edca6a62d" containerName="init" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.383332 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="989bb210-5253-492e-bd34-8a2edca6a62d" containerName="init" Nov 27 07:14:47 crc kubenswrapper[4971]: E1127 07:14:47.383347 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="989bb210-5253-492e-bd34-8a2edca6a62d" containerName="dnsmasq-dns" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.383355 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="989bb210-5253-492e-bd34-8a2edca6a62d" containerName="dnsmasq-dns" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.383549 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="989bb210-5253-492e-bd34-8a2edca6a62d" containerName="dnsmasq-dns" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.384359 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-78rv4" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.401463 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-78rv4"] Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.425843 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.458794 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49b88fe9-5c45-423c-ab21-fbd5e2bb14c8-operator-scripts\") pod \"cinder-db-create-78rv4\" (UID: \"49b88fe9-5c45-423c-ab21-fbd5e2bb14c8\") " pod="openstack/cinder-db-create-78rv4" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.458868 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5968\" (UniqueName: \"kubernetes.io/projected/49b88fe9-5c45-423c-ab21-fbd5e2bb14c8-kube-api-access-s5968\") pod \"cinder-db-create-78rv4\" (UID: \"49b88fe9-5c45-423c-ab21-fbd5e2bb14c8\") " pod="openstack/cinder-db-create-78rv4" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.501468 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-287xw"] Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.503028 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-287xw" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.515771 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-287xw"] Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.526436 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-6f3a-account-create-update-4dqdw"] Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.527921 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-6f3a-account-create-update-4dqdw" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.532714 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.543865 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-6f3a-account-create-update-4dqdw"] Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.561975 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5dedf934-ea25-4736-9220-80efdbd3756e-operator-scripts\") pod \"barbican-db-create-287xw\" (UID: \"5dedf934-ea25-4736-9220-80efdbd3756e\") " pod="openstack/barbican-db-create-287xw" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.562059 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49b88fe9-5c45-423c-ab21-fbd5e2bb14c8-operator-scripts\") pod \"cinder-db-create-78rv4\" (UID: \"49b88fe9-5c45-423c-ab21-fbd5e2bb14c8\") " pod="openstack/cinder-db-create-78rv4" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.562143 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5968\" (UniqueName: \"kubernetes.io/projected/49b88fe9-5c45-423c-ab21-fbd5e2bb14c8-kube-api-access-s5968\") pod \"cinder-db-create-78rv4\" (UID: \"49b88fe9-5c45-423c-ab21-fbd5e2bb14c8\") " pod="openstack/cinder-db-create-78rv4" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.562233 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdsjz\" (UniqueName: \"kubernetes.io/projected/5dedf934-ea25-4736-9220-80efdbd3756e-kube-api-access-jdsjz\") pod \"barbican-db-create-287xw\" (UID: \"5dedf934-ea25-4736-9220-80efdbd3756e\") " pod="openstack/barbican-db-create-287xw" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.564455 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49b88fe9-5c45-423c-ab21-fbd5e2bb14c8-operator-scripts\") pod \"cinder-db-create-78rv4\" (UID: \"49b88fe9-5c45-423c-ab21-fbd5e2bb14c8\") " pod="openstack/cinder-db-create-78rv4" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.598219 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-2596-account-create-update-krr9x"] Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.599787 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-2596-account-create-update-krr9x" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.622576 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.622862 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5968\" (UniqueName: \"kubernetes.io/projected/49b88fe9-5c45-423c-ab21-fbd5e2bb14c8-kube-api-access-s5968\") pod \"cinder-db-create-78rv4\" (UID: \"49b88fe9-5c45-423c-ab21-fbd5e2bb14c8\") " pod="openstack/cinder-db-create-78rv4" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.630994 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-2596-account-create-update-krr9x"] Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.663616 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdsjz\" (UniqueName: \"kubernetes.io/projected/5dedf934-ea25-4736-9220-80efdbd3756e-kube-api-access-jdsjz\") pod \"barbican-db-create-287xw\" (UID: \"5dedf934-ea25-4736-9220-80efdbd3756e\") " pod="openstack/barbican-db-create-287xw" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.663768 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5dedf934-ea25-4736-9220-80efdbd3756e-operator-scripts\") pod \"barbican-db-create-287xw\" (UID: \"5dedf934-ea25-4736-9220-80efdbd3756e\") " pod="openstack/barbican-db-create-287xw" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.663805 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgp4k\" (UniqueName: \"kubernetes.io/projected/2be44422-3e3d-4564-9d1b-0a3489a93dac-kube-api-access-mgp4k\") pod \"cinder-6f3a-account-create-update-4dqdw\" (UID: \"2be44422-3e3d-4564-9d1b-0a3489a93dac\") " pod="openstack/cinder-6f3a-account-create-update-4dqdw" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.663854 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d30c76d9-61bb-4891-bb73-c2e41fcdf0b5-operator-scripts\") pod \"barbican-2596-account-create-update-krr9x\" (UID: \"d30c76d9-61bb-4891-bb73-c2e41fcdf0b5\") " pod="openstack/barbican-2596-account-create-update-krr9x" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.663910 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kv9tf\" (UniqueName: \"kubernetes.io/projected/d30c76d9-61bb-4891-bb73-c2e41fcdf0b5-kube-api-access-kv9tf\") pod \"barbican-2596-account-create-update-krr9x\" (UID: \"d30c76d9-61bb-4891-bb73-c2e41fcdf0b5\") " pod="openstack/barbican-2596-account-create-update-krr9x" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.663979 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2be44422-3e3d-4564-9d1b-0a3489a93dac-operator-scripts\") pod \"cinder-6f3a-account-create-update-4dqdw\" (UID: \"2be44422-3e3d-4564-9d1b-0a3489a93dac\") " pod="openstack/cinder-6f3a-account-create-update-4dqdw" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.665611 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/5dedf934-ea25-4736-9220-80efdbd3756e-operator-scripts\") pod \"barbican-db-create-287xw\" (UID: \"5dedf934-ea25-4736-9220-80efdbd3756e\") " pod="openstack/barbican-db-create-287xw" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.695297 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdsjz\" (UniqueName: \"kubernetes.io/projected/5dedf934-ea25-4736-9220-80efdbd3756e-kube-api-access-jdsjz\") pod \"barbican-db-create-287xw\" (UID: \"5dedf934-ea25-4736-9220-80efdbd3756e\") " pod="openstack/barbican-db-create-287xw" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.701241 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-78rv4" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.765419 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgp4k\" (UniqueName: \"kubernetes.io/projected/2be44422-3e3d-4564-9d1b-0a3489a93dac-kube-api-access-mgp4k\") pod \"cinder-6f3a-account-create-update-4dqdw\" (UID: \"2be44422-3e3d-4564-9d1b-0a3489a93dac\") " pod="openstack/cinder-6f3a-account-create-update-4dqdw" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.765501 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d30c76d9-61bb-4891-bb73-c2e41fcdf0b5-operator-scripts\") pod \"barbican-2596-account-create-update-krr9x\" (UID: \"d30c76d9-61bb-4891-bb73-c2e41fcdf0b5\") " pod="openstack/barbican-2596-account-create-update-krr9x" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.765600 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kv9tf\" (UniqueName: \"kubernetes.io/projected/d30c76d9-61bb-4891-bb73-c2e41fcdf0b5-kube-api-access-kv9tf\") pod \"barbican-2596-account-create-update-krr9x\" (UID: \"d30c76d9-61bb-4891-bb73-c2e41fcdf0b5\") " pod="openstack/barbican-2596-account-create-update-krr9x" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.765632 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2be44422-3e3d-4564-9d1b-0a3489a93dac-operator-scripts\") pod \"cinder-6f3a-account-create-update-4dqdw\" (UID: \"2be44422-3e3d-4564-9d1b-0a3489a93dac\") " pod="openstack/cinder-6f3a-account-create-update-4dqdw" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.766680 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2be44422-3e3d-4564-9d1b-0a3489a93dac-operator-scripts\") pod \"cinder-6f3a-account-create-update-4dqdw\" (UID: \"2be44422-3e3d-4564-9d1b-0a3489a93dac\") " pod="openstack/cinder-6f3a-account-create-update-4dqdw" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.766686 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d30c76d9-61bb-4891-bb73-c2e41fcdf0b5-operator-scripts\") pod \"barbican-2596-account-create-update-krr9x\" (UID: \"d30c76d9-61bb-4891-bb73-c2e41fcdf0b5\") " pod="openstack/barbican-2596-account-create-update-krr9x" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.798263 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgp4k\" (UniqueName: \"kubernetes.io/projected/2be44422-3e3d-4564-9d1b-0a3489a93dac-kube-api-access-mgp4k\") pod 
\"cinder-6f3a-account-create-update-4dqdw\" (UID: \"2be44422-3e3d-4564-9d1b-0a3489a93dac\") " pod="openstack/cinder-6f3a-account-create-update-4dqdw" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.802631 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-t2rfb"] Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.804179 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-t2rfb" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.816641 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-t2rfb"] Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.821335 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-287xw" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.832116 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kv9tf\" (UniqueName: \"kubernetes.io/projected/d30c76d9-61bb-4891-bb73-c2e41fcdf0b5-kube-api-access-kv9tf\") pod \"barbican-2596-account-create-update-krr9x\" (UID: \"d30c76d9-61bb-4891-bb73-c2e41fcdf0b5\") " pod="openstack/barbican-2596-account-create-update-krr9x" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.840781 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-hcxsg"] Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.842205 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-hcxsg" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.847007 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.847264 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.847504 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.847686 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gcnm2" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.866977 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-6f3a-account-create-update-4dqdw" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.868819 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6x85\" (UniqueName: \"kubernetes.io/projected/ffa0ba8c-c0c5-4283-84eb-427a9def8359-kube-api-access-x6x85\") pod \"neutron-db-create-t2rfb\" (UID: \"ffa0ba8c-c0c5-4283-84eb-427a9def8359\") " pod="openstack/neutron-db-create-t2rfb" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.868947 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ffa0ba8c-c0c5-4283-84eb-427a9def8359-operator-scripts\") pod \"neutron-db-create-t2rfb\" (UID: \"ffa0ba8c-c0c5-4283-84eb-427a9def8359\") " pod="openstack/neutron-db-create-t2rfb" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.949029 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-hcxsg"] Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.959109 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-1af4-account-create-update-fz7bt"] Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.960569 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-1af4-account-create-update-fz7bt" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.968396 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.974181 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2596-account-create-update-krr9x" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.981456 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/201f9996-de6c-49c4-916c-78946e7d1144-config-data\") pod \"keystone-db-sync-hcxsg\" (UID: \"201f9996-de6c-49c4-916c-78946e7d1144\") " pod="openstack/keystone-db-sync-hcxsg" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.985344 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ffa0ba8c-c0c5-4283-84eb-427a9def8359-operator-scripts\") pod \"neutron-db-create-t2rfb\" (UID: \"ffa0ba8c-c0c5-4283-84eb-427a9def8359\") " pod="openstack/neutron-db-create-t2rfb" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.986129 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlfdj\" (UniqueName: \"kubernetes.io/projected/201f9996-de6c-49c4-916c-78946e7d1144-kube-api-access-qlfdj\") pod \"keystone-db-sync-hcxsg\" (UID: \"201f9996-de6c-49c4-916c-78946e7d1144\") " pod="openstack/keystone-db-sync-hcxsg" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.986230 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6x85\" (UniqueName: \"kubernetes.io/projected/ffa0ba8c-c0c5-4283-84eb-427a9def8359-kube-api-access-x6x85\") pod \"neutron-db-create-t2rfb\" (UID: \"ffa0ba8c-c0c5-4283-84eb-427a9def8359\") " pod="openstack/neutron-db-create-t2rfb" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.986433 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/ffa0ba8c-c0c5-4283-84eb-427a9def8359-operator-scripts\") pod \"neutron-db-create-t2rfb\" (UID: \"ffa0ba8c-c0c5-4283-84eb-427a9def8359\") " pod="openstack/neutron-db-create-t2rfb" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.986443 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/201f9996-de6c-49c4-916c-78946e7d1144-combined-ca-bundle\") pod \"keystone-db-sync-hcxsg\" (UID: \"201f9996-de6c-49c4-916c-78946e7d1144\") " pod="openstack/keystone-db-sync-hcxsg" Nov 27 07:14:47 crc kubenswrapper[4971]: I1127 07:14:47.993674 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-1af4-account-create-update-fz7bt"] Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.017217 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6x85\" (UniqueName: \"kubernetes.io/projected/ffa0ba8c-c0c5-4283-84eb-427a9def8359-kube-api-access-x6x85\") pod \"neutron-db-create-t2rfb\" (UID: \"ffa0ba8c-c0c5-4283-84eb-427a9def8359\") " pod="openstack/neutron-db-create-t2rfb" Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.088626 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe01235a-8eb5-49d8-afeb-32fc11bde3cc-operator-scripts\") pod \"neutron-1af4-account-create-update-fz7bt\" (UID: \"fe01235a-8eb5-49d8-afeb-32fc11bde3cc\") " pod="openstack/neutron-1af4-account-create-update-fz7bt" Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.088744 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/201f9996-de6c-49c4-916c-78946e7d1144-combined-ca-bundle\") pod \"keystone-db-sync-hcxsg\" (UID: \"201f9996-de6c-49c4-916c-78946e7d1144\") " pod="openstack/keystone-db-sync-hcxsg" Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.088793 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/201f9996-de6c-49c4-916c-78946e7d1144-config-data\") pod \"keystone-db-sync-hcxsg\" (UID: \"201f9996-de6c-49c4-916c-78946e7d1144\") " pod="openstack/keystone-db-sync-hcxsg" Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.088825 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqhxw\" (UniqueName: \"kubernetes.io/projected/fe01235a-8eb5-49d8-afeb-32fc11bde3cc-kube-api-access-dqhxw\") pod \"neutron-1af4-account-create-update-fz7bt\" (UID: \"fe01235a-8eb5-49d8-afeb-32fc11bde3cc\") " pod="openstack/neutron-1af4-account-create-update-fz7bt" Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.088904 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlfdj\" (UniqueName: \"kubernetes.io/projected/201f9996-de6c-49c4-916c-78946e7d1144-kube-api-access-qlfdj\") pod \"keystone-db-sync-hcxsg\" (UID: \"201f9996-de6c-49c4-916c-78946e7d1144\") " pod="openstack/keystone-db-sync-hcxsg" Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.093550 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/201f9996-de6c-49c4-916c-78946e7d1144-config-data\") pod \"keystone-db-sync-hcxsg\" (UID: \"201f9996-de6c-49c4-916c-78946e7d1144\") " pod="openstack/keystone-db-sync-hcxsg" Nov 
27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.097214 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/201f9996-de6c-49c4-916c-78946e7d1144-combined-ca-bundle\") pod \"keystone-db-sync-hcxsg\" (UID: \"201f9996-de6c-49c4-916c-78946e7d1144\") " pod="openstack/keystone-db-sync-hcxsg" Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.113802 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlfdj\" (UniqueName: \"kubernetes.io/projected/201f9996-de6c-49c4-916c-78946e7d1144-kube-api-access-qlfdj\") pod \"keystone-db-sync-hcxsg\" (UID: \"201f9996-de6c-49c4-916c-78946e7d1144\") " pod="openstack/keystone-db-sync-hcxsg" Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.179911 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-t2rfb" Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.189899 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe01235a-8eb5-49d8-afeb-32fc11bde3cc-operator-scripts\") pod \"neutron-1af4-account-create-update-fz7bt\" (UID: \"fe01235a-8eb5-49d8-afeb-32fc11bde3cc\") " pod="openstack/neutron-1af4-account-create-update-fz7bt" Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.189977 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqhxw\" (UniqueName: \"kubernetes.io/projected/fe01235a-8eb5-49d8-afeb-32fc11bde3cc-kube-api-access-dqhxw\") pod \"neutron-1af4-account-create-update-fz7bt\" (UID: \"fe01235a-8eb5-49d8-afeb-32fc11bde3cc\") " pod="openstack/neutron-1af4-account-create-update-fz7bt" Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.190742 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe01235a-8eb5-49d8-afeb-32fc11bde3cc-operator-scripts\") pod \"neutron-1af4-account-create-update-fz7bt\" (UID: \"fe01235a-8eb5-49d8-afeb-32fc11bde3cc\") " pod="openstack/neutron-1af4-account-create-update-fz7bt" Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.197605 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-hcxsg" Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.215404 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqhxw\" (UniqueName: \"kubernetes.io/projected/fe01235a-8eb5-49d8-afeb-32fc11bde3cc-kube-api-access-dqhxw\") pod \"neutron-1af4-account-create-update-fz7bt\" (UID: \"fe01235a-8eb5-49d8-afeb-32fc11bde3cc\") " pod="openstack/neutron-1af4-account-create-update-fz7bt" Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.295245 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-1af4-account-create-update-fz7bt" Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.370596 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-78rv4"] Nov 27 07:14:48 crc kubenswrapper[4971]: W1127 07:14:48.396855 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49b88fe9_5c45_423c_ab21_fbd5e2bb14c8.slice/crio-aac2dd380613b768cab4a80e0a3fd8b98675c650e163a2fbd933ff8d8a6f14a6 WatchSource:0}: Error finding container aac2dd380613b768cab4a80e0a3fd8b98675c650e163a2fbd933ff8d8a6f14a6: Status 404 returned error can't find the container with id aac2dd380613b768cab4a80e0a3fd8b98675c650e163a2fbd933ff8d8a6f14a6 Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.484719 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-78rv4" event={"ID":"49b88fe9-5c45-423c-ab21-fbd5e2bb14c8","Type":"ContainerStarted","Data":"aac2dd380613b768cab4a80e0a3fd8b98675c650e163a2fbd933ff8d8a6f14a6"} Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.492748 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-287xw"] Nov 27 07:14:48 crc kubenswrapper[4971]: W1127 07:14:48.554346 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2be44422_3e3d_4564_9d1b_0a3489a93dac.slice/crio-ec1ac04f20d3b31b840e5be2305ac54b730b0409a226711b301688b8413c03bb WatchSource:0}: Error finding container ec1ac04f20d3b31b840e5be2305ac54b730b0409a226711b301688b8413c03bb: Status 404 returned error can't find the container with id ec1ac04f20d3b31b840e5be2305ac54b730b0409a226711b301688b8413c03bb Nov 27 07:14:48 crc kubenswrapper[4971]: W1127 07:14:48.582110 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd30c76d9_61bb_4891_bb73_c2e41fcdf0b5.slice/crio-58605badc7b1290d69837b18ae6e8bd3d095dfe8378f8e8c6d7d7a679fce21a9 WatchSource:0}: Error finding container 58605badc7b1290d69837b18ae6e8bd3d095dfe8378f8e8c6d7d7a679fce21a9: Status 404 returned error can't find the container with id 58605badc7b1290d69837b18ae6e8bd3d095dfe8378f8e8c6d7d7a679fce21a9 Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.593298 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-6f3a-account-create-update-4dqdw"] Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.593340 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-2596-account-create-update-krr9x"] Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.626723 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-t2rfb"] Nov 27 07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.692709 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-hcxsg"] Nov 27 07:14:48 crc kubenswrapper[4971]: W1127 07:14:48.719663 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod201f9996_de6c_49c4_916c_78946e7d1144.slice/crio-74faf445287c7b9be4294b915ad78d13caacd71f6a0b54e7c9fb121c4a479a58 WatchSource:0}: Error finding container 74faf445287c7b9be4294b915ad78d13caacd71f6a0b54e7c9fb121c4a479a58: Status 404 returned error can't find the container with id 74faf445287c7b9be4294b915ad78d13caacd71f6a0b54e7c9fb121c4a479a58 Nov 27 
07:14:48 crc kubenswrapper[4971]: I1127 07:14:48.905774 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-1af4-account-create-update-fz7bt"] Nov 27 07:14:48 crc kubenswrapper[4971]: W1127 07:14:48.910062 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe01235a_8eb5_49d8_afeb_32fc11bde3cc.slice/crio-1b864af6472fa4b0c81fa861ea1deafa7ab692579b0604ae090d391ed968834a WatchSource:0}: Error finding container 1b864af6472fa4b0c81fa861ea1deafa7ab692579b0604ae090d391ed968834a: Status 404 returned error can't find the container with id 1b864af6472fa4b0c81fa861ea1deafa7ab692579b0604ae090d391ed968834a Nov 27 07:14:49 crc kubenswrapper[4971]: I1127 07:14:49.505853 4971 generic.go:334] "Generic (PLEG): container finished" podID="49b88fe9-5c45-423c-ab21-fbd5e2bb14c8" containerID="6e5293e955a6149c8c7aa1f5687fb247520e178d25695632832288aedee32ee5" exitCode=0 Nov 27 07:14:49 crc kubenswrapper[4971]: I1127 07:14:49.505933 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-78rv4" event={"ID":"49b88fe9-5c45-423c-ab21-fbd5e2bb14c8","Type":"ContainerDied","Data":"6e5293e955a6149c8c7aa1f5687fb247520e178d25695632832288aedee32ee5"} Nov 27 07:14:49 crc kubenswrapper[4971]: I1127 07:14:49.510943 4971 generic.go:334] "Generic (PLEG): container finished" podID="2be44422-3e3d-4564-9d1b-0a3489a93dac" containerID="fc558568c9d543263589e1931a3c435d97461a215fdc5e59dd09dce5d9f01ade" exitCode=0 Nov 27 07:14:49 crc kubenswrapper[4971]: I1127 07:14:49.511012 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-6f3a-account-create-update-4dqdw" event={"ID":"2be44422-3e3d-4564-9d1b-0a3489a93dac","Type":"ContainerDied","Data":"fc558568c9d543263589e1931a3c435d97461a215fdc5e59dd09dce5d9f01ade"} Nov 27 07:14:49 crc kubenswrapper[4971]: I1127 07:14:49.511043 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-6f3a-account-create-update-4dqdw" event={"ID":"2be44422-3e3d-4564-9d1b-0a3489a93dac","Type":"ContainerStarted","Data":"ec1ac04f20d3b31b840e5be2305ac54b730b0409a226711b301688b8413c03bb"} Nov 27 07:14:49 crc kubenswrapper[4971]: I1127 07:14:49.513305 4971 generic.go:334] "Generic (PLEG): container finished" podID="d30c76d9-61bb-4891-bb73-c2e41fcdf0b5" containerID="1e163b91b3515a2dbbcef9e7284d68670096a3a915c4458561ff3dd7b93abebc" exitCode=0 Nov 27 07:14:49 crc kubenswrapper[4971]: I1127 07:14:49.513373 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2596-account-create-update-krr9x" event={"ID":"d30c76d9-61bb-4891-bb73-c2e41fcdf0b5","Type":"ContainerDied","Data":"1e163b91b3515a2dbbcef9e7284d68670096a3a915c4458561ff3dd7b93abebc"} Nov 27 07:14:49 crc kubenswrapper[4971]: I1127 07:14:49.513394 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2596-account-create-update-krr9x" event={"ID":"d30c76d9-61bb-4891-bb73-c2e41fcdf0b5","Type":"ContainerStarted","Data":"58605badc7b1290d69837b18ae6e8bd3d095dfe8378f8e8c6d7d7a679fce21a9"} Nov 27 07:14:49 crc kubenswrapper[4971]: I1127 07:14:49.518422 4971 generic.go:334] "Generic (PLEG): container finished" podID="5dedf934-ea25-4736-9220-80efdbd3756e" containerID="15f2b3adb5a54ba7ac4520153e7954e26f56a37b66305e65275cb91b2a888859" exitCode=0 Nov 27 07:14:49 crc kubenswrapper[4971]: I1127 07:14:49.518503 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-287xw" 
event={"ID":"5dedf934-ea25-4736-9220-80efdbd3756e","Type":"ContainerDied","Data":"15f2b3adb5a54ba7ac4520153e7954e26f56a37b66305e65275cb91b2a888859"} Nov 27 07:14:49 crc kubenswrapper[4971]: I1127 07:14:49.518559 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-287xw" event={"ID":"5dedf934-ea25-4736-9220-80efdbd3756e","Type":"ContainerStarted","Data":"7fea98093d2cbfbfed6c1dea841f90fad36f164e924cc7b6463eb7c8912dbe25"} Nov 27 07:14:49 crc kubenswrapper[4971]: I1127 07:14:49.520895 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-1af4-account-create-update-fz7bt" event={"ID":"fe01235a-8eb5-49d8-afeb-32fc11bde3cc","Type":"ContainerStarted","Data":"99b269fcf95dc6f0a32925b3507b866cc4131b10ea3efe81550f10df524dc91e"} Nov 27 07:14:49 crc kubenswrapper[4971]: I1127 07:14:49.520938 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-1af4-account-create-update-fz7bt" event={"ID":"fe01235a-8eb5-49d8-afeb-32fc11bde3cc","Type":"ContainerStarted","Data":"1b864af6472fa4b0c81fa861ea1deafa7ab692579b0604ae090d391ed968834a"} Nov 27 07:14:49 crc kubenswrapper[4971]: I1127 07:14:49.522681 4971 generic.go:334] "Generic (PLEG): container finished" podID="ffa0ba8c-c0c5-4283-84eb-427a9def8359" containerID="73523b2b33bbdbab3268c46a389de99191a628958e5fbee9adae95574ea7a1c6" exitCode=0 Nov 27 07:14:49 crc kubenswrapper[4971]: I1127 07:14:49.522731 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-t2rfb" event={"ID":"ffa0ba8c-c0c5-4283-84eb-427a9def8359","Type":"ContainerDied","Data":"73523b2b33bbdbab3268c46a389de99191a628958e5fbee9adae95574ea7a1c6"} Nov 27 07:14:49 crc kubenswrapper[4971]: I1127 07:14:49.522749 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-t2rfb" event={"ID":"ffa0ba8c-c0c5-4283-84eb-427a9def8359","Type":"ContainerStarted","Data":"66fc97d6b07fac5e33efe8ca285428031fabf1d709670db35fccc4c1f7eee05f"} Nov 27 07:14:49 crc kubenswrapper[4971]: I1127 07:14:49.529407 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-hcxsg" event={"ID":"201f9996-de6c-49c4-916c-78946e7d1144","Type":"ContainerStarted","Data":"74faf445287c7b9be4294b915ad78d13caacd71f6a0b54e7c9fb121c4a479a58"} Nov 27 07:14:49 crc kubenswrapper[4971]: I1127 07:14:49.567696 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-1af4-account-create-update-fz7bt" podStartSLOduration=2.567674793 podStartE2EDuration="2.567674793s" podCreationTimestamp="2025-11-27 07:14:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:14:49.561789867 +0000 UTC m=+1327.753833785" watchObservedRunningTime="2025-11-27 07:14:49.567674793 +0000 UTC m=+1327.759718701" Nov 27 07:14:50 crc kubenswrapper[4971]: I1127 07:14:50.539516 4971 generic.go:334] "Generic (PLEG): container finished" podID="fe01235a-8eb5-49d8-afeb-32fc11bde3cc" containerID="99b269fcf95dc6f0a32925b3507b866cc4131b10ea3efe81550f10df524dc91e" exitCode=0 Nov 27 07:14:50 crc kubenswrapper[4971]: I1127 07:14:50.540004 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-1af4-account-create-update-fz7bt" event={"ID":"fe01235a-8eb5-49d8-afeb-32fc11bde3cc","Type":"ContainerDied","Data":"99b269fcf95dc6f0a32925b3507b866cc4131b10ea3efe81550f10df524dc91e"} Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.014437 4971 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2596-account-create-update-krr9x" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.078353 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kv9tf\" (UniqueName: \"kubernetes.io/projected/d30c76d9-61bb-4891-bb73-c2e41fcdf0b5-kube-api-access-kv9tf\") pod \"d30c76d9-61bb-4891-bb73-c2e41fcdf0b5\" (UID: \"d30c76d9-61bb-4891-bb73-c2e41fcdf0b5\") " Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.078596 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d30c76d9-61bb-4891-bb73-c2e41fcdf0b5-operator-scripts\") pod \"d30c76d9-61bb-4891-bb73-c2e41fcdf0b5\" (UID: \"d30c76d9-61bb-4891-bb73-c2e41fcdf0b5\") " Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.079276 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d30c76d9-61bb-4891-bb73-c2e41fcdf0b5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d30c76d9-61bb-4891-bb73-c2e41fcdf0b5" (UID: "d30c76d9-61bb-4891-bb73-c2e41fcdf0b5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.087914 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d30c76d9-61bb-4891-bb73-c2e41fcdf0b5-kube-api-access-kv9tf" (OuterVolumeSpecName: "kube-api-access-kv9tf") pod "d30c76d9-61bb-4891-bb73-c2e41fcdf0b5" (UID: "d30c76d9-61bb-4891-bb73-c2e41fcdf0b5"). InnerVolumeSpecName "kube-api-access-kv9tf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.180178 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d30c76d9-61bb-4891-bb73-c2e41fcdf0b5-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.180215 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kv9tf\" (UniqueName: \"kubernetes.io/projected/d30c76d9-61bb-4891-bb73-c2e41fcdf0b5-kube-api-access-kv9tf\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.200343 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-6f3a-account-create-update-4dqdw" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.206185 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-78rv4" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.228373 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-287xw" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.233632 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-t2rfb" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.280948 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgp4k\" (UniqueName: \"kubernetes.io/projected/2be44422-3e3d-4564-9d1b-0a3489a93dac-kube-api-access-mgp4k\") pod \"2be44422-3e3d-4564-9d1b-0a3489a93dac\" (UID: \"2be44422-3e3d-4564-9d1b-0a3489a93dac\") " Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.281456 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2be44422-3e3d-4564-9d1b-0a3489a93dac-operator-scripts\") pod \"2be44422-3e3d-4564-9d1b-0a3489a93dac\" (UID: \"2be44422-3e3d-4564-9d1b-0a3489a93dac\") " Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.281695 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5968\" (UniqueName: \"kubernetes.io/projected/49b88fe9-5c45-423c-ab21-fbd5e2bb14c8-kube-api-access-s5968\") pod \"49b88fe9-5c45-423c-ab21-fbd5e2bb14c8\" (UID: \"49b88fe9-5c45-423c-ab21-fbd5e2bb14c8\") " Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.281796 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49b88fe9-5c45-423c-ab21-fbd5e2bb14c8-operator-scripts\") pod \"49b88fe9-5c45-423c-ab21-fbd5e2bb14c8\" (UID: \"49b88fe9-5c45-423c-ab21-fbd5e2bb14c8\") " Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.285814 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2be44422-3e3d-4564-9d1b-0a3489a93dac-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2be44422-3e3d-4564-9d1b-0a3489a93dac" (UID: "2be44422-3e3d-4564-9d1b-0a3489a93dac"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.288140 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2be44422-3e3d-4564-9d1b-0a3489a93dac-kube-api-access-mgp4k" (OuterVolumeSpecName: "kube-api-access-mgp4k") pod "2be44422-3e3d-4564-9d1b-0a3489a93dac" (UID: "2be44422-3e3d-4564-9d1b-0a3489a93dac"). InnerVolumeSpecName "kube-api-access-mgp4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.289384 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49b88fe9-5c45-423c-ab21-fbd5e2bb14c8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "49b88fe9-5c45-423c-ab21-fbd5e2bb14c8" (UID: "49b88fe9-5c45-423c-ab21-fbd5e2bb14c8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.294793 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49b88fe9-5c45-423c-ab21-fbd5e2bb14c8-kube-api-access-s5968" (OuterVolumeSpecName: "kube-api-access-s5968") pod "49b88fe9-5c45-423c-ab21-fbd5e2bb14c8" (UID: "49b88fe9-5c45-423c-ab21-fbd5e2bb14c8"). InnerVolumeSpecName "kube-api-access-s5968". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.383080 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdsjz\" (UniqueName: \"kubernetes.io/projected/5dedf934-ea25-4736-9220-80efdbd3756e-kube-api-access-jdsjz\") pod \"5dedf934-ea25-4736-9220-80efdbd3756e\" (UID: \"5dedf934-ea25-4736-9220-80efdbd3756e\") " Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.383279 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5dedf934-ea25-4736-9220-80efdbd3756e-operator-scripts\") pod \"5dedf934-ea25-4736-9220-80efdbd3756e\" (UID: \"5dedf934-ea25-4736-9220-80efdbd3756e\") " Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.385265 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ffa0ba8c-c0c5-4283-84eb-427a9def8359-operator-scripts\") pod \"ffa0ba8c-c0c5-4283-84eb-427a9def8359\" (UID: \"ffa0ba8c-c0c5-4283-84eb-427a9def8359\") " Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.385296 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6x85\" (UniqueName: \"kubernetes.io/projected/ffa0ba8c-c0c5-4283-84eb-427a9def8359-kube-api-access-x6x85\") pod \"ffa0ba8c-c0c5-4283-84eb-427a9def8359\" (UID: \"ffa0ba8c-c0c5-4283-84eb-427a9def8359\") " Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.385790 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2be44422-3e3d-4564-9d1b-0a3489a93dac-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.385812 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5968\" (UniqueName: \"kubernetes.io/projected/49b88fe9-5c45-423c-ab21-fbd5e2bb14c8-kube-api-access-s5968\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.385825 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49b88fe9-5c45-423c-ab21-fbd5e2bb14c8-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.385836 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgp4k\" (UniqueName: \"kubernetes.io/projected/2be44422-3e3d-4564-9d1b-0a3489a93dac-kube-api-access-mgp4k\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.386295 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5dedf934-ea25-4736-9220-80efdbd3756e-kube-api-access-jdsjz" (OuterVolumeSpecName: "kube-api-access-jdsjz") pod "5dedf934-ea25-4736-9220-80efdbd3756e" (UID: "5dedf934-ea25-4736-9220-80efdbd3756e"). InnerVolumeSpecName "kube-api-access-jdsjz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.386368 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5dedf934-ea25-4736-9220-80efdbd3756e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5dedf934-ea25-4736-9220-80efdbd3756e" (UID: "5dedf934-ea25-4736-9220-80efdbd3756e"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.386639 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffa0ba8c-c0c5-4283-84eb-427a9def8359-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ffa0ba8c-c0c5-4283-84eb-427a9def8359" (UID: "ffa0ba8c-c0c5-4283-84eb-427a9def8359"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.388682 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffa0ba8c-c0c5-4283-84eb-427a9def8359-kube-api-access-x6x85" (OuterVolumeSpecName: "kube-api-access-x6x85") pod "ffa0ba8c-c0c5-4283-84eb-427a9def8359" (UID: "ffa0ba8c-c0c5-4283-84eb-427a9def8359"). InnerVolumeSpecName "kube-api-access-x6x85". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.487989 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ffa0ba8c-c0c5-4283-84eb-427a9def8359-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.488018 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6x85\" (UniqueName: \"kubernetes.io/projected/ffa0ba8c-c0c5-4283-84eb-427a9def8359-kube-api-access-x6x85\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.488029 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdsjz\" (UniqueName: \"kubernetes.io/projected/5dedf934-ea25-4736-9220-80efdbd3756e-kube-api-access-jdsjz\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.488038 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5dedf934-ea25-4736-9220-80efdbd3756e-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.551465 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-287xw" event={"ID":"5dedf934-ea25-4736-9220-80efdbd3756e","Type":"ContainerDied","Data":"7fea98093d2cbfbfed6c1dea841f90fad36f164e924cc7b6463eb7c8912dbe25"} Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.551729 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7fea98093d2cbfbfed6c1dea841f90fad36f164e924cc7b6463eb7c8912dbe25" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.551772 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-287xw" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.561856 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-t2rfb" event={"ID":"ffa0ba8c-c0c5-4283-84eb-427a9def8359","Type":"ContainerDied","Data":"66fc97d6b07fac5e33efe8ca285428031fabf1d709670db35fccc4c1f7eee05f"} Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.561886 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66fc97d6b07fac5e33efe8ca285428031fabf1d709670db35fccc4c1f7eee05f" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.561943 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-t2rfb" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.568563 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-78rv4" event={"ID":"49b88fe9-5c45-423c-ab21-fbd5e2bb14c8","Type":"ContainerDied","Data":"aac2dd380613b768cab4a80e0a3fd8b98675c650e163a2fbd933ff8d8a6f14a6"} Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.568606 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aac2dd380613b768cab4a80e0a3fd8b98675c650e163a2fbd933ff8d8a6f14a6" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.568692 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-78rv4" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.576944 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-6f3a-account-create-update-4dqdw" event={"ID":"2be44422-3e3d-4564-9d1b-0a3489a93dac","Type":"ContainerDied","Data":"ec1ac04f20d3b31b840e5be2305ac54b730b0409a226711b301688b8413c03bb"} Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.577010 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec1ac04f20d3b31b840e5be2305ac54b730b0409a226711b301688b8413c03bb" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.576960 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-6f3a-account-create-update-4dqdw" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.580961 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2596-account-create-update-krr9x" Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.582522 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2596-account-create-update-krr9x" event={"ID":"d30c76d9-61bb-4891-bb73-c2e41fcdf0b5","Type":"ContainerDied","Data":"58605badc7b1290d69837b18ae6e8bd3d095dfe8378f8e8c6d7d7a679fce21a9"} Nov 27 07:14:51 crc kubenswrapper[4971]: I1127 07:14:51.582589 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="58605badc7b1290d69837b18ae6e8bd3d095dfe8378f8e8c6d7d7a679fce21a9" Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.146694 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.280441 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c8cb8df65-n7qt6"] Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.280755 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" podUID="8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb" containerName="dnsmasq-dns" containerID="cri-o://d089e8139b56227d8ce7e38a5d3c9d7c7321d12c9471e1e224872d3fbca70bd8" gracePeriod=10 Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.616494 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-1af4-account-create-update-fz7bt" event={"ID":"fe01235a-8eb5-49d8-afeb-32fc11bde3cc","Type":"ContainerDied","Data":"1b864af6472fa4b0c81fa861ea1deafa7ab692579b0604ae090d391ed968834a"} Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.616839 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b864af6472fa4b0c81fa861ea1deafa7ab692579b0604ae090d391ed968834a" Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 
07:14:54.620846 4971 generic.go:334] "Generic (PLEG): container finished" podID="8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb" containerID="d089e8139b56227d8ce7e38a5d3c9d7c7321d12c9471e1e224872d3fbca70bd8" exitCode=0 Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.620891 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" event={"ID":"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb","Type":"ContainerDied","Data":"d089e8139b56227d8ce7e38a5d3c9d7c7321d12c9471e1e224872d3fbca70bd8"} Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.625694 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-1af4-account-create-update-fz7bt" Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.716597 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.758871 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe01235a-8eb5-49d8-afeb-32fc11bde3cc-operator-scripts\") pod \"fe01235a-8eb5-49d8-afeb-32fc11bde3cc\" (UID: \"fe01235a-8eb5-49d8-afeb-32fc11bde3cc\") " Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.759111 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqhxw\" (UniqueName: \"kubernetes.io/projected/fe01235a-8eb5-49d8-afeb-32fc11bde3cc-kube-api-access-dqhxw\") pod \"fe01235a-8eb5-49d8-afeb-32fc11bde3cc\" (UID: \"fe01235a-8eb5-49d8-afeb-32fc11bde3cc\") " Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.759693 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe01235a-8eb5-49d8-afeb-32fc11bde3cc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fe01235a-8eb5-49d8-afeb-32fc11bde3cc" (UID: "fe01235a-8eb5-49d8-afeb-32fc11bde3cc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.764218 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe01235a-8eb5-49d8-afeb-32fc11bde3cc-kube-api-access-dqhxw" (OuterVolumeSpecName: "kube-api-access-dqhxw") pod "fe01235a-8eb5-49d8-afeb-32fc11bde3cc" (UID: "fe01235a-8eb5-49d8-afeb-32fc11bde3cc"). InnerVolumeSpecName "kube-api-access-dqhxw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.861130 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-config\") pod \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.861253 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86s6x\" (UniqueName: \"kubernetes.io/projected/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-kube-api-access-86s6x\") pod \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.861312 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-ovsdbserver-sb\") pod \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.861370 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-ovsdbserver-nb\") pod \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.861495 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-dns-svc\") pod \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\" (UID: \"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb\") " Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.861970 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe01235a-8eb5-49d8-afeb-32fc11bde3cc-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.861997 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqhxw\" (UniqueName: \"kubernetes.io/projected/fe01235a-8eb5-49d8-afeb-32fc11bde3cc-kube-api-access-dqhxw\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.864960 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-kube-api-access-86s6x" (OuterVolumeSpecName: "kube-api-access-86s6x") pod "8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb" (UID: "8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb"). InnerVolumeSpecName "kube-api-access-86s6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.903795 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb" (UID: "8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.912174 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb" (UID: "8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.914230 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb" (UID: "8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.919575 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-config" (OuterVolumeSpecName: "config") pod "8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb" (UID: "8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.963634 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86s6x\" (UniqueName: \"kubernetes.io/projected/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-kube-api-access-86s6x\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.963678 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.963690 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.963703 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:54 crc kubenswrapper[4971]: I1127 07:14:54.963713 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:55 crc kubenswrapper[4971]: I1127 07:14:55.630313 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-hcxsg" event={"ID":"201f9996-de6c-49c4-916c-78946e7d1144","Type":"ContainerStarted","Data":"e5320499a3f1a0d1809604b9208a63f4d3d044c098be5c08c36f1210c9322b8b"} Nov 27 07:14:55 crc kubenswrapper[4971]: I1127 07:14:55.633298 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" event={"ID":"8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb","Type":"ContainerDied","Data":"5ea9966f723aeea730e49cc7a5c3b87b631d27fe0e2d13125b22912fc38e7618"} Nov 27 07:14:55 crc kubenswrapper[4971]: I1127 07:14:55.633344 4971 scope.go:117] "RemoveContainer" containerID="d089e8139b56227d8ce7e38a5d3c9d7c7321d12c9471e1e224872d3fbca70bd8" Nov 27 07:14:55 crc kubenswrapper[4971]: I1127 07:14:55.633440 4971 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c8cb8df65-n7qt6" Nov 27 07:14:55 crc kubenswrapper[4971]: I1127 07:14:55.633666 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-1af4-account-create-update-fz7bt" Nov 27 07:14:55 crc kubenswrapper[4971]: I1127 07:14:55.668008 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-hcxsg" podStartSLOduration=2.945284127 podStartE2EDuration="8.667981993s" podCreationTimestamp="2025-11-27 07:14:47 +0000 UTC" firstStartedPulling="2025-11-27 07:14:48.739741445 +0000 UTC m=+1326.931785403" lastFinishedPulling="2025-11-27 07:14:54.462439331 +0000 UTC m=+1332.654483269" observedRunningTime="2025-11-27 07:14:55.663023353 +0000 UTC m=+1333.855067281" watchObservedRunningTime="2025-11-27 07:14:55.667981993 +0000 UTC m=+1333.860025911" Nov 27 07:14:55 crc kubenswrapper[4971]: I1127 07:14:55.682648 4971 scope.go:117] "RemoveContainer" containerID="1503ad59fe2f7a731dc4ddb7d6b70443caede2d1e9949aeb41cab028d1b729a9" Nov 27 07:14:55 crc kubenswrapper[4971]: I1127 07:14:55.730636 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c8cb8df65-n7qt6"] Nov 27 07:14:55 crc kubenswrapper[4971]: I1127 07:14:55.748264 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c8cb8df65-n7qt6"] Nov 27 07:14:56 crc kubenswrapper[4971]: I1127 07:14:56.413416 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:14:56 crc kubenswrapper[4971]: I1127 07:14:56.413823 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:14:56 crc kubenswrapper[4971]: I1127 07:14:56.565809 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb" path="/var/lib/kubelet/pods/8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb/volumes" Nov 27 07:14:57 crc kubenswrapper[4971]: I1127 07:14:57.651450 4971 generic.go:334] "Generic (PLEG): container finished" podID="201f9996-de6c-49c4-916c-78946e7d1144" containerID="e5320499a3f1a0d1809604b9208a63f4d3d044c098be5c08c36f1210c9322b8b" exitCode=0 Nov 27 07:14:57 crc kubenswrapper[4971]: I1127 07:14:57.651495 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-hcxsg" event={"ID":"201f9996-de6c-49c4-916c-78946e7d1144","Type":"ContainerDied","Data":"e5320499a3f1a0d1809604b9208a63f4d3d044c098be5c08c36f1210c9322b8b"} Nov 27 07:14:58 crc kubenswrapper[4971]: I1127 07:14:58.981975 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-hcxsg" Nov 27 07:14:59 crc kubenswrapper[4971]: I1127 07:14:59.049124 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/201f9996-de6c-49c4-916c-78946e7d1144-combined-ca-bundle\") pod \"201f9996-de6c-49c4-916c-78946e7d1144\" (UID: \"201f9996-de6c-49c4-916c-78946e7d1144\") " Nov 27 07:14:59 crc kubenswrapper[4971]: I1127 07:14:59.049194 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qlfdj\" (UniqueName: \"kubernetes.io/projected/201f9996-de6c-49c4-916c-78946e7d1144-kube-api-access-qlfdj\") pod \"201f9996-de6c-49c4-916c-78946e7d1144\" (UID: \"201f9996-de6c-49c4-916c-78946e7d1144\") " Nov 27 07:14:59 crc kubenswrapper[4971]: I1127 07:14:59.049390 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/201f9996-de6c-49c4-916c-78946e7d1144-config-data\") pod \"201f9996-de6c-49c4-916c-78946e7d1144\" (UID: \"201f9996-de6c-49c4-916c-78946e7d1144\") " Nov 27 07:14:59 crc kubenswrapper[4971]: I1127 07:14:59.055401 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/201f9996-de6c-49c4-916c-78946e7d1144-kube-api-access-qlfdj" (OuterVolumeSpecName: "kube-api-access-qlfdj") pod "201f9996-de6c-49c4-916c-78946e7d1144" (UID: "201f9996-de6c-49c4-916c-78946e7d1144"). InnerVolumeSpecName "kube-api-access-qlfdj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:14:59 crc kubenswrapper[4971]: I1127 07:14:59.080713 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/201f9996-de6c-49c4-916c-78946e7d1144-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "201f9996-de6c-49c4-916c-78946e7d1144" (UID: "201f9996-de6c-49c4-916c-78946e7d1144"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:14:59 crc kubenswrapper[4971]: I1127 07:14:59.100478 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/201f9996-de6c-49c4-916c-78946e7d1144-config-data" (OuterVolumeSpecName: "config-data") pod "201f9996-de6c-49c4-916c-78946e7d1144" (UID: "201f9996-de6c-49c4-916c-78946e7d1144"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:14:59 crc kubenswrapper[4971]: I1127 07:14:59.152565 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/201f9996-de6c-49c4-916c-78946e7d1144-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:59 crc kubenswrapper[4971]: I1127 07:14:59.152893 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qlfdj\" (UniqueName: \"kubernetes.io/projected/201f9996-de6c-49c4-916c-78946e7d1144-kube-api-access-qlfdj\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:59 crc kubenswrapper[4971]: I1127 07:14:59.152905 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/201f9996-de6c-49c4-916c-78946e7d1144-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:14:59 crc kubenswrapper[4971]: I1127 07:14:59.668746 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-hcxsg" event={"ID":"201f9996-de6c-49c4-916c-78946e7d1144","Type":"ContainerDied","Data":"74faf445287c7b9be4294b915ad78d13caacd71f6a0b54e7c9fb121c4a479a58"} Nov 27 07:14:59 crc kubenswrapper[4971]: I1127 07:14:59.668798 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-hcxsg" Nov 27 07:14:59 crc kubenswrapper[4971]: I1127 07:14:59.668808 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="74faf445287c7b9be4294b915ad78d13caacd71f6a0b54e7c9fb121c4a479a58" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.163866 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts"] Nov 27 07:15:00 crc kubenswrapper[4971]: E1127 07:15:00.164375 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb" containerName="init" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.164394 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb" containerName="init" Nov 27 07:15:00 crc kubenswrapper[4971]: E1127 07:15:00.164407 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe01235a-8eb5-49d8-afeb-32fc11bde3cc" containerName="mariadb-account-create-update" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.164415 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe01235a-8eb5-49d8-afeb-32fc11bde3cc" containerName="mariadb-account-create-update" Nov 27 07:15:00 crc kubenswrapper[4971]: E1127 07:15:00.164428 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d30c76d9-61bb-4891-bb73-c2e41fcdf0b5" containerName="mariadb-account-create-update" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.164436 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d30c76d9-61bb-4891-bb73-c2e41fcdf0b5" containerName="mariadb-account-create-update" Nov 27 07:15:00 crc kubenswrapper[4971]: E1127 07:15:00.164452 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dedf934-ea25-4736-9220-80efdbd3756e" containerName="mariadb-database-create" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.164459 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dedf934-ea25-4736-9220-80efdbd3756e" containerName="mariadb-database-create" Nov 27 07:15:00 crc kubenswrapper[4971]: E1127 07:15:00.164472 4971 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="2be44422-3e3d-4564-9d1b-0a3489a93dac" containerName="mariadb-account-create-update" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.164480 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="2be44422-3e3d-4564-9d1b-0a3489a93dac" containerName="mariadb-account-create-update" Nov 27 07:15:00 crc kubenswrapper[4971]: E1127 07:15:00.164499 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49b88fe9-5c45-423c-ab21-fbd5e2bb14c8" containerName="mariadb-database-create" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.164506 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="49b88fe9-5c45-423c-ab21-fbd5e2bb14c8" containerName="mariadb-database-create" Nov 27 07:15:00 crc kubenswrapper[4971]: E1127 07:15:00.164521 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb" containerName="dnsmasq-dns" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.164550 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb" containerName="dnsmasq-dns" Nov 27 07:15:00 crc kubenswrapper[4971]: E1127 07:15:00.164563 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffa0ba8c-c0c5-4283-84eb-427a9def8359" containerName="mariadb-database-create" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.164571 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffa0ba8c-c0c5-4283-84eb-427a9def8359" containerName="mariadb-database-create" Nov 27 07:15:00 crc kubenswrapper[4971]: E1127 07:15:00.164596 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="201f9996-de6c-49c4-916c-78946e7d1144" containerName="keystone-db-sync" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.164603 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="201f9996-de6c-49c4-916c-78946e7d1144" containerName="keystone-db-sync" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.164799 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="d30c76d9-61bb-4891-bb73-c2e41fcdf0b5" containerName="mariadb-account-create-update" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.164816 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="49b88fe9-5c45-423c-ab21-fbd5e2bb14c8" containerName="mariadb-database-create" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.164827 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="5dedf934-ea25-4736-9220-80efdbd3756e" containerName="mariadb-database-create" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.164845 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="2be44422-3e3d-4564-9d1b-0a3489a93dac" containerName="mariadb-account-create-update" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.164861 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe01235a-8eb5-49d8-afeb-32fc11bde3cc" containerName="mariadb-account-create-update" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.164869 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="201f9996-de6c-49c4-916c-78946e7d1144" containerName="keystone-db-sync" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.164882 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e9e2347-7f0c-4e7e-9c84-a9afaf885bdb" containerName="dnsmasq-dns" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.164890 4971 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="ffa0ba8c-c0c5-4283-84eb-427a9def8359" containerName="mariadb-database-create" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.165573 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.171427 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.171827 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.175594 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts"] Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.271623 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gbcc\" (UniqueName: \"kubernetes.io/projected/8111ff30-6af9-4f0c-95cf-6dc2460ff4bb-kube-api-access-7gbcc\") pod \"collect-profiles-29403795-sb5ts\" (UID: \"8111ff30-6af9-4f0c-95cf-6dc2460ff4bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.271891 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8111ff30-6af9-4f0c-95cf-6dc2460ff4bb-secret-volume\") pod \"collect-profiles-29403795-sb5ts\" (UID: \"8111ff30-6af9-4f0c-95cf-6dc2460ff4bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.271996 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-dfcfh"] Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.272257 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8111ff30-6af9-4f0c-95cf-6dc2460ff4bb-config-volume\") pod \"collect-profiles-29403795-sb5ts\" (UID: \"8111ff30-6af9-4f0c-95cf-6dc2460ff4bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.273378 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.280107 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.280551 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gcnm2" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.280843 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.281049 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.287214 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.306641 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85cf5c47c-97mcd"] Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.308614 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.312862 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dfcfh"] Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.334076 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85cf5c47c-97mcd"] Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.373800 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-scripts\") pod \"keystone-bootstrap-dfcfh\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.373865 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-config\") pod \"dnsmasq-dns-85cf5c47c-97mcd\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.373901 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8111ff30-6af9-4f0c-95cf-6dc2460ff4bb-config-volume\") pod \"collect-profiles-29403795-sb5ts\" (UID: \"8111ff30-6af9-4f0c-95cf-6dc2460ff4bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.373938 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-dns-swift-storage-0\") pod \"dnsmasq-dns-85cf5c47c-97mcd\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.373966 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-dns-svc\") pod \"dnsmasq-dns-85cf5c47c-97mcd\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 
07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.373998 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-ovsdbserver-sb\") pod \"dnsmasq-dns-85cf5c47c-97mcd\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.374018 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-fernet-keys\") pod \"keystone-bootstrap-dfcfh\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.374041 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2l8lm\" (UniqueName: \"kubernetes.io/projected/aae7be1b-5928-4e63-9fc0-f439a1994ce1-kube-api-access-2l8lm\") pod \"dnsmasq-dns-85cf5c47c-97mcd\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.374067 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-combined-ca-bundle\") pod \"keystone-bootstrap-dfcfh\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.374087 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-config-data\") pod \"keystone-bootstrap-dfcfh\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.374117 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-credential-keys\") pod \"keystone-bootstrap-dfcfh\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.374139 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gbcc\" (UniqueName: \"kubernetes.io/projected/8111ff30-6af9-4f0c-95cf-6dc2460ff4bb-kube-api-access-7gbcc\") pod \"collect-profiles-29403795-sb5ts\" (UID: \"8111ff30-6af9-4f0c-95cf-6dc2460ff4bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.374161 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-ovsdbserver-nb\") pod \"dnsmasq-dns-85cf5c47c-97mcd\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.374186 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvdbd\" (UniqueName: \"kubernetes.io/projected/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-kube-api-access-zvdbd\") pod 
\"keystone-bootstrap-dfcfh\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.374202 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8111ff30-6af9-4f0c-95cf-6dc2460ff4bb-secret-volume\") pod \"collect-profiles-29403795-sb5ts\" (UID: \"8111ff30-6af9-4f0c-95cf-6dc2460ff4bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.376035 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8111ff30-6af9-4f0c-95cf-6dc2460ff4bb-config-volume\") pod \"collect-profiles-29403795-sb5ts\" (UID: \"8111ff30-6af9-4f0c-95cf-6dc2460ff4bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.399781 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gbcc\" (UniqueName: \"kubernetes.io/projected/8111ff30-6af9-4f0c-95cf-6dc2460ff4bb-kube-api-access-7gbcc\") pod \"collect-profiles-29403795-sb5ts\" (UID: \"8111ff30-6af9-4f0c-95cf-6dc2460ff4bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.412919 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8111ff30-6af9-4f0c-95cf-6dc2460ff4bb-secret-volume\") pod \"collect-profiles-29403795-sb5ts\" (UID: \"8111ff30-6af9-4f0c-95cf-6dc2460ff4bb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.476168 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2l8lm\" (UniqueName: \"kubernetes.io/projected/aae7be1b-5928-4e63-9fc0-f439a1994ce1-kube-api-access-2l8lm\") pod \"dnsmasq-dns-85cf5c47c-97mcd\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.476231 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-combined-ca-bundle\") pod \"keystone-bootstrap-dfcfh\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.476256 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-config-data\") pod \"keystone-bootstrap-dfcfh\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.476287 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-credential-keys\") pod \"keystone-bootstrap-dfcfh\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.476313 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-ovsdbserver-nb\") pod \"dnsmasq-dns-85cf5c47c-97mcd\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.476337 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvdbd\" (UniqueName: \"kubernetes.io/projected/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-kube-api-access-zvdbd\") pod \"keystone-bootstrap-dfcfh\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.476428 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-scripts\") pod \"keystone-bootstrap-dfcfh\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.476471 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-config\") pod \"dnsmasq-dns-85cf5c47c-97mcd\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.476517 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-dns-swift-storage-0\") pod \"dnsmasq-dns-85cf5c47c-97mcd\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.476565 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-dns-svc\") pod \"dnsmasq-dns-85cf5c47c-97mcd\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.476796 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-ovsdbserver-sb\") pod \"dnsmasq-dns-85cf5c47c-97mcd\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.476817 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-fernet-keys\") pod \"keystone-bootstrap-dfcfh\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.481180 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-dns-swift-storage-0\") pod \"dnsmasq-dns-85cf5c47c-97mcd\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.481643 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-config\") pod \"dnsmasq-dns-85cf5c47c-97mcd\" (UID: 
\"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.482352 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-dns-svc\") pod \"dnsmasq-dns-85cf5c47c-97mcd\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.484243 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-combined-ca-bundle\") pod \"keystone-bootstrap-dfcfh\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.485016 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.485312 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-config-data\") pod \"keystone-bootstrap-dfcfh\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.486208 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-scripts\") pod \"keystone-bootstrap-dfcfh\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.486428 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-ovsdbserver-nb\") pod \"dnsmasq-dns-85cf5c47c-97mcd\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.487317 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-credential-keys\") pod \"keystone-bootstrap-dfcfh\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.495524 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-ovsdbserver-sb\") pod \"dnsmasq-dns-85cf5c47c-97mcd\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.496475 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-fernet-keys\") pod \"keystone-bootstrap-dfcfh\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.521732 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2l8lm\" (UniqueName: \"kubernetes.io/projected/aae7be1b-5928-4e63-9fc0-f439a1994ce1-kube-api-access-2l8lm\") pod 
\"dnsmasq-dns-85cf5c47c-97mcd\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.560599 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvdbd\" (UniqueName: \"kubernetes.io/projected/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-kube-api-access-zvdbd\") pod \"keystone-bootstrap-dfcfh\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.612141 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.616030 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-wsg7b"] Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.617457 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-wsg7b" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.628767 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.647612 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.647808 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-5qpx5" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.647886 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-wsg7b"] Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.647934 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.694633 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/58ccbc84-3120-4e10-a215-07d41267629d-config\") pod \"neutron-db-sync-wsg7b\" (UID: \"58ccbc84-3120-4e10-a215-07d41267629d\") " pod="openstack/neutron-db-sync-wsg7b" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.694807 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c76dw\" (UniqueName: \"kubernetes.io/projected/58ccbc84-3120-4e10-a215-07d41267629d-kube-api-access-c76dw\") pod \"neutron-db-sync-wsg7b\" (UID: \"58ccbc84-3120-4e10-a215-07d41267629d\") " pod="openstack/neutron-db-sync-wsg7b" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.694881 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ccbc84-3120-4e10-a215-07d41267629d-combined-ca-bundle\") pod \"neutron-db-sync-wsg7b\" (UID: \"58ccbc84-3120-4e10-a215-07d41267629d\") " pod="openstack/neutron-db-sync-wsg7b" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.696749 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-th4ww"] Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.710328 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.723223 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-7v5sc" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.723550 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.723764 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.754174 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-th4ww"] Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.807476 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-config-data\") pod \"cinder-db-sync-th4ww\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.807567 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-combined-ca-bundle\") pod \"cinder-db-sync-th4ww\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.807619 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c76dw\" (UniqueName: \"kubernetes.io/projected/58ccbc84-3120-4e10-a215-07d41267629d-kube-api-access-c76dw\") pod \"neutron-db-sync-wsg7b\" (UID: \"58ccbc84-3120-4e10-a215-07d41267629d\") " pod="openstack/neutron-db-sync-wsg7b" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.807656 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ccbc84-3120-4e10-a215-07d41267629d-combined-ca-bundle\") pod \"neutron-db-sync-wsg7b\" (UID: \"58ccbc84-3120-4e10-a215-07d41267629d\") " pod="openstack/neutron-db-sync-wsg7b" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.807689 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-db-sync-config-data\") pod \"cinder-db-sync-th4ww\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.807723 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cd83d353-e492-4119-8890-77569c78bed2-etc-machine-id\") pod \"cinder-db-sync-th4ww\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.807771 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/58ccbc84-3120-4e10-a215-07d41267629d-config\") pod \"neutron-db-sync-wsg7b\" (UID: \"58ccbc84-3120-4e10-a215-07d41267629d\") " pod="openstack/neutron-db-sync-wsg7b" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.807805 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-gt7fq\" (UniqueName: \"kubernetes.io/projected/cd83d353-e492-4119-8890-77569c78bed2-kube-api-access-gt7fq\") pod \"cinder-db-sync-th4ww\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.807865 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-scripts\") pod \"cinder-db-sync-th4ww\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.831418 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ccbc84-3120-4e10-a215-07d41267629d-combined-ca-bundle\") pod \"neutron-db-sync-wsg7b\" (UID: \"58ccbc84-3120-4e10-a215-07d41267629d\") " pod="openstack/neutron-db-sync-wsg7b" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.831525 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-xpjjx"] Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.843602 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/58ccbc84-3120-4e10-a215-07d41267629d-config\") pod \"neutron-db-sync-wsg7b\" (UID: \"58ccbc84-3120-4e10-a215-07d41267629d\") " pod="openstack/neutron-db-sync-wsg7b" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.844302 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-xpjjx" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.856411 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-2njps" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.856846 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.857694 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c76dw\" (UniqueName: \"kubernetes.io/projected/58ccbc84-3120-4e10-a215-07d41267629d-kube-api-access-c76dw\") pod \"neutron-db-sync-wsg7b\" (UID: \"58ccbc84-3120-4e10-a215-07d41267629d\") " pod="openstack/neutron-db-sync-wsg7b" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.918002 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-db-sync-config-data\") pod \"cinder-db-sync-th4ww\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.918050 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cd83d353-e492-4119-8890-77569c78bed2-etc-machine-id\") pod \"cinder-db-sync-th4ww\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.918095 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpt96\" (UniqueName: \"kubernetes.io/projected/04c6fe9e-2c13-4757-b2f7-237b1b8849f7-kube-api-access-fpt96\") pod \"barbican-db-sync-xpjjx\" (UID: \"04c6fe9e-2c13-4757-b2f7-237b1b8849f7\") " 
pod="openstack/barbican-db-sync-xpjjx" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.918122 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/04c6fe9e-2c13-4757-b2f7-237b1b8849f7-db-sync-config-data\") pod \"barbican-db-sync-xpjjx\" (UID: \"04c6fe9e-2c13-4757-b2f7-237b1b8849f7\") " pod="openstack/barbican-db-sync-xpjjx" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.918145 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gt7fq\" (UniqueName: \"kubernetes.io/projected/cd83d353-e492-4119-8890-77569c78bed2-kube-api-access-gt7fq\") pod \"cinder-db-sync-th4ww\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.918153 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cd83d353-e492-4119-8890-77569c78bed2-etc-machine-id\") pod \"cinder-db-sync-th4ww\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.918202 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-scripts\") pod \"cinder-db-sync-th4ww\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.918239 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04c6fe9e-2c13-4757-b2f7-237b1b8849f7-combined-ca-bundle\") pod \"barbican-db-sync-xpjjx\" (UID: \"04c6fe9e-2c13-4757-b2f7-237b1b8849f7\") " pod="openstack/barbican-db-sync-xpjjx" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.918283 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-config-data\") pod \"cinder-db-sync-th4ww\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.918311 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-combined-ca-bundle\") pod \"cinder-db-sync-th4ww\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.927553 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-db-sync-config-data\") pod \"cinder-db-sync-th4ww\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.941104 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-scripts\") pod \"cinder-db-sync-th4ww\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.943976 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-config-data\") pod \"cinder-db-sync-th4ww\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.960569 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-combined-ca-bundle\") pod \"cinder-db-sync-th4ww\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.965604 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-xpjjx"] Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.972504 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gt7fq\" (UniqueName: \"kubernetes.io/projected/cd83d353-e492-4119-8890-77569c78bed2-kube-api-access-gt7fq\") pod \"cinder-db-sync-th4ww\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:00 crc kubenswrapper[4971]: I1127 07:15:00.989438 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-wsg7b" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.026522 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpt96\" (UniqueName: \"kubernetes.io/projected/04c6fe9e-2c13-4757-b2f7-237b1b8849f7-kube-api-access-fpt96\") pod \"barbican-db-sync-xpjjx\" (UID: \"04c6fe9e-2c13-4757-b2f7-237b1b8849f7\") " pod="openstack/barbican-db-sync-xpjjx" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.026579 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/04c6fe9e-2c13-4757-b2f7-237b1b8849f7-db-sync-config-data\") pod \"barbican-db-sync-xpjjx\" (UID: \"04c6fe9e-2c13-4757-b2f7-237b1b8849f7\") " pod="openstack/barbican-db-sync-xpjjx" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.026634 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04c6fe9e-2c13-4757-b2f7-237b1b8849f7-combined-ca-bundle\") pod \"barbican-db-sync-xpjjx\" (UID: \"04c6fe9e-2c13-4757-b2f7-237b1b8849f7\") " pod="openstack/barbican-db-sync-xpjjx" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.040603 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85cf5c47c-97mcd"] Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.073019 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.095749 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04c6fe9e-2c13-4757-b2f7-237b1b8849f7-combined-ca-bundle\") pod \"barbican-db-sync-xpjjx\" (UID: \"04c6fe9e-2c13-4757-b2f7-237b1b8849f7\") " pod="openstack/barbican-db-sync-xpjjx" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.101309 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/04c6fe9e-2c13-4757-b2f7-237b1b8849f7-db-sync-config-data\") pod \"barbican-db-sync-xpjjx\" (UID: \"04c6fe9e-2c13-4757-b2f7-237b1b8849f7\") " pod="openstack/barbican-db-sync-xpjjx" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.109199 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpt96\" (UniqueName: \"kubernetes.io/projected/04c6fe9e-2c13-4757-b2f7-237b1b8849f7-kube-api-access-fpt96\") pod \"barbican-db-sync-xpjjx\" (UID: \"04c6fe9e-2c13-4757-b2f7-237b1b8849f7\") " pod="openstack/barbican-db-sync-xpjjx" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.151704 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-kv5bz"] Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.153244 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-kv5bz" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.161215 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.161655 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-74s52" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.163327 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.188444 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.192763 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.198581 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.198815 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.218960 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-kv5bz"] Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.229645 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.234692 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncglj\" (UniqueName: \"kubernetes.io/projected/b1ab1943-4c64-4083-8947-aa821a7298d2-kube-api-access-ncglj\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.234751 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-config-data\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.234789 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1098ae71-b794-4670-af66-67bd17375e2c-scripts\") pod \"placement-db-sync-kv5bz\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " pod="openstack/placement-db-sync-kv5bz" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.234814 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-scripts\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.234853 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9n2wm\" (UniqueName: \"kubernetes.io/projected/1098ae71-b794-4670-af66-67bd17375e2c-kube-api-access-9n2wm\") pod \"placement-db-sync-kv5bz\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " pod="openstack/placement-db-sync-kv5bz" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.234901 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1098ae71-b794-4670-af66-67bd17375e2c-config-data\") pod \"placement-db-sync-kv5bz\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " pod="openstack/placement-db-sync-kv5bz" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.234939 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b1ab1943-4c64-4083-8947-aa821a7298d2-run-httpd\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.234963 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/1098ae71-b794-4670-af66-67bd17375e2c-logs\") pod \"placement-db-sync-kv5bz\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " pod="openstack/placement-db-sync-kv5bz" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.234997 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.235023 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b1ab1943-4c64-4083-8947-aa821a7298d2-log-httpd\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.235052 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1098ae71-b794-4670-af66-67bd17375e2c-combined-ca-bundle\") pod \"placement-db-sync-kv5bz\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " pod="openstack/placement-db-sync-kv5bz" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.235110 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.239471 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b97f654c9-9p9vb"] Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.243088 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.252424 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b97f654c9-9p9vb"] Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.267041 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-xpjjx" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.339328 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b1ab1943-4c64-4083-8947-aa821a7298d2-run-httpd\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.339605 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1098ae71-b794-4670-af66-67bd17375e2c-logs\") pod \"placement-db-sync-kv5bz\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " pod="openstack/placement-db-sync-kv5bz" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.339668 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-dns-swift-storage-0\") pod \"dnsmasq-dns-b97f654c9-9p9vb\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.339701 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-config\") pod \"dnsmasq-dns-b97f654c9-9p9vb\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.339763 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.339808 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b1ab1943-4c64-4083-8947-aa821a7298d2-log-httpd\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.339834 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcwzk\" (UniqueName: \"kubernetes.io/projected/521d9ffc-606c-4aca-a94d-8a667ce541d3-kube-api-access-lcwzk\") pod \"dnsmasq-dns-b97f654c9-9p9vb\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.339881 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b1ab1943-4c64-4083-8947-aa821a7298d2-run-httpd\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.339892 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1098ae71-b794-4670-af66-67bd17375e2c-combined-ca-bundle\") pod \"placement-db-sync-kv5bz\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " pod="openstack/placement-db-sync-kv5bz" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.339936 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-ovsdbserver-nb\") pod \"dnsmasq-dns-b97f654c9-9p9vb\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.340008 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.340051 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncglj\" (UniqueName: \"kubernetes.io/projected/b1ab1943-4c64-4083-8947-aa821a7298d2-kube-api-access-ncglj\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.340086 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-config-data\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.340113 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1098ae71-b794-4670-af66-67bd17375e2c-scripts\") pod \"placement-db-sync-kv5bz\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " pod="openstack/placement-db-sync-kv5bz" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.340131 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-dns-svc\") pod \"dnsmasq-dns-b97f654c9-9p9vb\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.340148 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-scripts\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.340177 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9n2wm\" (UniqueName: \"kubernetes.io/projected/1098ae71-b794-4670-af66-67bd17375e2c-kube-api-access-9n2wm\") pod \"placement-db-sync-kv5bz\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " pod="openstack/placement-db-sync-kv5bz" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.340215 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1098ae71-b794-4670-af66-67bd17375e2c-config-data\") pod \"placement-db-sync-kv5bz\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " pod="openstack/placement-db-sync-kv5bz" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.340241 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-ovsdbserver-sb\") pod 
\"dnsmasq-dns-b97f654c9-9p9vb\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.340241 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b1ab1943-4c64-4083-8947-aa821a7298d2-log-httpd\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.340962 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1098ae71-b794-4670-af66-67bd17375e2c-logs\") pod \"placement-db-sync-kv5bz\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " pod="openstack/placement-db-sync-kv5bz" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.352847 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-scripts\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.359084 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1098ae71-b794-4670-af66-67bd17375e2c-scripts\") pod \"placement-db-sync-kv5bz\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " pod="openstack/placement-db-sync-kv5bz" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.360162 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.361295 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.363361 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncglj\" (UniqueName: \"kubernetes.io/projected/b1ab1943-4c64-4083-8947-aa821a7298d2-kube-api-access-ncglj\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.373567 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1098ae71-b794-4670-af66-67bd17375e2c-combined-ca-bundle\") pod \"placement-db-sync-kv5bz\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " pod="openstack/placement-db-sync-kv5bz" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.375025 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-config-data\") pod \"ceilometer-0\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") " pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.375277 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9n2wm\" (UniqueName: \"kubernetes.io/projected/1098ae71-b794-4670-af66-67bd17375e2c-kube-api-access-9n2wm\") pod 
\"placement-db-sync-kv5bz\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " pod="openstack/placement-db-sync-kv5bz" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.376001 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1098ae71-b794-4670-af66-67bd17375e2c-config-data\") pod \"placement-db-sync-kv5bz\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " pod="openstack/placement-db-sync-kv5bz" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.430798 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.437938 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.441476 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-tzwn4" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.441678 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.441781 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.442455 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-dns-svc\") pod \"dnsmasq-dns-b97f654c9-9p9vb\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.442573 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-ovsdbserver-sb\") pod \"dnsmasq-dns-b97f654c9-9p9vb\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.442606 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-dns-swift-storage-0\") pod \"dnsmasq-dns-b97f654c9-9p9vb\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.442640 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-config\") pod \"dnsmasq-dns-b97f654c9-9p9vb\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.442674 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcwzk\" (UniqueName: \"kubernetes.io/projected/521d9ffc-606c-4aca-a94d-8a667ce541d3-kube-api-access-lcwzk\") pod \"dnsmasq-dns-b97f654c9-9p9vb\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.442723 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-ovsdbserver-nb\") pod 
\"dnsmasq-dns-b97f654c9-9p9vb\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.445702 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-dns-svc\") pod \"dnsmasq-dns-b97f654c9-9p9vb\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.447134 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-dns-swift-storage-0\") pod \"dnsmasq-dns-b97f654c9-9p9vb\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.448147 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-config\") pod \"dnsmasq-dns-b97f654c9-9p9vb\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.449456 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-ovsdbserver-nb\") pod \"dnsmasq-dns-b97f654c9-9p9vb\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.449824 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-ovsdbserver-sb\") pod \"dnsmasq-dns-b97f654c9-9p9vb\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.452363 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.464637 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.488730 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcwzk\" (UniqueName: \"kubernetes.io/projected/521d9ffc-606c-4aca-a94d-8a667ce541d3-kube-api-access-lcwzk\") pod \"dnsmasq-dns-b97f654c9-9p9vb\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.544187 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-logs\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.544361 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-config-data\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc 
kubenswrapper[4971]: I1127 07:15:01.544626 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffvnf\" (UniqueName: \"kubernetes.io/projected/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-kube-api-access-ffvnf\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.544672 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.544725 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.544775 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.544800 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.544835 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-scripts\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.553808 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-kv5bz" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.571387 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.573673 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.578815 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.583821 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85cf5c47c-97mcd"] Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.597474 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 07:15:01 crc kubenswrapper[4971]: W1127 07:15:01.597639 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaae7be1b_5928_4e63_9fc0_f439a1994ce1.slice/crio-771929a43d95aa6090026c00adecf77e5c18682c28976d0a1be33eed16966792 WatchSource:0}: Error finding container 771929a43d95aa6090026c00adecf77e5c18682c28976d0a1be33eed16966792: Status 404 returned error can't find the container with id 771929a43d95aa6090026c00adecf77e5c18682c28976d0a1be33eed16966792 Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.606965 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.622128 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.646968 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.647082 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.647113 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.648665 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffvnf\" (UniqueName: \"kubernetes.io/projected/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-kube-api-access-ffvnf\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.648692 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d97ee469-4ae1-4045-b91c-b2dad95d2110-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.648720 4971 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.648769 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.648806 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.648835 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.648881 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.648899 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.648926 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d97ee469-4ae1-4045-b91c-b2dad95d2110-logs\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.648947 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-scripts\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.648991 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6wzr\" (UniqueName: \"kubernetes.io/projected/d97ee469-4ae1-4045-b91c-b2dad95d2110-kube-api-access-c6wzr\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.649029 4971 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-logs\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.649068 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-config-data\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.656097 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.657287 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.661611 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-config-data\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.680296 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.680617 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-logs\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.680838 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.688288 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffvnf\" (UniqueName: \"kubernetes.io/projected/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-kube-api-access-ffvnf\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.692551 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.693073 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-scripts\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.747290 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.759903 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" event={"ID":"aae7be1b-5928-4e63-9fc0-f439a1994ce1","Type":"ContainerStarted","Data":"771929a43d95aa6090026c00adecf77e5c18682c28976d0a1be33eed16966792"} Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.762575 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.763363 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d97ee469-4ae1-4045-b91c-b2dad95d2110-logs\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.763504 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6wzr\" (UniqueName: \"kubernetes.io/projected/d97ee469-4ae1-4045-b91c-b2dad95d2110-kube-api-access-c6wzr\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.763665 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.763705 4971 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.763743 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.763833 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d97ee469-4ae1-4045-b91c-b2dad95d2110-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.763898 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.764096 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.775591 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d97ee469-4ae1-4045-b91c-b2dad95d2110-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.780545 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.783475 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d97ee469-4ae1-4045-b91c-b2dad95d2110-logs\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.796195 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.799231 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.800093 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.825554 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6wzr\" (UniqueName: \"kubernetes.io/projected/d97ee469-4ae1-4045-b91c-b2dad95d2110-kube-api-access-c6wzr\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.831554 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts"] Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.833630 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.837635 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.865755 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dfcfh"] Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.958146 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-th4ww"] Nov 27 07:15:01 crc kubenswrapper[4971]: I1127 07:15:01.972870 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-wsg7b"] Nov 27 07:15:01 crc kubenswrapper[4971]: W1127 07:15:01.990473 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod58ccbc84_3120_4e10_a215_07d41267629d.slice/crio-646dd32e727408c002dfdeb44ba6919f77e10b39f5800402e79a69ee2e27a173 WatchSource:0}: Error finding container 646dd32e727408c002dfdeb44ba6919f77e10b39f5800402e79a69ee2e27a173: Status 404 returned error can't find the container with id 646dd32e727408c002dfdeb44ba6919f77e10b39f5800402e79a69ee2e27a173 Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.038367 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.082638 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-xpjjx"] Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.210565 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-kv5bz"] Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.220420 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.405370 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b97f654c9-9p9vb"] Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.708665 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 07:15:02 crc kubenswrapper[4971]: W1127 07:15:02.709815 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd97ee469_4ae1_4045_b91c_b2dad95d2110.slice/crio-7d884a1443f9c1d40d0d2acebd6ad90f27d54d3ab31b8918255d2fb4ad0271b5 WatchSource:0}: Error finding container 7d884a1443f9c1d40d0d2acebd6ad90f27d54d3ab31b8918255d2fb4ad0271b5: Status 404 returned error can't find the container with id 7d884a1443f9c1d40d0d2acebd6ad90f27d54d3ab31b8918255d2fb4ad0271b5 Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.770965 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d97ee469-4ae1-4045-b91c-b2dad95d2110","Type":"ContainerStarted","Data":"7d884a1443f9c1d40d0d2acebd6ad90f27d54d3ab31b8918255d2fb4ad0271b5"} Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.772506 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-kv5bz" event={"ID":"1098ae71-b794-4670-af66-67bd17375e2c","Type":"ContainerStarted","Data":"50aeb76b97f9c9f64cf34e83cb3311574d9ffe6921fc36cb33897208082dae4b"} Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.774630 4971 generic.go:334] "Generic (PLEG): container finished" podID="aae7be1b-5928-4e63-9fc0-f439a1994ce1" containerID="8422f32313fd39afcbc8a7c01894f5295aaceab71cbb57d6fcf86e2db272b91e" exitCode=0 Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.774787 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" event={"ID":"aae7be1b-5928-4e63-9fc0-f439a1994ce1","Type":"ContainerDied","Data":"8422f32313fd39afcbc8a7c01894f5295aaceab71cbb57d6fcf86e2db272b91e"} Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.786436 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xpjjx" event={"ID":"04c6fe9e-2c13-4757-b2f7-237b1b8849f7","Type":"ContainerStarted","Data":"c9fa7351073f38c54dc1aa77c83bae318f8851533db7181c69600eca571a2dfb"} Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.807638 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-th4ww" event={"ID":"cd83d353-e492-4119-8890-77569c78bed2","Type":"ContainerStarted","Data":"8fc23f424b3a2a9f17017a2aeb44fa99e3c8ba29bb9333d7e8cdc48924ab48cc"} Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.818423 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dfcfh" event={"ID":"b4b06d68-ae10-4c05-a95f-f84e6ba3a038","Type":"ContainerStarted","Data":"b055dca2fce241d4dff82cda6eadcdc757cf29563f11d4e106062cc703721e5d"} Nov 27 07:15:02 crc 
kubenswrapper[4971]: I1127 07:15:02.818476 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dfcfh" event={"ID":"b4b06d68-ae10-4c05-a95f-f84e6ba3a038","Type":"ContainerStarted","Data":"d6d3a53583edac743f83168727271ef7ea982e081c78d6593e484bf3915eec3e"} Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.827606 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" event={"ID":"521d9ffc-606c-4aca-a94d-8a667ce541d3","Type":"ContainerStarted","Data":"aa49b65c947d9bbfd41eb10f0709029b0365f1ad34175e9917d45eb756059302"} Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.829689 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wsg7b" event={"ID":"58ccbc84-3120-4e10-a215-07d41267629d","Type":"ContainerStarted","Data":"90d75b56cc24ed99876c2a3d94faea49f7cafd9b68b4d01c2a828f69a6df4484"} Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.829719 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wsg7b" event={"ID":"58ccbc84-3120-4e10-a215-07d41267629d","Type":"ContainerStarted","Data":"646dd32e727408c002dfdeb44ba6919f77e10b39f5800402e79a69ee2e27a173"} Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.836350 4971 generic.go:334] "Generic (PLEG): container finished" podID="8111ff30-6af9-4f0c-95cf-6dc2460ff4bb" containerID="06e5c4798cbcadbeb23060dff3a163709767f5cc0d898def21a29e52aacb95b5" exitCode=0 Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.836621 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts" event={"ID":"8111ff30-6af9-4f0c-95cf-6dc2460ff4bb","Type":"ContainerDied","Data":"06e5c4798cbcadbeb23060dff3a163709767f5cc0d898def21a29e52aacb95b5"} Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.836701 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts" event={"ID":"8111ff30-6af9-4f0c-95cf-6dc2460ff4bb","Type":"ContainerStarted","Data":"35844f4e5f1bf03bb061b74eb434243d7d0ae046990c9b2adc600d2998cd1852"} Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.845787 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b1ab1943-4c64-4083-8947-aa821a7298d2","Type":"ContainerStarted","Data":"af98c203026756aee7f3cd6d423aeb5cac208a2a5c1f8c70cf974e218b63bdda"} Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.857549 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-dfcfh" podStartSLOduration=2.857520646 podStartE2EDuration="2.857520646s" podCreationTimestamp="2025-11-27 07:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:15:02.842460821 +0000 UTC m=+1341.034504739" watchObservedRunningTime="2025-11-27 07:15:02.857520646 +0000 UTC m=+1341.049564584" Nov 27 07:15:02 crc kubenswrapper[4971]: I1127 07:15:02.875658 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-wsg7b" podStartSLOduration=2.875632748 podStartE2EDuration="2.875632748s" podCreationTimestamp="2025-11-27 07:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:15:02.863763253 +0000 UTC m=+1341.055807171" watchObservedRunningTime="2025-11-27 
07:15:02.875632748 +0000 UTC m=+1341.067676676" Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.344917 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.422862 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2l8lm\" (UniqueName: \"kubernetes.io/projected/aae7be1b-5928-4e63-9fc0-f439a1994ce1-kube-api-access-2l8lm\") pod \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.429672 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-dns-svc\") pod \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.429730 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-dns-swift-storage-0\") pod \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.429819 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-ovsdbserver-sb\") pod \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.429915 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-ovsdbserver-nb\") pod \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.430067 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-config\") pod \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\" (UID: \"aae7be1b-5928-4e63-9fc0-f439a1994ce1\") " Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.436075 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aae7be1b-5928-4e63-9fc0-f439a1994ce1-kube-api-access-2l8lm" (OuterVolumeSpecName: "kube-api-access-2l8lm") pod "aae7be1b-5928-4e63-9fc0-f439a1994ce1" (UID: "aae7be1b-5928-4e63-9fc0-f439a1994ce1"). InnerVolumeSpecName "kube-api-access-2l8lm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.468464 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "aae7be1b-5928-4e63-9fc0-f439a1994ce1" (UID: "aae7be1b-5928-4e63-9fc0-f439a1994ce1"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.482727 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "aae7be1b-5928-4e63-9fc0-f439a1994ce1" (UID: "aae7be1b-5928-4e63-9fc0-f439a1994ce1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.517011 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-config" (OuterVolumeSpecName: "config") pod "aae7be1b-5928-4e63-9fc0-f439a1994ce1" (UID: "aae7be1b-5928-4e63-9fc0-f439a1994ce1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.530454 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "aae7be1b-5928-4e63-9fc0-f439a1994ce1" (UID: "aae7be1b-5928-4e63-9fc0-f439a1994ce1"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.534495 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.543357 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "aae7be1b-5928-4e63-9fc0-f439a1994ce1" (UID: "aae7be1b-5928-4e63-9fc0-f439a1994ce1"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.562580 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.563424 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.563890 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2l8lm\" (UniqueName: \"kubernetes.io/projected/aae7be1b-5928-4e63-9fc0-f439a1994ce1-kube-api-access-2l8lm\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.563910 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.563921 4971 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.563930 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/aae7be1b-5928-4e63-9fc0-f439a1994ce1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.873648 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d97ee469-4ae1-4045-b91c-b2dad95d2110","Type":"ContainerStarted","Data":"85439ed0d570a62aab3aadcd3cd25b6d9090c5152e308418101bb768ad360f9f"} Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.876762 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea","Type":"ContainerStarted","Data":"a2a34703a048673224c65cf7e6d55ba563cb7799b642dcc8ce9d23f46dc92c88"} Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.879015 4971 generic.go:334] "Generic (PLEG): container finished" podID="521d9ffc-606c-4aca-a94d-8a667ce541d3" containerID="db3db056e3b55911cf377fa3c05350a21880e42db521f615e561d6412cc41d4d" exitCode=0 Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.879078 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" event={"ID":"521d9ffc-606c-4aca-a94d-8a667ce541d3","Type":"ContainerDied","Data":"db3db056e3b55911cf377fa3c05350a21880e42db521f615e561d6412cc41d4d"} Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.887259 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" event={"ID":"aae7be1b-5928-4e63-9fc0-f439a1994ce1","Type":"ContainerDied","Data":"771929a43d95aa6090026c00adecf77e5c18682c28976d0a1be33eed16966792"} Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.887331 4971 scope.go:117] "RemoveContainer" containerID="8422f32313fd39afcbc8a7c01894f5295aaceab71cbb57d6fcf86e2db272b91e" Nov 27 07:15:03 crc kubenswrapper[4971]: I1127 07:15:03.887651 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85cf5c47c-97mcd" Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.009826 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85cf5c47c-97mcd"] Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.037492 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.050697 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-85cf5c47c-97mcd"] Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.103909 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.122070 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.360738 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts" Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.493123 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7gbcc\" (UniqueName: \"kubernetes.io/projected/8111ff30-6af9-4f0c-95cf-6dc2460ff4bb-kube-api-access-7gbcc\") pod \"8111ff30-6af9-4f0c-95cf-6dc2460ff4bb\" (UID: \"8111ff30-6af9-4f0c-95cf-6dc2460ff4bb\") " Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.493290 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8111ff30-6af9-4f0c-95cf-6dc2460ff4bb-config-volume\") pod \"8111ff30-6af9-4f0c-95cf-6dc2460ff4bb\" (UID: \"8111ff30-6af9-4f0c-95cf-6dc2460ff4bb\") " Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.493614 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8111ff30-6af9-4f0c-95cf-6dc2460ff4bb-secret-volume\") pod \"8111ff30-6af9-4f0c-95cf-6dc2460ff4bb\" (UID: \"8111ff30-6af9-4f0c-95cf-6dc2460ff4bb\") " Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.495194 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8111ff30-6af9-4f0c-95cf-6dc2460ff4bb-config-volume" (OuterVolumeSpecName: "config-volume") pod "8111ff30-6af9-4f0c-95cf-6dc2460ff4bb" (UID: "8111ff30-6af9-4f0c-95cf-6dc2460ff4bb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.501690 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8111ff30-6af9-4f0c-95cf-6dc2460ff4bb-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8111ff30-6af9-4f0c-95cf-6dc2460ff4bb" (UID: "8111ff30-6af9-4f0c-95cf-6dc2460ff4bb"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.502781 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8111ff30-6af9-4f0c-95cf-6dc2460ff4bb-kube-api-access-7gbcc" (OuterVolumeSpecName: "kube-api-access-7gbcc") pod "8111ff30-6af9-4f0c-95cf-6dc2460ff4bb" (UID: "8111ff30-6af9-4f0c-95cf-6dc2460ff4bb"). InnerVolumeSpecName "kube-api-access-7gbcc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.568162 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aae7be1b-5928-4e63-9fc0-f439a1994ce1" path="/var/lib/kubelet/pods/aae7be1b-5928-4e63-9fc0-f439a1994ce1/volumes" Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.597461 4971 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8111ff30-6af9-4f0c-95cf-6dc2460ff4bb-config-volume\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.597506 4971 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8111ff30-6af9-4f0c-95cf-6dc2460ff4bb-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.597519 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7gbcc\" (UniqueName: \"kubernetes.io/projected/8111ff30-6af9-4f0c-95cf-6dc2460ff4bb-kube-api-access-7gbcc\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.941699 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts" Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.941934 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts" event={"ID":"8111ff30-6af9-4f0c-95cf-6dc2460ff4bb","Type":"ContainerDied","Data":"35844f4e5f1bf03bb061b74eb434243d7d0ae046990c9b2adc600d2998cd1852"} Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.942698 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35844f4e5f1bf03bb061b74eb434243d7d0ae046990c9b2adc600d2998cd1852" Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.947220 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d97ee469-4ae1-4045-b91c-b2dad95d2110","Type":"ContainerStarted","Data":"2d7ba85855f79ab485bd1745723e69366fec97659a73a95a0239fea68f2ed3e9"} Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.947480 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d97ee469-4ae1-4045-b91c-b2dad95d2110" containerName="glance-log" containerID="cri-o://85439ed0d570a62aab3aadcd3cd25b6d9090c5152e308418101bb768ad360f9f" gracePeriod=30 Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.947767 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d97ee469-4ae1-4045-b91c-b2dad95d2110" containerName="glance-httpd" containerID="cri-o://2d7ba85855f79ab485bd1745723e69366fec97659a73a95a0239fea68f2ed3e9" gracePeriod=30 Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.952020 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea","Type":"ContainerStarted","Data":"3345fa2c05d61c6c3b3cf76efbc6465d898235060071298b605f35c78b1df8f2"} Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.958292 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" 
event={"ID":"521d9ffc-606c-4aca-a94d-8a667ce541d3","Type":"ContainerStarted","Data":"2ff7481e2a724f2221aeb07a54d81c8a62fd3d916a6d1a53e9fbd71ad11f3924"} Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.958666 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:04 crc kubenswrapper[4971]: I1127 07:15:04.987604 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.987581116 podStartE2EDuration="4.987581116s" podCreationTimestamp="2025-11-27 07:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:15:04.975065923 +0000 UTC m=+1343.167109841" watchObservedRunningTime="2025-11-27 07:15:04.987581116 +0000 UTC m=+1343.179625044" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.003758 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" podStartSLOduration=5.003735833 podStartE2EDuration="5.003735833s" podCreationTimestamp="2025-11-27 07:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:15:05.000017808 +0000 UTC m=+1343.192061736" watchObservedRunningTime="2025-11-27 07:15:05.003735833 +0000 UTC m=+1343.195779771" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.506079 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.619871 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"d97ee469-4ae1-4045-b91c-b2dad95d2110\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.620257 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-config-data\") pod \"d97ee469-4ae1-4045-b91c-b2dad95d2110\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.620288 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-scripts\") pod \"d97ee469-4ae1-4045-b91c-b2dad95d2110\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.620322 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d97ee469-4ae1-4045-b91c-b2dad95d2110-httpd-run\") pod \"d97ee469-4ae1-4045-b91c-b2dad95d2110\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.620370 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d97ee469-4ae1-4045-b91c-b2dad95d2110-logs\") pod \"d97ee469-4ae1-4045-b91c-b2dad95d2110\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.620402 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6wzr\" (UniqueName: 
\"kubernetes.io/projected/d97ee469-4ae1-4045-b91c-b2dad95d2110-kube-api-access-c6wzr\") pod \"d97ee469-4ae1-4045-b91c-b2dad95d2110\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.620441 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-internal-tls-certs\") pod \"d97ee469-4ae1-4045-b91c-b2dad95d2110\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.620490 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-combined-ca-bundle\") pod \"d97ee469-4ae1-4045-b91c-b2dad95d2110\" (UID: \"d97ee469-4ae1-4045-b91c-b2dad95d2110\") " Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.621087 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d97ee469-4ae1-4045-b91c-b2dad95d2110-logs" (OuterVolumeSpecName: "logs") pod "d97ee469-4ae1-4045-b91c-b2dad95d2110" (UID: "d97ee469-4ae1-4045-b91c-b2dad95d2110"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.621532 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d97ee469-4ae1-4045-b91c-b2dad95d2110-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d97ee469-4ae1-4045-b91c-b2dad95d2110" (UID: "d97ee469-4ae1-4045-b91c-b2dad95d2110"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.626245 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-scripts" (OuterVolumeSpecName: "scripts") pod "d97ee469-4ae1-4045-b91c-b2dad95d2110" (UID: "d97ee469-4ae1-4045-b91c-b2dad95d2110"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.638689 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "d97ee469-4ae1-4045-b91c-b2dad95d2110" (UID: "d97ee469-4ae1-4045-b91c-b2dad95d2110"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.642744 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d97ee469-4ae1-4045-b91c-b2dad95d2110-kube-api-access-c6wzr" (OuterVolumeSpecName: "kube-api-access-c6wzr") pod "d97ee469-4ae1-4045-b91c-b2dad95d2110" (UID: "d97ee469-4ae1-4045-b91c-b2dad95d2110"). InnerVolumeSpecName "kube-api-access-c6wzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.696699 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d97ee469-4ae1-4045-b91c-b2dad95d2110" (UID: "d97ee469-4ae1-4045-b91c-b2dad95d2110"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.698928 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-config-data" (OuterVolumeSpecName: "config-data") pod "d97ee469-4ae1-4045-b91c-b2dad95d2110" (UID: "d97ee469-4ae1-4045-b91c-b2dad95d2110"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.699396 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d97ee469-4ae1-4045-b91c-b2dad95d2110" (UID: "d97ee469-4ae1-4045-b91c-b2dad95d2110"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.722687 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.722723 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.722734 4971 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d97ee469-4ae1-4045-b91c-b2dad95d2110-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.722743 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d97ee469-4ae1-4045-b91c-b2dad95d2110-logs\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.722752 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6wzr\" (UniqueName: \"kubernetes.io/projected/d97ee469-4ae1-4045-b91c-b2dad95d2110-kube-api-access-c6wzr\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.722762 4971 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.722773 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d97ee469-4ae1-4045-b91c-b2dad95d2110-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.722809 4971 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.750918 4971 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.824995 4971 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 
07:15:05.983156 4971 generic.go:334] "Generic (PLEG): container finished" podID="d97ee469-4ae1-4045-b91c-b2dad95d2110" containerID="2d7ba85855f79ab485bd1745723e69366fec97659a73a95a0239fea68f2ed3e9" exitCode=143 Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.983183 4971 generic.go:334] "Generic (PLEG): container finished" podID="d97ee469-4ae1-4045-b91c-b2dad95d2110" containerID="85439ed0d570a62aab3aadcd3cd25b6d9090c5152e308418101bb768ad360f9f" exitCode=143 Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.983233 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d97ee469-4ae1-4045-b91c-b2dad95d2110","Type":"ContainerDied","Data":"2d7ba85855f79ab485bd1745723e69366fec97659a73a95a0239fea68f2ed3e9"} Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.983261 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d97ee469-4ae1-4045-b91c-b2dad95d2110","Type":"ContainerDied","Data":"85439ed0d570a62aab3aadcd3cd25b6d9090c5152e308418101bb768ad360f9f"} Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.983273 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d97ee469-4ae1-4045-b91c-b2dad95d2110","Type":"ContainerDied","Data":"7d884a1443f9c1d40d0d2acebd6ad90f27d54d3ab31b8918255d2fb4ad0271b5"} Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.983289 4971 scope.go:117] "RemoveContainer" containerID="2d7ba85855f79ab485bd1745723e69366fec97659a73a95a0239fea68f2ed3e9" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.983503 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.996113 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea","Type":"ContainerStarted","Data":"7882e8427a5c545c4627afc3e2cbb86e0147f1ae7136d4680ea6ec08c575d081"} Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.996202 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b15d1e2e-d53f-4263-8b48-880c0ee4a3ea" containerName="glance-log" containerID="cri-o://3345fa2c05d61c6c3b3cf76efbc6465d898235060071298b605f35c78b1df8f2" gracePeriod=30 Nov 27 07:15:05 crc kubenswrapper[4971]: I1127 07:15:05.996225 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b15d1e2e-d53f-4263-8b48-880c0ee4a3ea" containerName="glance-httpd" containerID="cri-o://7882e8427a5c545c4627afc3e2cbb86e0147f1ae7136d4680ea6ec08c575d081" gracePeriod=30 Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.029143 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.029125763 podStartE2EDuration="6.029125763s" podCreationTimestamp="2025-11-27 07:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:15:06.026332354 +0000 UTC m=+1344.218376272" watchObservedRunningTime="2025-11-27 07:15:06.029125763 +0000 UTC m=+1344.221169701" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.052106 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 
27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.063782 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.080180 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 07:15:06 crc kubenswrapper[4971]: E1127 07:15:06.080674 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8111ff30-6af9-4f0c-95cf-6dc2460ff4bb" containerName="collect-profiles" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.080691 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="8111ff30-6af9-4f0c-95cf-6dc2460ff4bb" containerName="collect-profiles" Nov 27 07:15:06 crc kubenswrapper[4971]: E1127 07:15:06.080711 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aae7be1b-5928-4e63-9fc0-f439a1994ce1" containerName="init" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.080719 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="aae7be1b-5928-4e63-9fc0-f439a1994ce1" containerName="init" Nov 27 07:15:06 crc kubenswrapper[4971]: E1127 07:15:06.080740 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d97ee469-4ae1-4045-b91c-b2dad95d2110" containerName="glance-httpd" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.080748 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d97ee469-4ae1-4045-b91c-b2dad95d2110" containerName="glance-httpd" Nov 27 07:15:06 crc kubenswrapper[4971]: E1127 07:15:06.080760 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d97ee469-4ae1-4045-b91c-b2dad95d2110" containerName="glance-log" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.080767 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d97ee469-4ae1-4045-b91c-b2dad95d2110" containerName="glance-log" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.081919 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="d97ee469-4ae1-4045-b91c-b2dad95d2110" containerName="glance-httpd" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.081938 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="aae7be1b-5928-4e63-9fc0-f439a1994ce1" containerName="init" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.081957 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="8111ff30-6af9-4f0c-95cf-6dc2460ff4bb" containerName="collect-profiles" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.081967 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="d97ee469-4ae1-4045-b91c-b2dad95d2110" containerName="glance-log" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.083129 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.089391 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.089686 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.097615 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.132785 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cbb2c33-b8a0-4220-916a-ea0a8d738755-logs\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.132937 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.133087 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.133144 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8cbb2c33-b8a0-4220-916a-ea0a8d738755-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.133238 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.133715 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.133909 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzvtg\" (UniqueName: \"kubernetes.io/projected/8cbb2c33-b8a0-4220-916a-ea0a8d738755-kube-api-access-lzvtg\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.133960 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.236008 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.236088 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzvtg\" (UniqueName: \"kubernetes.io/projected/8cbb2c33-b8a0-4220-916a-ea0a8d738755-kube-api-access-lzvtg\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.236111 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.236157 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cbb2c33-b8a0-4220-916a-ea0a8d738755-logs\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.236191 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.236233 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.236260 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8cbb2c33-b8a0-4220-916a-ea0a8d738755-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.236295 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.236793 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.236884 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cbb2c33-b8a0-4220-916a-ea0a8d738755-logs\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.243704 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8cbb2c33-b8a0-4220-916a-ea0a8d738755-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.249661 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.254312 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.256270 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.259868 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzvtg\" (UniqueName: \"kubernetes.io/projected/8cbb2c33-b8a0-4220-916a-ea0a8d738755-kube-api-access-lzvtg\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.261485 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.279277 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.415206 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 27 07:15:06 crc kubenswrapper[4971]: I1127 07:15:06.562048 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d97ee469-4ae1-4045-b91c-b2dad95d2110" path="/var/lib/kubelet/pods/d97ee469-4ae1-4045-b91c-b2dad95d2110/volumes" Nov 27 07:15:07 crc kubenswrapper[4971]: I1127 07:15:07.010265 4971 generic.go:334] "Generic (PLEG): container finished" podID="b4b06d68-ae10-4c05-a95f-f84e6ba3a038" containerID="b055dca2fce241d4dff82cda6eadcdc757cf29563f11d4e106062cc703721e5d" exitCode=0 Nov 27 07:15:07 crc kubenswrapper[4971]: I1127 07:15:07.010436 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dfcfh" event={"ID":"b4b06d68-ae10-4c05-a95f-f84e6ba3a038","Type":"ContainerDied","Data":"b055dca2fce241d4dff82cda6eadcdc757cf29563f11d4e106062cc703721e5d"} Nov 27 07:15:07 crc kubenswrapper[4971]: I1127 07:15:07.014055 4971 generic.go:334] "Generic (PLEG): container finished" podID="b15d1e2e-d53f-4263-8b48-880c0ee4a3ea" containerID="7882e8427a5c545c4627afc3e2cbb86e0147f1ae7136d4680ea6ec08c575d081" exitCode=0 Nov 27 07:15:07 crc kubenswrapper[4971]: I1127 07:15:07.014080 4971 generic.go:334] "Generic (PLEG): container finished" podID="b15d1e2e-d53f-4263-8b48-880c0ee4a3ea" containerID="3345fa2c05d61c6c3b3cf76efbc6465d898235060071298b605f35c78b1df8f2" exitCode=143 Nov 27 07:15:07 crc kubenswrapper[4971]: I1127 07:15:07.014099 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea","Type":"ContainerDied","Data":"7882e8427a5c545c4627afc3e2cbb86e0147f1ae7136d4680ea6ec08c575d081"} Nov 27 07:15:07 crc kubenswrapper[4971]: I1127 07:15:07.014119 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea","Type":"ContainerDied","Data":"3345fa2c05d61c6c3b3cf76efbc6465d898235060071298b605f35c78b1df8f2"} Nov 27 07:15:11 crc kubenswrapper[4971]: I1127 07:15:11.682697 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:11 crc kubenswrapper[4971]: I1127 07:15:11.755413 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fd79dd9f9-z2dsj"] Nov 27 07:15:11 crc kubenswrapper[4971]: I1127 07:15:11.755645 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" podUID="46ee0072-fbf4-40fa-b9bc-40fae95462c3" containerName="dnsmasq-dns" containerID="cri-o://dff606b7b165d6268b04e3c652abf9420dd9caabe77ed3c1f0c431beda8c9de5" gracePeriod=10 Nov 27 07:15:12 crc kubenswrapper[4971]: I1127 07:15:12.060044 4971 generic.go:334] "Generic (PLEG): container finished" podID="46ee0072-fbf4-40fa-b9bc-40fae95462c3" containerID="dff606b7b165d6268b04e3c652abf9420dd9caabe77ed3c1f0c431beda8c9de5" exitCode=0 Nov 27 07:15:12 crc kubenswrapper[4971]: I1127 07:15:12.060137 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" event={"ID":"46ee0072-fbf4-40fa-b9bc-40fae95462c3","Type":"ContainerDied","Data":"dff606b7b165d6268b04e3c652abf9420dd9caabe77ed3c1f0c431beda8c9de5"} Nov 27 07:15:12 crc kubenswrapper[4971]: I1127 07:15:12.954894 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.069150 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dfcfh" event={"ID":"b4b06d68-ae10-4c05-a95f-f84e6ba3a038","Type":"ContainerDied","Data":"d6d3a53583edac743f83168727271ef7ea982e081c78d6593e484bf3915eec3e"} Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.069189 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6d3a53583edac743f83168727271ef7ea982e081c78d6593e484bf3915eec3e" Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.069240 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dfcfh" Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.072639 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-credential-keys\") pod \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.072760 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvdbd\" (UniqueName: \"kubernetes.io/projected/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-kube-api-access-zvdbd\") pod \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.072820 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-combined-ca-bundle\") pod \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.072907 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-scripts\") pod \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.072969 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-fernet-keys\") pod \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.073017 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-config-data\") pod \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\" (UID: \"b4b06d68-ae10-4c05-a95f-f84e6ba3a038\") " Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.079984 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-scripts" (OuterVolumeSpecName: "scripts") pod "b4b06d68-ae10-4c05-a95f-f84e6ba3a038" (UID: "b4b06d68-ae10-4c05-a95f-f84e6ba3a038"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.080302 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "b4b06d68-ae10-4c05-a95f-f84e6ba3a038" (UID: "b4b06d68-ae10-4c05-a95f-f84e6ba3a038"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.080692 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "b4b06d68-ae10-4c05-a95f-f84e6ba3a038" (UID: "b4b06d68-ae10-4c05-a95f-f84e6ba3a038"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.082350 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-kube-api-access-zvdbd" (OuterVolumeSpecName: "kube-api-access-zvdbd") pod "b4b06d68-ae10-4c05-a95f-f84e6ba3a038" (UID: "b4b06d68-ae10-4c05-a95f-f84e6ba3a038"). InnerVolumeSpecName "kube-api-access-zvdbd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.107498 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-config-data" (OuterVolumeSpecName: "config-data") pod "b4b06d68-ae10-4c05-a95f-f84e6ba3a038" (UID: "b4b06d68-ae10-4c05-a95f-f84e6ba3a038"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.114493 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b4b06d68-ae10-4c05-a95f-f84e6ba3a038" (UID: "b4b06d68-ae10-4c05-a95f-f84e6ba3a038"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.189128 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvdbd\" (UniqueName: \"kubernetes.io/projected/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-kube-api-access-zvdbd\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.189177 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.189189 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.189198 4971 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.189210 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:13 crc kubenswrapper[4971]: I1127 07:15:13.189224 4971 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b4b06d68-ae10-4c05-a95f-f84e6ba3a038-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.052689 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-dfcfh"] Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.060200 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-dfcfh"] Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.136924 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-v825j"] Nov 27 07:15:14 crc kubenswrapper[4971]: E1127 07:15:14.137324 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4b06d68-ae10-4c05-a95f-f84e6ba3a038" containerName="keystone-bootstrap" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.137339 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4b06d68-ae10-4c05-a95f-f84e6ba3a038" containerName="keystone-bootstrap" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.137525 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4b06d68-ae10-4c05-a95f-f84e6ba3a038" containerName="keystone-bootstrap" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.138127 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.140491 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.141888 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.142084 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.142154 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.142570 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gcnm2" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.145373 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" podUID="46ee0072-fbf4-40fa-b9bc-40fae95462c3" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.128:5353: connect: connection refused" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.161874 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-v825j"] Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.208775 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-scripts\") pod \"keystone-bootstrap-v825j\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.208827 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-fernet-keys\") pod \"keystone-bootstrap-v825j\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.208864 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-combined-ca-bundle\") pod \"keystone-bootstrap-v825j\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.208901 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-credential-keys\") pod \"keystone-bootstrap-v825j\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.208921 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2qqg\" (UniqueName: \"kubernetes.io/projected/41a0569f-4523-4dc9-a40f-bc22d113d523-kube-api-access-x2qqg\") pod \"keystone-bootstrap-v825j\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.209146 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-config-data\") pod \"keystone-bootstrap-v825j\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.311336 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-scripts\") pod \"keystone-bootstrap-v825j\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.311395 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-fernet-keys\") pod \"keystone-bootstrap-v825j\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.311440 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-combined-ca-bundle\") pod \"keystone-bootstrap-v825j\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.311474 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-credential-keys\") pod \"keystone-bootstrap-v825j\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.311494 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2qqg\" (UniqueName: \"kubernetes.io/projected/41a0569f-4523-4dc9-a40f-bc22d113d523-kube-api-access-x2qqg\") pod \"keystone-bootstrap-v825j\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.311884 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-config-data\") pod \"keystone-bootstrap-v825j\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.324449 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-credential-keys\") pod \"keystone-bootstrap-v825j\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.325201 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-fernet-keys\") pod \"keystone-bootstrap-v825j\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.341031 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2qqg\" (UniqueName: \"kubernetes.io/projected/41a0569f-4523-4dc9-a40f-bc22d113d523-kube-api-access-x2qqg\") pod \"keystone-bootstrap-v825j\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " 
pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.343959 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-scripts\") pod \"keystone-bootstrap-v825j\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.347264 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-combined-ca-bundle\") pod \"keystone-bootstrap-v825j\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.362796 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-config-data\") pod \"keystone-bootstrap-v825j\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.457054 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:14 crc kubenswrapper[4971]: I1127 07:15:14.561768 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4b06d68-ae10-4c05-a95f-f84e6ba3a038" path="/var/lib/kubelet/pods/b4b06d68-ae10-4c05-a95f-f84e6ba3a038/volumes" Nov 27 07:15:19 crc kubenswrapper[4971]: I1127 07:15:19.149087 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" podUID="46ee0072-fbf4-40fa-b9bc-40fae95462c3" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.128:5353: connect: connection refused" Nov 27 07:15:23 crc kubenswrapper[4971]: E1127 07:15:23.408299 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:3a56b50437a0c9a9a7b30c10f5e43bbdb7d9a94b723c70d36f0b01ff545e00eb" Nov 27 07:15:23 crc kubenswrapper[4971]: E1127 07:15:23.409047 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:3a56b50437a0c9a9a7b30c10f5e43bbdb7d9a94b723c70d36f0b01ff545e00eb,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fpt96,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-xpjjx_openstack(04c6fe9e-2c13-4757-b2f7-237b1b8849f7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 27 07:15:23 crc kubenswrapper[4971]: E1127 07:15:23.410246 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-xpjjx" podUID="04c6fe9e-2c13-4757-b2f7-237b1b8849f7" Nov 27 07:15:23 crc kubenswrapper[4971]: E1127 07:15:23.696639 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:2051e26a441f1ce22aeb8daa0137559d89bded994db8141c11dd580ae6d07a23" Nov 27 07:15:23 crc kubenswrapper[4971]: E1127 07:15:23.696829 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:2051e26a441f1ce22aeb8daa0137559d89bded994db8141c11dd580ae6d07a23,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n96h5dfh56h5b9h5d9hcdh5bdh8hfh654h5ffhfbh97h54bh68hdbh5c5hc8hfchbfh8dhdchf5h679h576h54ch5f7h645h86h59fh5b4h696q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ncglj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(b1ab1943-4c64-4083-8947-aa821a7298d2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 27 07:15:23 crc kubenswrapper[4971]: I1127 07:15:23.714689 4971 scope.go:117] "RemoveContainer" containerID="85439ed0d570a62aab3aadcd3cd25b6d9090c5152e308418101bb768ad360f9f" Nov 27 07:15:23 crc kubenswrapper[4971]: I1127 07:15:23.786777 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 27 07:15:23 crc kubenswrapper[4971]: I1127 07:15:23.911958 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-logs\") pod \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " Nov 27 07:15:23 crc kubenswrapper[4971]: I1127 07:15:23.912281 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-scripts\") pod \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " Nov 27 07:15:23 crc kubenswrapper[4971]: I1127 07:15:23.912365 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffvnf\" (UniqueName: \"kubernetes.io/projected/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-kube-api-access-ffvnf\") pod \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " Nov 27 07:15:23 crc kubenswrapper[4971]: I1127 07:15:23.912403 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-public-tls-certs\") pod \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " Nov 27 07:15:23 crc kubenswrapper[4971]: I1127 07:15:23.912436 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-combined-ca-bundle\") pod \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " Nov 27 07:15:23 crc kubenswrapper[4971]: I1127 07:15:23.912476 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-config-data\") pod \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " Nov 27 07:15:23 crc kubenswrapper[4971]: I1127 07:15:23.912512 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " Nov 27 07:15:23 crc kubenswrapper[4971]: I1127 07:15:23.912558 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-httpd-run\") pod \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\" (UID: \"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea\") " Nov 27 07:15:23 crc kubenswrapper[4971]: I1127 07:15:23.912595 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-logs" (OuterVolumeSpecName: "logs") pod "b15d1e2e-d53f-4263-8b48-880c0ee4a3ea" (UID: "b15d1e2e-d53f-4263-8b48-880c0ee4a3ea"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:15:23 crc kubenswrapper[4971]: I1127 07:15:23.913030 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-logs\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:23 crc kubenswrapper[4971]: I1127 07:15:23.913315 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b15d1e2e-d53f-4263-8b48-880c0ee4a3ea" (UID: "b15d1e2e-d53f-4263-8b48-880c0ee4a3ea"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:15:23 crc kubenswrapper[4971]: I1127 07:15:23.916864 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-kube-api-access-ffvnf" (OuterVolumeSpecName: "kube-api-access-ffvnf") pod "b15d1e2e-d53f-4263-8b48-880c0ee4a3ea" (UID: "b15d1e2e-d53f-4263-8b48-880c0ee4a3ea"). InnerVolumeSpecName "kube-api-access-ffvnf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:15:23 crc kubenswrapper[4971]: I1127 07:15:23.917043 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-scripts" (OuterVolumeSpecName: "scripts") pod "b15d1e2e-d53f-4263-8b48-880c0ee4a3ea" (UID: "b15d1e2e-d53f-4263-8b48-880c0ee4a3ea"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:23 crc kubenswrapper[4971]: I1127 07:15:23.935341 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "b15d1e2e-d53f-4263-8b48-880c0ee4a3ea" (UID: "b15d1e2e-d53f-4263-8b48-880c0ee4a3ea"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 27 07:15:23 crc kubenswrapper[4971]: I1127 07:15:23.946194 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b15d1e2e-d53f-4263-8b48-880c0ee4a3ea" (UID: "b15d1e2e-d53f-4263-8b48-880c0ee4a3ea"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:23 crc kubenswrapper[4971]: I1127 07:15:23.973173 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-config-data" (OuterVolumeSpecName: "config-data") pod "b15d1e2e-d53f-4263-8b48-880c0ee4a3ea" (UID: "b15d1e2e-d53f-4263-8b48-880c0ee4a3ea"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:23 crc kubenswrapper[4971]: I1127 07:15:23.986913 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b15d1e2e-d53f-4263-8b48-880c0ee4a3ea" (UID: "b15d1e2e-d53f-4263-8b48-880c0ee4a3ea"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.015202 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.015236 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffvnf\" (UniqueName: \"kubernetes.io/projected/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-kube-api-access-ffvnf\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.015247 4971 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.015256 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.015265 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.015295 4971 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.015305 4971 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.033831 4971 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.117128 4971 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.194178 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.194178 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b15d1e2e-d53f-4263-8b48-880c0ee4a3ea","Type":"ContainerDied","Data":"a2a34703a048673224c65cf7e6d55ba563cb7799b642dcc8ce9d23f46dc92c88"} Nov 27 07:15:24 crc kubenswrapper[4971]: E1127 07:15:24.199859 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:3a56b50437a0c9a9a7b30c10f5e43bbdb7d9a94b723c70d36f0b01ff545e00eb\\\"\"" pod="openstack/barbican-db-sync-xpjjx" podUID="04c6fe9e-2c13-4757-b2f7-237b1b8849f7" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.243723 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.256371 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.272499 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 07:15:24 crc kubenswrapper[4971]: E1127 07:15:24.273090 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b15d1e2e-d53f-4263-8b48-880c0ee4a3ea" containerName="glance-httpd" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.273108 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b15d1e2e-d53f-4263-8b48-880c0ee4a3ea" containerName="glance-httpd" Nov 27 07:15:24 crc kubenswrapper[4971]: E1127 07:15:24.273127 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b15d1e2e-d53f-4263-8b48-880c0ee4a3ea" containerName="glance-log" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.273135 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b15d1e2e-d53f-4263-8b48-880c0ee4a3ea" containerName="glance-log" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.273364 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b15d1e2e-d53f-4263-8b48-880c0ee4a3ea" containerName="glance-httpd" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.273382 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b15d1e2e-d53f-4263-8b48-880c0ee4a3ea" containerName="glance-log" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.274390 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.276873 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.276879 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.283238 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.421966 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-scripts\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.422056 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-logs\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.422323 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-config-data\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.422652 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.422743 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.422850 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkwt5\" (UniqueName: \"kubernetes.io/projected/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-kube-api-access-gkwt5\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.422901 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.422950 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.524739 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-scripts\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.524803 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-logs\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.524846 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-config-data\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.524898 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.524929 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.524957 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkwt5\" (UniqueName: \"kubernetes.io/projected/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-kube-api-access-gkwt5\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.524980 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.525006 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.525123 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.525543 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.525823 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-logs\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.533246 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-scripts\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.533276 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.533324 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.534158 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-config-data\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.544990 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkwt5\" (UniqueName: \"kubernetes.io/projected/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-kube-api-access-gkwt5\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.555847 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " pod="openstack/glance-default-external-api-0" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.560494 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b15d1e2e-d53f-4263-8b48-880c0ee4a3ea" path="/var/lib/kubelet/pods/b15d1e2e-d53f-4263-8b48-880c0ee4a3ea/volumes" Nov 27 07:15:24 crc kubenswrapper[4971]: I1127 07:15:24.605406 4971 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 27 07:15:25 crc kubenswrapper[4971]: E1127 07:15:25.129268 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b5266c9a26766fce2b92f95dff52d362a760f7baf1474cdcb33bd68570e096c0" Nov 27 07:15:25 crc kubenswrapper[4971]: E1127 07:15:25.129500 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b5266c9a26766fce2b92f95dff52d362a760f7baf1474cdcb33bd68570e096c0,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gt7fq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-th4ww_openstack(cd83d353-e492-4119-8890-77569c78bed2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.132863 4971 scope.go:117] "RemoveContainer" containerID="2d7ba85855f79ab485bd1745723e69366fec97659a73a95a0239fea68f2ed3e9" Nov 27 07:15:25 crc kubenswrapper[4971]: E1127 07:15:25.133263 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with 
ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-th4ww" podUID="cd83d353-e492-4119-8890-77569c78bed2" Nov 27 07:15:25 crc kubenswrapper[4971]: E1127 07:15:25.133336 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d7ba85855f79ab485bd1745723e69366fec97659a73a95a0239fea68f2ed3e9\": container with ID starting with 2d7ba85855f79ab485bd1745723e69366fec97659a73a95a0239fea68f2ed3e9 not found: ID does not exist" containerID="2d7ba85855f79ab485bd1745723e69366fec97659a73a95a0239fea68f2ed3e9" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.133369 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d7ba85855f79ab485bd1745723e69366fec97659a73a95a0239fea68f2ed3e9"} err="failed to get container status \"2d7ba85855f79ab485bd1745723e69366fec97659a73a95a0239fea68f2ed3e9\": rpc error: code = NotFound desc = could not find container \"2d7ba85855f79ab485bd1745723e69366fec97659a73a95a0239fea68f2ed3e9\": container with ID starting with 2d7ba85855f79ab485bd1745723e69366fec97659a73a95a0239fea68f2ed3e9 not found: ID does not exist" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.133392 4971 scope.go:117] "RemoveContainer" containerID="85439ed0d570a62aab3aadcd3cd25b6d9090c5152e308418101bb768ad360f9f" Nov 27 07:15:25 crc kubenswrapper[4971]: E1127 07:15:25.133775 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85439ed0d570a62aab3aadcd3cd25b6d9090c5152e308418101bb768ad360f9f\": container with ID starting with 85439ed0d570a62aab3aadcd3cd25b6d9090c5152e308418101bb768ad360f9f not found: ID does not exist" containerID="85439ed0d570a62aab3aadcd3cd25b6d9090c5152e308418101bb768ad360f9f" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.133796 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85439ed0d570a62aab3aadcd3cd25b6d9090c5152e308418101bb768ad360f9f"} err="failed to get container status \"85439ed0d570a62aab3aadcd3cd25b6d9090c5152e308418101bb768ad360f9f\": rpc error: code = NotFound desc = could not find container \"85439ed0d570a62aab3aadcd3cd25b6d9090c5152e308418101bb768ad360f9f\": container with ID starting with 85439ed0d570a62aab3aadcd3cd25b6d9090c5152e308418101bb768ad360f9f not found: ID does not exist" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.133811 4971 scope.go:117] "RemoveContainer" containerID="2d7ba85855f79ab485bd1745723e69366fec97659a73a95a0239fea68f2ed3e9" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.134214 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d7ba85855f79ab485bd1745723e69366fec97659a73a95a0239fea68f2ed3e9"} err="failed to get container status \"2d7ba85855f79ab485bd1745723e69366fec97659a73a95a0239fea68f2ed3e9\": rpc error: code = NotFound desc = could not find container \"2d7ba85855f79ab485bd1745723e69366fec97659a73a95a0239fea68f2ed3e9\": container with ID starting with 2d7ba85855f79ab485bd1745723e69366fec97659a73a95a0239fea68f2ed3e9 not found: ID does not exist" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.135842 4971 scope.go:117] "RemoveContainer" containerID="85439ed0d570a62aab3aadcd3cd25b6d9090c5152e308418101bb768ad360f9f" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.136178 4971 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"85439ed0d570a62aab3aadcd3cd25b6d9090c5152e308418101bb768ad360f9f"} err="failed to get container status \"85439ed0d570a62aab3aadcd3cd25b6d9090c5152e308418101bb768ad360f9f\": rpc error: code = NotFound desc = could not find container \"85439ed0d570a62aab3aadcd3cd25b6d9090c5152e308418101bb768ad360f9f\": container with ID starting with 85439ed0d570a62aab3aadcd3cd25b6d9090c5152e308418101bb768ad360f9f not found: ID does not exist" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.136384 4971 scope.go:117] "RemoveContainer" containerID="7882e8427a5c545c4627afc3e2cbb86e0147f1ae7136d4680ea6ec08c575d081" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.210901 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" event={"ID":"46ee0072-fbf4-40fa-b9bc-40fae95462c3","Type":"ContainerDied","Data":"655c579ca0015c7ef8afb8e457b57c67be56196409feccf039f0d0efb0406200"} Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.211226 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="655c579ca0015c7ef8afb8e457b57c67be56196409feccf039f0d0efb0406200" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.229771 4971 generic.go:334] "Generic (PLEG): container finished" podID="58ccbc84-3120-4e10-a215-07d41267629d" containerID="90d75b56cc24ed99876c2a3d94faea49f7cafd9b68b4d01c2a828f69a6df4484" exitCode=0 Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.230900 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wsg7b" event={"ID":"58ccbc84-3120-4e10-a215-07d41267629d","Type":"ContainerDied","Data":"90d75b56cc24ed99876c2a3d94faea49f7cafd9b68b4d01c2a828f69a6df4484"} Nov 27 07:15:25 crc kubenswrapper[4971]: E1127 07:15:25.232304 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b5266c9a26766fce2b92f95dff52d362a760f7baf1474cdcb33bd68570e096c0\\\"\"" pod="openstack/cinder-db-sync-th4ww" podUID="cd83d353-e492-4119-8890-77569c78bed2" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.293682 4971 scope.go:117] "RemoveContainer" containerID="3345fa2c05d61c6c3b3cf76efbc6465d898235060071298b605f35c78b1df8f2" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.319853 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.443467 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-ovsdbserver-sb\") pod \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.443566 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v682b\" (UniqueName: \"kubernetes.io/projected/46ee0072-fbf4-40fa-b9bc-40fae95462c3-kube-api-access-v682b\") pod \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.443620 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-dns-swift-storage-0\") pod \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.443705 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-dns-svc\") pod \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.443774 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-config\") pod \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.444038 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-ovsdbserver-nb\") pod \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\" (UID: \"46ee0072-fbf4-40fa-b9bc-40fae95462c3\") " Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.460934 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46ee0072-fbf4-40fa-b9bc-40fae95462c3-kube-api-access-v682b" (OuterVolumeSpecName: "kube-api-access-v682b") pod "46ee0072-fbf4-40fa-b9bc-40fae95462c3" (UID: "46ee0072-fbf4-40fa-b9bc-40fae95462c3"). InnerVolumeSpecName "kube-api-access-v682b". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.499187 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "46ee0072-fbf4-40fa-b9bc-40fae95462c3" (UID: "46ee0072-fbf4-40fa-b9bc-40fae95462c3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.504451 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "46ee0072-fbf4-40fa-b9bc-40fae95462c3" (UID: "46ee0072-fbf4-40fa-b9bc-40fae95462c3"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.505556 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "46ee0072-fbf4-40fa-b9bc-40fae95462c3" (UID: "46ee0072-fbf4-40fa-b9bc-40fae95462c3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.523164 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-config" (OuterVolumeSpecName: "config") pod "46ee0072-fbf4-40fa-b9bc-40fae95462c3" (UID: "46ee0072-fbf4-40fa-b9bc-40fae95462c3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.529137 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "46ee0072-fbf4-40fa-b9bc-40fae95462c3" (UID: "46ee0072-fbf4-40fa-b9bc-40fae95462c3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.546940 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.546996 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.547007 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.547020 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.547031 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v682b\" (UniqueName: \"kubernetes.io/projected/46ee0072-fbf4-40fa-b9bc-40fae95462c3-kube-api-access-v682b\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.547040 4971 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/46ee0072-fbf4-40fa-b9bc-40fae95462c3-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.649396 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.752694 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-v825j"] Nov 27 07:15:25 crc kubenswrapper[4971]: I1127 07:15:25.825147 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 07:15:25 crc kubenswrapper[4971]: W1127 07:15:25.869024 4971 manager.go:1169] Failed to process watch event 
{EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02f2a8a6_cfc5_4973_9f54_e4647a8e4552.slice/crio-29966680f44886cf8df7f44cf45c781f5b1f7f9c1991c08deb7e4bf4901a8a69 WatchSource:0}: Error finding container 29966680f44886cf8df7f44cf45c781f5b1f7f9c1991c08deb7e4bf4901a8a69: Status 404 returned error can't find the container with id 29966680f44886cf8df7f44cf45c781f5b1f7f9c1991c08deb7e4bf4901a8a69 Nov 27 07:15:25 crc kubenswrapper[4971]: W1127 07:15:25.871603 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41a0569f_4523_4dc9_a40f_bc22d113d523.slice/crio-05fd7646752740789a3e947aeb64fc2f1f6763faeb243b5bfc03f16ff4cb4691 WatchSource:0}: Error finding container 05fd7646752740789a3e947aeb64fc2f1f6763faeb243b5bfc03f16ff4cb4691: Status 404 returned error can't find the container with id 05fd7646752740789a3e947aeb64fc2f1f6763faeb243b5bfc03f16ff4cb4691 Nov 27 07:15:25 crc kubenswrapper[4971]: W1127 07:15:25.874029 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8cbb2c33_b8a0_4220_916a_ea0a8d738755.slice/crio-c7a47c0dd688f261ce9c99e58714d85b6aae87df2552888de94bfbe81c4c217f WatchSource:0}: Error finding container c7a47c0dd688f261ce9c99e58714d85b6aae87df2552888de94bfbe81c4c217f: Status 404 returned error can't find the container with id c7a47c0dd688f261ce9c99e58714d85b6aae87df2552888de94bfbe81c4c217f Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.239606 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"02f2a8a6-cfc5-4973-9f54-e4647a8e4552","Type":"ContainerStarted","Data":"29966680f44886cf8df7f44cf45c781f5b1f7f9c1991c08deb7e4bf4901a8a69"} Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.244093 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-kv5bz" event={"ID":"1098ae71-b794-4670-af66-67bd17375e2c","Type":"ContainerStarted","Data":"4d487d859ddb54b8c37233d551712b8c0af4822104b48abd901d36f7a1eddd1c"} Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.247664 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-v825j" event={"ID":"41a0569f-4523-4dc9-a40f-bc22d113d523","Type":"ContainerStarted","Data":"7c752ad17fdde3f68fd7a467e7e60367d073bc3eb47a408353424a2055983e41"} Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.247684 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-v825j" event={"ID":"41a0569f-4523-4dc9-a40f-bc22d113d523","Type":"ContainerStarted","Data":"05fd7646752740789a3e947aeb64fc2f1f6763faeb243b5bfc03f16ff4cb4691"} Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.250414 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8cbb2c33-b8a0-4220-916a-ea0a8d738755","Type":"ContainerStarted","Data":"c7a47c0dd688f261ce9c99e58714d85b6aae87df2552888de94bfbe81c4c217f"} Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.251960 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.252728 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b1ab1943-4c64-4083-8947-aa821a7298d2","Type":"ContainerStarted","Data":"61e437269c525a53ab08083843781fe1ad1d8d262e56a60813bfea706731a494"} Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.271100 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-kv5bz" podStartSLOduration=4.850495474 podStartE2EDuration="26.27108409s" podCreationTimestamp="2025-11-27 07:15:00 +0000 UTC" firstStartedPulling="2025-11-27 07:15:02.279993964 +0000 UTC m=+1340.472037882" lastFinishedPulling="2025-11-27 07:15:23.70058258 +0000 UTC m=+1361.892626498" observedRunningTime="2025-11-27 07:15:26.262121468 +0000 UTC m=+1364.454165386" watchObservedRunningTime="2025-11-27 07:15:26.27108409 +0000 UTC m=+1364.463128008" Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.293910 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-v825j" podStartSLOduration=12.293896665 podStartE2EDuration="12.293896665s" podCreationTimestamp="2025-11-27 07:15:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:15:26.286820014 +0000 UTC m=+1364.478863952" watchObservedRunningTime="2025-11-27 07:15:26.293896665 +0000 UTC m=+1364.485940583" Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.315070 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fd79dd9f9-z2dsj"] Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.322691 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-fd79dd9f9-z2dsj"] Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.413410 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.413469 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.413522 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.414507 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5137c970e6cdfd6e2022087bc7914ffa98a593d65bc63eec37cd087c8fcec32d"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.414577 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" 
containerName="machine-config-daemon" containerID="cri-o://5137c970e6cdfd6e2022087bc7914ffa98a593d65bc63eec37cd087c8fcec32d" gracePeriod=600 Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.567077 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46ee0072-fbf4-40fa-b9bc-40fae95462c3" path="/var/lib/kubelet/pods/46ee0072-fbf4-40fa-b9bc-40fae95462c3/volumes" Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.567290 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-wsg7b" Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.679280 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/58ccbc84-3120-4e10-a215-07d41267629d-config\") pod \"58ccbc84-3120-4e10-a215-07d41267629d\" (UID: \"58ccbc84-3120-4e10-a215-07d41267629d\") " Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.679363 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ccbc84-3120-4e10-a215-07d41267629d-combined-ca-bundle\") pod \"58ccbc84-3120-4e10-a215-07d41267629d\" (UID: \"58ccbc84-3120-4e10-a215-07d41267629d\") " Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.679435 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c76dw\" (UniqueName: \"kubernetes.io/projected/58ccbc84-3120-4e10-a215-07d41267629d-kube-api-access-c76dw\") pod \"58ccbc84-3120-4e10-a215-07d41267629d\" (UID: \"58ccbc84-3120-4e10-a215-07d41267629d\") " Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.684170 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58ccbc84-3120-4e10-a215-07d41267629d-kube-api-access-c76dw" (OuterVolumeSpecName: "kube-api-access-c76dw") pod "58ccbc84-3120-4e10-a215-07d41267629d" (UID: "58ccbc84-3120-4e10-a215-07d41267629d"). InnerVolumeSpecName "kube-api-access-c76dw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.726919 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58ccbc84-3120-4e10-a215-07d41267629d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "58ccbc84-3120-4e10-a215-07d41267629d" (UID: "58ccbc84-3120-4e10-a215-07d41267629d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.733814 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58ccbc84-3120-4e10-a215-07d41267629d-config" (OuterVolumeSpecName: "config") pod "58ccbc84-3120-4e10-a215-07d41267629d" (UID: "58ccbc84-3120-4e10-a215-07d41267629d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.813500 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/58ccbc84-3120-4e10-a215-07d41267629d-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.813564 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58ccbc84-3120-4e10-a215-07d41267629d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:26 crc kubenswrapper[4971]: I1127 07:15:26.813580 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c76dw\" (UniqueName: \"kubernetes.io/projected/58ccbc84-3120-4e10-a215-07d41267629d-kube-api-access-c76dw\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.320387 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="5137c970e6cdfd6e2022087bc7914ffa98a593d65bc63eec37cd087c8fcec32d" exitCode=0 Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.320460 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"5137c970e6cdfd6e2022087bc7914ffa98a593d65bc63eec37cd087c8fcec32d"} Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.320739 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"} Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.320781 4971 scope.go:117] "RemoveContainer" containerID="30867a48d7e31eebb704511f63aa767a3965e3de0a82008135ef08d2702adfac" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.325061 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"02f2a8a6-cfc5-4973-9f54-e4647a8e4552","Type":"ContainerStarted","Data":"77ce808b9ead19bdd862809257befb60bc498510116c4ba5427f504d52563215"} Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.330689 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8cbb2c33-b8a0-4220-916a-ea0a8d738755","Type":"ContainerStarted","Data":"aa0a8caacb07764852e7600c71557c1dc12f7c2b4791c9c93c1bf945ac233ecc"} Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.338501 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-wsg7b" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.338596 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wsg7b" event={"ID":"58ccbc84-3120-4e10-a215-07d41267629d","Type":"ContainerDied","Data":"646dd32e727408c002dfdeb44ba6919f77e10b39f5800402e79a69ee2e27a173"} Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.338631 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="646dd32e727408c002dfdeb44ba6919f77e10b39f5800402e79a69ee2e27a173" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.489268 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-ccf55cbcc-jsxv8"] Nov 27 07:15:27 crc kubenswrapper[4971]: E1127 07:15:27.490588 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58ccbc84-3120-4e10-a215-07d41267629d" containerName="neutron-db-sync" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.490639 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="58ccbc84-3120-4e10-a215-07d41267629d" containerName="neutron-db-sync" Nov 27 07:15:27 crc kubenswrapper[4971]: E1127 07:15:27.490673 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46ee0072-fbf4-40fa-b9bc-40fae95462c3" containerName="dnsmasq-dns" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.490684 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="46ee0072-fbf4-40fa-b9bc-40fae95462c3" containerName="dnsmasq-dns" Nov 27 07:15:27 crc kubenswrapper[4971]: E1127 07:15:27.490733 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46ee0072-fbf4-40fa-b9bc-40fae95462c3" containerName="init" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.490743 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="46ee0072-fbf4-40fa-b9bc-40fae95462c3" containerName="init" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.490996 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="58ccbc84-3120-4e10-a215-07d41267629d" containerName="neutron-db-sync" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.491018 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="46ee0072-fbf4-40fa-b9bc-40fae95462c3" containerName="dnsmasq-dns" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.492407 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.515987 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-ccf55cbcc-jsxv8"] Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.537944 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-ovsdbserver-nb\") pod \"dnsmasq-dns-ccf55cbcc-jsxv8\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.537994 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-dns-swift-storage-0\") pod \"dnsmasq-dns-ccf55cbcc-jsxv8\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.538088 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxpjz\" (UniqueName: \"kubernetes.io/projected/4794dbe4-7057-46d2-90d6-11a2bda30e6a-kube-api-access-rxpjz\") pod \"dnsmasq-dns-ccf55cbcc-jsxv8\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.538170 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-config\") pod \"dnsmasq-dns-ccf55cbcc-jsxv8\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.538204 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-dns-svc\") pod \"dnsmasq-dns-ccf55cbcc-jsxv8\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.538235 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-ovsdbserver-sb\") pod \"dnsmasq-dns-ccf55cbcc-jsxv8\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.623503 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-797b967b5b-7wfhc"] Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.625488 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.637496 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-5qpx5" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.637866 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.637922 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.638030 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.640146 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9ksw\" (UniqueName: \"kubernetes.io/projected/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-kube-api-access-c9ksw\") pod \"neutron-797b967b5b-7wfhc\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.640217 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-ovsdbserver-nb\") pod \"dnsmasq-dns-ccf55cbcc-jsxv8\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.640247 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-dns-swift-storage-0\") pod \"dnsmasq-dns-ccf55cbcc-jsxv8\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.640341 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-config\") pod \"neutron-797b967b5b-7wfhc\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.640430 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-httpd-config\") pod \"neutron-797b967b5b-7wfhc\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.640456 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxpjz\" (UniqueName: \"kubernetes.io/projected/4794dbe4-7057-46d2-90d6-11a2bda30e6a-kube-api-access-rxpjz\") pod \"dnsmasq-dns-ccf55cbcc-jsxv8\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.640487 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-ovndb-tls-certs\") pod \"neutron-797b967b5b-7wfhc\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 
07:15:27.641976 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-ovsdbserver-nb\") pod \"dnsmasq-dns-ccf55cbcc-jsxv8\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.642634 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-dns-swift-storage-0\") pod \"dnsmasq-dns-ccf55cbcc-jsxv8\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.672873 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-config\") pod \"dnsmasq-dns-ccf55cbcc-jsxv8\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.672951 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-dns-svc\") pod \"dnsmasq-dns-ccf55cbcc-jsxv8\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.672979 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-combined-ca-bundle\") pod \"neutron-797b967b5b-7wfhc\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.673044 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-ovsdbserver-sb\") pod \"dnsmasq-dns-ccf55cbcc-jsxv8\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.673926 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-config\") pod \"dnsmasq-dns-ccf55cbcc-jsxv8\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.674110 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-ovsdbserver-sb\") pod \"dnsmasq-dns-ccf55cbcc-jsxv8\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.674474 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-dns-svc\") pod \"dnsmasq-dns-ccf55cbcc-jsxv8\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.684253 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-797b967b5b-7wfhc"] Nov 27 07:15:27 crc 
kubenswrapper[4971]: I1127 07:15:27.722637 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxpjz\" (UniqueName: \"kubernetes.io/projected/4794dbe4-7057-46d2-90d6-11a2bda30e6a-kube-api-access-rxpjz\") pod \"dnsmasq-dns-ccf55cbcc-jsxv8\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.778019 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-config\") pod \"neutron-797b967b5b-7wfhc\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.778415 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-httpd-config\") pod \"neutron-797b967b5b-7wfhc\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.778459 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-ovndb-tls-certs\") pod \"neutron-797b967b5b-7wfhc\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.779088 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-combined-ca-bundle\") pod \"neutron-797b967b5b-7wfhc\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.779168 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9ksw\" (UniqueName: \"kubernetes.io/projected/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-kube-api-access-c9ksw\") pod \"neutron-797b967b5b-7wfhc\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.782822 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-httpd-config\") pod \"neutron-797b967b5b-7wfhc\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.784569 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-config\") pod \"neutron-797b967b5b-7wfhc\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.788138 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-combined-ca-bundle\") pod \"neutron-797b967b5b-7wfhc\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.799001 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9ksw\" (UniqueName: 
\"kubernetes.io/projected/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-kube-api-access-c9ksw\") pod \"neutron-797b967b5b-7wfhc\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.813199 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-ovndb-tls-certs\") pod \"neutron-797b967b5b-7wfhc\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.842747 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:27 crc kubenswrapper[4971]: I1127 07:15:27.987222 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:15:28 crc kubenswrapper[4971]: I1127 07:15:28.358812 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8cbb2c33-b8a0-4220-916a-ea0a8d738755","Type":"ContainerStarted","Data":"1cfd5131dc75f97aea2ccb4abbcadf7829f00887f56b2a68f2cc459a21f9b7d0"} Nov 27 07:15:28 crc kubenswrapper[4971]: I1127 07:15:28.387477 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=22.387454324 podStartE2EDuration="22.387454324s" podCreationTimestamp="2025-11-27 07:15:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:15:28.384940716 +0000 UTC m=+1366.576984664" watchObservedRunningTime="2025-11-27 07:15:28.387454324 +0000 UTC m=+1366.579498242" Nov 27 07:15:28 crc kubenswrapper[4971]: I1127 07:15:28.410561 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"02f2a8a6-cfc5-4973-9f54-e4647a8e4552","Type":"ContainerStarted","Data":"295d83fb85f340aebaee5fe652faaafa4bfaa16381ff8087ec2330b20ed3fd1d"} Nov 27 07:15:28 crc kubenswrapper[4971]: I1127 07:15:28.414674 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-ccf55cbcc-jsxv8"] Nov 27 07:15:28 crc kubenswrapper[4971]: I1127 07:15:28.415180 4971 generic.go:334] "Generic (PLEG): container finished" podID="1098ae71-b794-4670-af66-67bd17375e2c" containerID="4d487d859ddb54b8c37233d551712b8c0af4822104b48abd901d36f7a1eddd1c" exitCode=0 Nov 27 07:15:28 crc kubenswrapper[4971]: I1127 07:15:28.415270 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-kv5bz" event={"ID":"1098ae71-b794-4670-af66-67bd17375e2c","Type":"ContainerDied","Data":"4d487d859ddb54b8c37233d551712b8c0af4822104b48abd901d36f7a1eddd1c"} Nov 27 07:15:28 crc kubenswrapper[4971]: W1127 07:15:28.440306 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4794dbe4_7057_46d2_90d6_11a2bda30e6a.slice/crio-f7c89d0b923349e7eed8e259371421a1224afe56de834fdd14e939fa06daa4a8 WatchSource:0}: Error finding container f7c89d0b923349e7eed8e259371421a1224afe56de834fdd14e939fa06daa4a8: Status 404 returned error can't find the container with id f7c89d0b923349e7eed8e259371421a1224afe56de834fdd14e939fa06daa4a8 Nov 27 07:15:28 crc kubenswrapper[4971]: I1127 07:15:28.442657 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/glance-default-external-api-0" podStartSLOduration=4.442632512 podStartE2EDuration="4.442632512s" podCreationTimestamp="2025-11-27 07:15:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:15:28.430228997 +0000 UTC m=+1366.622272935" watchObservedRunningTime="2025-11-27 07:15:28.442632512 +0000 UTC m=+1366.634676430" Nov 27 07:15:28 crc kubenswrapper[4971]: I1127 07:15:28.705190 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-797b967b5b-7wfhc"] Nov 27 07:15:29 crc kubenswrapper[4971]: I1127 07:15:29.145198 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-fd79dd9f9-z2dsj" podUID="46ee0072-fbf4-40fa-b9bc-40fae95462c3" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.128:5353: i/o timeout" Nov 27 07:15:29 crc kubenswrapper[4971]: I1127 07:15:29.430706 4971 generic.go:334] "Generic (PLEG): container finished" podID="4794dbe4-7057-46d2-90d6-11a2bda30e6a" containerID="bebd79ccf252c4009ab7d34445f34c6b773dd2e45cf68ac93647621c0b23d9b5" exitCode=0 Nov 27 07:15:29 crc kubenswrapper[4971]: I1127 07:15:29.430817 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" event={"ID":"4794dbe4-7057-46d2-90d6-11a2bda30e6a","Type":"ContainerDied","Data":"bebd79ccf252c4009ab7d34445f34c6b773dd2e45cf68ac93647621c0b23d9b5"} Nov 27 07:15:29 crc kubenswrapper[4971]: I1127 07:15:29.431071 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" event={"ID":"4794dbe4-7057-46d2-90d6-11a2bda30e6a","Type":"ContainerStarted","Data":"f7c89d0b923349e7eed8e259371421a1224afe56de834fdd14e939fa06daa4a8"} Nov 27 07:15:29 crc kubenswrapper[4971]: I1127 07:15:29.433084 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-797b967b5b-7wfhc" event={"ID":"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d","Type":"ContainerStarted","Data":"619b29157e835d81ce9cbe2e7c2420eae8c918ffa9f80d969ea45c6be45769d8"} Nov 27 07:15:29 crc kubenswrapper[4971]: I1127 07:15:29.433143 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-797b967b5b-7wfhc" event={"ID":"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d","Type":"ContainerStarted","Data":"4709702de716b12d745d76424c14b3427d0845081a7da6c4031024fe395ac4ef"} Nov 27 07:15:29 crc kubenswrapper[4971]: I1127 07:15:29.433162 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-797b967b5b-7wfhc" event={"ID":"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d","Type":"ContainerStarted","Data":"5034f48f0bffa9b460c5305537f686d48abcdf0c4094489ea3c5ce109016f206"} Nov 27 07:15:29 crc kubenswrapper[4971]: I1127 07:15:29.504437 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-797b967b5b-7wfhc" podStartSLOduration=2.504415826 podStartE2EDuration="2.504415826s" podCreationTimestamp="2025-11-27 07:15:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:15:29.484036567 +0000 UTC m=+1367.676080495" watchObservedRunningTime="2025-11-27 07:15:29.504415826 +0000 UTC m=+1367.696459744" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.033075 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-kv5bz" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.146567 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1098ae71-b794-4670-af66-67bd17375e2c-combined-ca-bundle\") pod \"1098ae71-b794-4670-af66-67bd17375e2c\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.146653 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1098ae71-b794-4670-af66-67bd17375e2c-logs\") pod \"1098ae71-b794-4670-af66-67bd17375e2c\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.146904 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1098ae71-b794-4670-af66-67bd17375e2c-scripts\") pod \"1098ae71-b794-4670-af66-67bd17375e2c\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.146978 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9n2wm\" (UniqueName: \"kubernetes.io/projected/1098ae71-b794-4670-af66-67bd17375e2c-kube-api-access-9n2wm\") pod \"1098ae71-b794-4670-af66-67bd17375e2c\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.147078 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1098ae71-b794-4670-af66-67bd17375e2c-config-data\") pod \"1098ae71-b794-4670-af66-67bd17375e2c\" (UID: \"1098ae71-b794-4670-af66-67bd17375e2c\") " Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.149162 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1098ae71-b794-4670-af66-67bd17375e2c-logs" (OuterVolumeSpecName: "logs") pod "1098ae71-b794-4670-af66-67bd17375e2c" (UID: "1098ae71-b794-4670-af66-67bd17375e2c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.158731 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1098ae71-b794-4670-af66-67bd17375e2c-scripts" (OuterVolumeSpecName: "scripts") pod "1098ae71-b794-4670-af66-67bd17375e2c" (UID: "1098ae71-b794-4670-af66-67bd17375e2c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.164593 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1098ae71-b794-4670-af66-67bd17375e2c-kube-api-access-9n2wm" (OuterVolumeSpecName: "kube-api-access-9n2wm") pod "1098ae71-b794-4670-af66-67bd17375e2c" (UID: "1098ae71-b794-4670-af66-67bd17375e2c"). InnerVolumeSpecName "kube-api-access-9n2wm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.174855 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1098ae71-b794-4670-af66-67bd17375e2c-config-data" (OuterVolumeSpecName: "config-data") pod "1098ae71-b794-4670-af66-67bd17375e2c" (UID: "1098ae71-b794-4670-af66-67bd17375e2c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.178196 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1098ae71-b794-4670-af66-67bd17375e2c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1098ae71-b794-4670-af66-67bd17375e2c" (UID: "1098ae71-b794-4670-af66-67bd17375e2c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.250252 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9n2wm\" (UniqueName: \"kubernetes.io/projected/1098ae71-b794-4670-af66-67bd17375e2c-kube-api-access-9n2wm\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.250291 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1098ae71-b794-4670-af66-67bd17375e2c-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.250303 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1098ae71-b794-4670-af66-67bd17375e2c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.250312 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1098ae71-b794-4670-af66-67bd17375e2c-logs\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.250321 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1098ae71-b794-4670-af66-67bd17375e2c-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.297657 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7b8cfffdbf-92cj6"] Nov 27 07:15:30 crc kubenswrapper[4971]: E1127 07:15:30.298511 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1098ae71-b794-4670-af66-67bd17375e2c" containerName="placement-db-sync" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.298547 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="1098ae71-b794-4670-af66-67bd17375e2c" containerName="placement-db-sync" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.298946 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="1098ae71-b794-4670-af66-67bd17375e2c" containerName="placement-db-sync" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.309979 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.321421 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.321546 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.326937 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7b8cfffdbf-92cj6"] Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.351442 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtz2h\" (UniqueName: \"kubernetes.io/projected/622e028f-779d-4306-923c-ee204fdef6b0-kube-api-access-vtz2h\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.351495 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-config\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.351521 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-ovndb-tls-certs\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.351653 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-combined-ca-bundle\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.351727 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-public-tls-certs\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.351948 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-httpd-config\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.352103 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-internal-tls-certs\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.448629 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" 
event={"ID":"4794dbe4-7057-46d2-90d6-11a2bda30e6a","Type":"ContainerStarted","Data":"9277a9bdb5129fc609fd9f09229bde9c450696e154ed58abc17c9bd971e52f93"} Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.449651 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.454706 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-combined-ca-bundle\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.454812 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-public-tls-certs\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.454861 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-httpd-config\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.454911 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-internal-tls-certs\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.454942 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtz2h\" (UniqueName: \"kubernetes.io/projected/622e028f-779d-4306-923c-ee204fdef6b0-kube-api-access-vtz2h\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.454964 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-config\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.454987 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-ovndb-tls-certs\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.459435 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-kv5bz" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.459895 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-kv5bz" event={"ID":"1098ae71-b794-4670-af66-67bd17375e2c","Type":"ContainerDied","Data":"50aeb76b97f9c9f64cf34e83cb3311574d9ffe6921fc36cb33897208082dae4b"} Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.459925 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="50aeb76b97f9c9f64cf34e83cb3311574d9ffe6921fc36cb33897208082dae4b" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.459947 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.463150 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-ovndb-tls-certs\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.463760 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-combined-ca-bundle\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.473244 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-internal-tls-certs\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.473721 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-config\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.474152 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-httpd-config\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.475077 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-public-tls-certs\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.494196 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtz2h\" (UniqueName: \"kubernetes.io/projected/622e028f-779d-4306-923c-ee204fdef6b0-kube-api-access-vtz2h\") pod \"neutron-7b8cfffdbf-92cj6\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.496367 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" 
podStartSLOduration=3.496344828 podStartE2EDuration="3.496344828s" podCreationTimestamp="2025-11-27 07:15:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:15:30.475308461 +0000 UTC m=+1368.667352389" watchObservedRunningTime="2025-11-27 07:15:30.496344828 +0000 UTC m=+1368.688388766" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.582422 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-65cf5bcbb-zf65t"] Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.589327 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.592505 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.592881 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.592970 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-74s52" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.593519 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.594158 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.622110 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-65cf5bcbb-zf65t"] Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.645359 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.659814 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-config-data\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.659911 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c88f9c00-b02f-4070-b81e-733009e44691-logs\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.659940 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-public-tls-certs\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.659989 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-internal-tls-certs\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.660013 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-scripts\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.660084 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9dqn\" (UniqueName: \"kubernetes.io/projected/c88f9c00-b02f-4070-b81e-733009e44691-kube-api-access-b9dqn\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.660128 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-combined-ca-bundle\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.762011 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c88f9c00-b02f-4070-b81e-733009e44691-logs\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.762066 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-public-tls-certs\") pod \"placement-65cf5bcbb-zf65t\" (UID: 
\"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.762110 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-internal-tls-certs\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.762133 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-scripts\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.762172 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9dqn\" (UniqueName: \"kubernetes.io/projected/c88f9c00-b02f-4070-b81e-733009e44691-kube-api-access-b9dqn\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.762203 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-combined-ca-bundle\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.762249 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-config-data\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.763133 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c88f9c00-b02f-4070-b81e-733009e44691-logs\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.766446 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-scripts\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.766708 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-config-data\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.769547 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-public-tls-certs\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.769571 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-combined-ca-bundle\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.773027 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-internal-tls-certs\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.784748 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9dqn\" (UniqueName: \"kubernetes.io/projected/c88f9c00-b02f-4070-b81e-733009e44691-kube-api-access-b9dqn\") pod \"placement-65cf5bcbb-zf65t\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:30 crc kubenswrapper[4971]: I1127 07:15:30.918905 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:31 crc kubenswrapper[4971]: I1127 07:15:31.469950 4971 generic.go:334] "Generic (PLEG): container finished" podID="41a0569f-4523-4dc9-a40f-bc22d113d523" containerID="7c752ad17fdde3f68fd7a467e7e60367d073bc3eb47a408353424a2055983e41" exitCode=0 Nov 27 07:15:31 crc kubenswrapper[4971]: I1127 07:15:31.470040 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-v825j" event={"ID":"41a0569f-4523-4dc9-a40f-bc22d113d523","Type":"ContainerDied","Data":"7c752ad17fdde3f68fd7a467e7e60367d073bc3eb47a408353424a2055983e41"} Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.426184 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.448674 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-combined-ca-bundle\") pod \"41a0569f-4523-4dc9-a40f-bc22d113d523\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.448742 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-config-data\") pod \"41a0569f-4523-4dc9-a40f-bc22d113d523\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.448837 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-credential-keys\") pod \"41a0569f-4523-4dc9-a40f-bc22d113d523\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.448966 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-scripts\") pod \"41a0569f-4523-4dc9-a40f-bc22d113d523\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.449001 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-fernet-keys\") pod \"41a0569f-4523-4dc9-a40f-bc22d113d523\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.449118 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2qqg\" (UniqueName: \"kubernetes.io/projected/41a0569f-4523-4dc9-a40f-bc22d113d523-kube-api-access-x2qqg\") pod \"41a0569f-4523-4dc9-a40f-bc22d113d523\" (UID: \"41a0569f-4523-4dc9-a40f-bc22d113d523\") " Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.464571 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "41a0569f-4523-4dc9-a40f-bc22d113d523" (UID: "41a0569f-4523-4dc9-a40f-bc22d113d523"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.466255 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41a0569f-4523-4dc9-a40f-bc22d113d523-kube-api-access-x2qqg" (OuterVolumeSpecName: "kube-api-access-x2qqg") pod "41a0569f-4523-4dc9-a40f-bc22d113d523" (UID: "41a0569f-4523-4dc9-a40f-bc22d113d523"). InnerVolumeSpecName "kube-api-access-x2qqg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.468393 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-scripts" (OuterVolumeSpecName: "scripts") pod "41a0569f-4523-4dc9-a40f-bc22d113d523" (UID: "41a0569f-4523-4dc9-a40f-bc22d113d523"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.489884 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-config-data" (OuterVolumeSpecName: "config-data") pod "41a0569f-4523-4dc9-a40f-bc22d113d523" (UID: "41a0569f-4523-4dc9-a40f-bc22d113d523"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.501872 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "41a0569f-4523-4dc9-a40f-bc22d113d523" (UID: "41a0569f-4523-4dc9-a40f-bc22d113d523"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.509734 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "41a0569f-4523-4dc9-a40f-bc22d113d523" (UID: "41a0569f-4523-4dc9-a40f-bc22d113d523"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.527019 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-v825j" event={"ID":"41a0569f-4523-4dc9-a40f-bc22d113d523","Type":"ContainerDied","Data":"05fd7646752740789a3e947aeb64fc2f1f6763faeb243b5bfc03f16ff4cb4691"} Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.527057 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05fd7646752740789a3e947aeb64fc2f1f6763faeb243b5bfc03f16ff4cb4691" Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.527110 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-v825j" Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.553261 4971 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.553296 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.553305 4971 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.553317 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2qqg\" (UniqueName: \"kubernetes.io/projected/41a0569f-4523-4dc9-a40f-bc22d113d523-kube-api-access-x2qqg\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.553328 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.553336 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41a0569f-4523-4dc9-a40f-bc22d113d523-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.605808 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.605856 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.637055 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.664467 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.764068 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7b8cfffdbf-92cj6"] Nov 27 07:15:34 crc kubenswrapper[4971]: I1127 07:15:34.775408 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-65cf5bcbb-zf65t"] Nov 27 07:15:34 crc kubenswrapper[4971]: W1127 07:15:34.778150 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc88f9c00_b02f_4070_b81e_733009e44691.slice/crio-4cf6cd20fef7aacfff33103e531d9c68aff3e165bcc56d39a875302d06e90d6f WatchSource:0}: Error finding container 4cf6cd20fef7aacfff33103e531d9c68aff3e165bcc56d39a875302d06e90d6f: Status 404 returned error can't find the container with id 4cf6cd20fef7aacfff33103e531d9c68aff3e165bcc56d39a875302d06e90d6f Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.536900 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-65cf5bcbb-zf65t" 
event={"ID":"c88f9c00-b02f-4070-b81e-733009e44691","Type":"ContainerStarted","Data":"669e2c2b2985b62934235229da4195fd416e9129758e938e024eadd9d158c9c5"} Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.536946 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-65cf5bcbb-zf65t" event={"ID":"c88f9c00-b02f-4070-b81e-733009e44691","Type":"ContainerStarted","Data":"e70c98903785fe4279c39fe33060ba6d64e42416ec0207f04ea6c26602ecfd68"} Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.536961 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-65cf5bcbb-zf65t" event={"ID":"c88f9c00-b02f-4070-b81e-733009e44691","Type":"ContainerStarted","Data":"4cf6cd20fef7aacfff33103e531d9c68aff3e165bcc56d39a875302d06e90d6f"} Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.537077 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.539914 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7b8cfffdbf-92cj6" event={"ID":"622e028f-779d-4306-923c-ee204fdef6b0","Type":"ContainerStarted","Data":"360d9e165c0c9e2b9b0b1cee20e9ddcb63854ee168b8dc44e7daf6d665e3532e"} Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.539971 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7b8cfffdbf-92cj6" event={"ID":"622e028f-779d-4306-923c-ee204fdef6b0","Type":"ContainerStarted","Data":"2685d0639fa1ae15b7a9608c81aa2b95f5d3f50c5aefc4159568fe3afd5b0c33"} Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.539984 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7b8cfffdbf-92cj6" event={"ID":"622e028f-779d-4306-923c-ee204fdef6b0","Type":"ContainerStarted","Data":"cfacdbff5ae68f2b65336fa8989a19c54e1983372c976d51adf9f4847b8f7be3"} Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.540042 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.546378 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b1ab1943-4c64-4083-8947-aa821a7298d2","Type":"ContainerStarted","Data":"71f99c90b42bb9aeabd87ed414835968d4a4c3b05fa9dae4f2ccd861e23a5d2f"} Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.546734 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.546784 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.568308 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-65cf5bcbb-zf65t" podStartSLOduration=5.56828848 podStartE2EDuration="5.56828848s" podCreationTimestamp="2025-11-27 07:15:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:15:35.557115909 +0000 UTC m=+1373.749159827" watchObservedRunningTime="2025-11-27 07:15:35.56828848 +0000 UTC m=+1373.760332398" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.585096 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7b8cfffdbf-92cj6" podStartSLOduration=5.585074203 podStartE2EDuration="5.585074203s" podCreationTimestamp="2025-11-27 
07:15:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:15:35.578862155 +0000 UTC m=+1373.770906073" watchObservedRunningTime="2025-11-27 07:15:35.585074203 +0000 UTC m=+1373.777118121" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.619806 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-55bd67bdbd-llwzk"] Nov 27 07:15:35 crc kubenswrapper[4971]: E1127 07:15:35.620837 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41a0569f-4523-4dc9-a40f-bc22d113d523" containerName="keystone-bootstrap" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.620863 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="41a0569f-4523-4dc9-a40f-bc22d113d523" containerName="keystone-bootstrap" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.625295 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="41a0569f-4523-4dc9-a40f-bc22d113d523" containerName="keystone-bootstrap" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.631756 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.632975 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-55bd67bdbd-llwzk"] Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.639354 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.639694 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.640378 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.640617 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.640738 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gcnm2" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.647836 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.678957 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-config-data\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.679030 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-internal-tls-certs\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.679064 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v95vq\" (UniqueName: \"kubernetes.io/projected/b2ac1dd8-824c-482d-8c0e-47573535f172-kube-api-access-v95vq\") pod \"keystone-55bd67bdbd-llwzk\" (UID: 
\"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.679156 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-combined-ca-bundle\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.679194 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-credential-keys\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.679285 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-public-tls-certs\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.679454 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-fernet-keys\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.679510 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-scripts\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.781527 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-credential-keys\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.782517 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-public-tls-certs\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.782626 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-fernet-keys\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.782659 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-scripts\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " 
pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.782918 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-config-data\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.783022 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-internal-tls-certs\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.783109 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v95vq\" (UniqueName: \"kubernetes.io/projected/b2ac1dd8-824c-482d-8c0e-47573535f172-kube-api-access-v95vq\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.783180 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-combined-ca-bundle\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.785249 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-credential-keys\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.785890 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-scripts\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.786104 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-public-tls-certs\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.786851 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-internal-tls-certs\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.787215 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-combined-ca-bundle\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.789036 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-fernet-keys\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.796210 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-config-data\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:35 crc kubenswrapper[4971]: I1127 07:15:35.818294 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v95vq\" (UniqueName: \"kubernetes.io/projected/b2ac1dd8-824c-482d-8c0e-47573535f172-kube-api-access-v95vq\") pod \"keystone-55bd67bdbd-llwzk\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:36 crc kubenswrapper[4971]: I1127 07:15:36.016381 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:36 crc kubenswrapper[4971]: I1127 07:15:36.416864 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 27 07:15:36 crc kubenswrapper[4971]: I1127 07:15:36.417204 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 27 07:15:36 crc kubenswrapper[4971]: I1127 07:15:36.420780 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 27 07:15:36 crc kubenswrapper[4971]: I1127 07:15:36.420824 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 27 07:15:36 crc kubenswrapper[4971]: I1127 07:15:36.535427 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 27 07:15:36 crc kubenswrapper[4971]: I1127 07:15:36.537799 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 27 07:15:36 crc kubenswrapper[4971]: I1127 07:15:36.588987 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:15:36 crc kubenswrapper[4971]: I1127 07:15:36.716357 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-55bd67bdbd-llwzk"] Nov 27 07:15:37 crc kubenswrapper[4971]: I1127 07:15:37.582010 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-55bd67bdbd-llwzk" event={"ID":"b2ac1dd8-824c-482d-8c0e-47573535f172","Type":"ContainerStarted","Data":"e21d5793819283c5f8059801c98537ef24923751613488a4fef7ec0596a380e9"} Nov 27 07:15:37 crc kubenswrapper[4971]: I1127 07:15:37.582338 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-55bd67bdbd-llwzk" event={"ID":"b2ac1dd8-824c-482d-8c0e-47573535f172","Type":"ContainerStarted","Data":"769d7df66c9874c427870af3be46b762847efa06d0b942d45f7c0691a477006f"} Nov 27 07:15:37 crc kubenswrapper[4971]: I1127 07:15:37.582359 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:15:37 crc kubenswrapper[4971]: I1127 07:15:37.610248 4971 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/keystone-55bd67bdbd-llwzk" podStartSLOduration=2.610227699 podStartE2EDuration="2.610227699s" podCreationTimestamp="2025-11-27 07:15:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:15:37.601586396 +0000 UTC m=+1375.793630314" watchObservedRunningTime="2025-11-27 07:15:37.610227699 +0000 UTC m=+1375.802271617" Nov 27 07:15:37 crc kubenswrapper[4971]: I1127 07:15:37.844684 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:15:37 crc kubenswrapper[4971]: I1127 07:15:37.935350 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b97f654c9-9p9vb"] Nov 27 07:15:37 crc kubenswrapper[4971]: I1127 07:15:37.935664 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" podUID="521d9ffc-606c-4aca-a94d-8a667ce541d3" containerName="dnsmasq-dns" containerID="cri-o://2ff7481e2a724f2221aeb07a54d81c8a62fd3d916a6d1a53e9fbd71ad11f3924" gracePeriod=10 Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.245313 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.245711 4971 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.252211 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.482433 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.604973 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcwzk\" (UniqueName: \"kubernetes.io/projected/521d9ffc-606c-4aca-a94d-8a667ce541d3-kube-api-access-lcwzk\") pod \"521d9ffc-606c-4aca-a94d-8a667ce541d3\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.605062 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-dns-svc\") pod \"521d9ffc-606c-4aca-a94d-8a667ce541d3\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.605148 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-dns-swift-storage-0\") pod \"521d9ffc-606c-4aca-a94d-8a667ce541d3\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.605189 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-ovsdbserver-nb\") pod \"521d9ffc-606c-4aca-a94d-8a667ce541d3\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.605226 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-ovsdbserver-sb\") pod \"521d9ffc-606c-4aca-a94d-8a667ce541d3\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.605255 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-config\") pod \"521d9ffc-606c-4aca-a94d-8a667ce541d3\" (UID: \"521d9ffc-606c-4aca-a94d-8a667ce541d3\") " Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.611794 4971 generic.go:334] "Generic (PLEG): container finished" podID="521d9ffc-606c-4aca-a94d-8a667ce541d3" containerID="2ff7481e2a724f2221aeb07a54d81c8a62fd3d916a6d1a53e9fbd71ad11f3924" exitCode=0 Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.613229 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.613437 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" event={"ID":"521d9ffc-606c-4aca-a94d-8a667ce541d3","Type":"ContainerDied","Data":"2ff7481e2a724f2221aeb07a54d81c8a62fd3d916a6d1a53e9fbd71ad11f3924"} Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.613516 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b97f654c9-9p9vb" event={"ID":"521d9ffc-606c-4aca-a94d-8a667ce541d3","Type":"ContainerDied","Data":"aa49b65c947d9bbfd41eb10f0709029b0365f1ad34175e9917d45eb756059302"} Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.613559 4971 scope.go:117] "RemoveContainer" containerID="2ff7481e2a724f2221aeb07a54d81c8a62fd3d916a6d1a53e9fbd71ad11f3924" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.626757 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/521d9ffc-606c-4aca-a94d-8a667ce541d3-kube-api-access-lcwzk" (OuterVolumeSpecName: "kube-api-access-lcwzk") pod "521d9ffc-606c-4aca-a94d-8a667ce541d3" (UID: "521d9ffc-606c-4aca-a94d-8a667ce541d3"). InnerVolumeSpecName "kube-api-access-lcwzk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.689890 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "521d9ffc-606c-4aca-a94d-8a667ce541d3" (UID: "521d9ffc-606c-4aca-a94d-8a667ce541d3"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.697467 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "521d9ffc-606c-4aca-a94d-8a667ce541d3" (UID: "521d9ffc-606c-4aca-a94d-8a667ce541d3"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.700475 4971 scope.go:117] "RemoveContainer" containerID="db3db056e3b55911cf377fa3c05350a21880e42db521f615e561d6412cc41d4d" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.708799 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcwzk\" (UniqueName: \"kubernetes.io/projected/521d9ffc-606c-4aca-a94d-8a667ce541d3-kube-api-access-lcwzk\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.708838 4971 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.708851 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.712210 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-config" (OuterVolumeSpecName: "config") pod "521d9ffc-606c-4aca-a94d-8a667ce541d3" (UID: "521d9ffc-606c-4aca-a94d-8a667ce541d3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.714915 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "521d9ffc-606c-4aca-a94d-8a667ce541d3" (UID: "521d9ffc-606c-4aca-a94d-8a667ce541d3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.722627 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "521d9ffc-606c-4aca-a94d-8a667ce541d3" (UID: "521d9ffc-606c-4aca-a94d-8a667ce541d3"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.733109 4971 scope.go:117] "RemoveContainer" containerID="2ff7481e2a724f2221aeb07a54d81c8a62fd3d916a6d1a53e9fbd71ad11f3924" Nov 27 07:15:38 crc kubenswrapper[4971]: E1127 07:15:38.733888 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ff7481e2a724f2221aeb07a54d81c8a62fd3d916a6d1a53e9fbd71ad11f3924\": container with ID starting with 2ff7481e2a724f2221aeb07a54d81c8a62fd3d916a6d1a53e9fbd71ad11f3924 not found: ID does not exist" containerID="2ff7481e2a724f2221aeb07a54d81c8a62fd3d916a6d1a53e9fbd71ad11f3924" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.733949 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ff7481e2a724f2221aeb07a54d81c8a62fd3d916a6d1a53e9fbd71ad11f3924"} err="failed to get container status \"2ff7481e2a724f2221aeb07a54d81c8a62fd3d916a6d1a53e9fbd71ad11f3924\": rpc error: code = NotFound desc = could not find container \"2ff7481e2a724f2221aeb07a54d81c8a62fd3d916a6d1a53e9fbd71ad11f3924\": container with ID starting with 2ff7481e2a724f2221aeb07a54d81c8a62fd3d916a6d1a53e9fbd71ad11f3924 not found: ID does not exist" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.733988 4971 scope.go:117] "RemoveContainer" containerID="db3db056e3b55911cf377fa3c05350a21880e42db521f615e561d6412cc41d4d" Nov 27 07:15:38 crc kubenswrapper[4971]: E1127 07:15:38.734436 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db3db056e3b55911cf377fa3c05350a21880e42db521f615e561d6412cc41d4d\": container with ID starting with db3db056e3b55911cf377fa3c05350a21880e42db521f615e561d6412cc41d4d not found: ID does not exist" containerID="db3db056e3b55911cf377fa3c05350a21880e42db521f615e561d6412cc41d4d" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.734474 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db3db056e3b55911cf377fa3c05350a21880e42db521f615e561d6412cc41d4d"} err="failed to get container status \"db3db056e3b55911cf377fa3c05350a21880e42db521f615e561d6412cc41d4d\": rpc error: code = NotFound desc = could not find container \"db3db056e3b55911cf377fa3c05350a21880e42db521f615e561d6412cc41d4d\": container with ID starting with db3db056e3b55911cf377fa3c05350a21880e42db521f615e561d6412cc41d4d not found: ID does not exist" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.810699 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.810735 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.810745 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/521d9ffc-606c-4aca-a94d-8a667ce541d3-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.955367 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b97f654c9-9p9vb"] Nov 27 07:15:38 crc kubenswrapper[4971]: I1127 07:15:38.965892 4971 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b97f654c9-9p9vb"] Nov 27 07:15:39 crc kubenswrapper[4971]: I1127 07:15:39.042633 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 27 07:15:39 crc kubenswrapper[4971]: I1127 07:15:39.042752 4971 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 27 07:15:39 crc kubenswrapper[4971]: I1127 07:15:39.398621 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 27 07:15:39 crc kubenswrapper[4971]: I1127 07:15:39.657741 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-th4ww" event={"ID":"cd83d353-e492-4119-8890-77569c78bed2","Type":"ContainerStarted","Data":"76df4ac7c1f082519f7ee5545a6be65bf6a51cf8772adc725260109bee894240"} Nov 27 07:15:39 crc kubenswrapper[4971]: I1127 07:15:39.686579 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-th4ww" podStartSLOduration=3.457814048 podStartE2EDuration="39.686561484s" podCreationTimestamp="2025-11-27 07:15:00 +0000 UTC" firstStartedPulling="2025-11-27 07:15:01.981813187 +0000 UTC m=+1340.173857105" lastFinishedPulling="2025-11-27 07:15:38.210560623 +0000 UTC m=+1376.402604541" observedRunningTime="2025-11-27 07:15:39.685829984 +0000 UTC m=+1377.877873902" watchObservedRunningTime="2025-11-27 07:15:39.686561484 +0000 UTC m=+1377.878605412" Nov 27 07:15:40 crc kubenswrapper[4971]: I1127 07:15:40.564201 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="521d9ffc-606c-4aca-a94d-8a667ce541d3" path="/var/lib/kubelet/pods/521d9ffc-606c-4aca-a94d-8a667ce541d3/volumes" Nov 27 07:15:44 crc kubenswrapper[4971]: I1127 07:15:44.722658 4971 generic.go:334] "Generic (PLEG): container finished" podID="cd83d353-e492-4119-8890-77569c78bed2" containerID="76df4ac7c1f082519f7ee5545a6be65bf6a51cf8772adc725260109bee894240" exitCode=0 Nov 27 07:15:44 crc kubenswrapper[4971]: I1127 07:15:44.722779 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-th4ww" event={"ID":"cd83d353-e492-4119-8890-77569c78bed2","Type":"ContainerDied","Data":"76df4ac7c1f082519f7ee5545a6be65bf6a51cf8772adc725260109bee894240"} Nov 27 07:15:45 crc kubenswrapper[4971]: E1127 07:15:45.190716 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="b1ab1943-4c64-4083-8947-aa821a7298d2" Nov 27 07:15:45 crc kubenswrapper[4971]: I1127 07:15:45.734288 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xpjjx" event={"ID":"04c6fe9e-2c13-4757-b2f7-237b1b8849f7","Type":"ContainerStarted","Data":"c9e6b5f718d9ab4322f8be2590e93ec413c8974b46389cc43520e7f166ab9a2d"} Nov 27 07:15:45 crc kubenswrapper[4971]: I1127 07:15:45.738042 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b1ab1943-4c64-4083-8947-aa821a7298d2","Type":"ContainerStarted","Data":"16b942a0f93378767fd502e8b27d4e29699ec3623fb1a5f84918b26b2ae2016a"} Nov 27 07:15:45 crc kubenswrapper[4971]: I1127 07:15:45.738408 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 27 07:15:45 crc kubenswrapper[4971]: I1127 07:15:45.738215 4971 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack/ceilometer-0" podUID="b1ab1943-4c64-4083-8947-aa821a7298d2" containerName="proxy-httpd" containerID="cri-o://16b942a0f93378767fd502e8b27d4e29699ec3623fb1a5f84918b26b2ae2016a" gracePeriod=30 Nov 27 07:15:45 crc kubenswrapper[4971]: I1127 07:15:45.738158 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b1ab1943-4c64-4083-8947-aa821a7298d2" containerName="ceilometer-notification-agent" containerID="cri-o://61e437269c525a53ab08083843781fe1ad1d8d262e56a60813bfea706731a494" gracePeriod=30 Nov 27 07:15:45 crc kubenswrapper[4971]: I1127 07:15:45.738264 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b1ab1943-4c64-4083-8947-aa821a7298d2" containerName="sg-core" containerID="cri-o://71f99c90b42bb9aeabd87ed414835968d4a4c3b05fa9dae4f2ccd861e23a5d2f" gracePeriod=30 Nov 27 07:15:45 crc kubenswrapper[4971]: I1127 07:15:45.771094 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-xpjjx" podStartSLOduration=7.021835986 podStartE2EDuration="45.771074755s" podCreationTimestamp="2025-11-27 07:15:00 +0000 UTC" firstStartedPulling="2025-11-27 07:15:02.102016774 +0000 UTC m=+1340.294060692" lastFinishedPulling="2025-11-27 07:15:40.851255543 +0000 UTC m=+1379.043299461" observedRunningTime="2025-11-27 07:15:45.765943377 +0000 UTC m=+1383.957987295" watchObservedRunningTime="2025-11-27 07:15:45.771074755 +0000 UTC m=+1383.963118673" Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.144069 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-th4ww" Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.275346 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-config-data\") pod \"cd83d353-e492-4119-8890-77569c78bed2\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.275488 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-scripts\") pod \"cd83d353-e492-4119-8890-77569c78bed2\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.275591 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-combined-ca-bundle\") pod \"cd83d353-e492-4119-8890-77569c78bed2\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.275750 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cd83d353-e492-4119-8890-77569c78bed2-etc-machine-id\") pod \"cd83d353-e492-4119-8890-77569c78bed2\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.275787 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-db-sync-config-data\") pod \"cd83d353-e492-4119-8890-77569c78bed2\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 
07:15:46.275863 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gt7fq\" (UniqueName: \"kubernetes.io/projected/cd83d353-e492-4119-8890-77569c78bed2-kube-api-access-gt7fq\") pod \"cd83d353-e492-4119-8890-77569c78bed2\" (UID: \"cd83d353-e492-4119-8890-77569c78bed2\") " Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.275927 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cd83d353-e492-4119-8890-77569c78bed2-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "cd83d353-e492-4119-8890-77569c78bed2" (UID: "cd83d353-e492-4119-8890-77569c78bed2"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.276544 4971 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cd83d353-e492-4119-8890-77569c78bed2-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.280668 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "cd83d353-e492-4119-8890-77569c78bed2" (UID: "cd83d353-e492-4119-8890-77569c78bed2"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.281039 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd83d353-e492-4119-8890-77569c78bed2-kube-api-access-gt7fq" (OuterVolumeSpecName: "kube-api-access-gt7fq") pod "cd83d353-e492-4119-8890-77569c78bed2" (UID: "cd83d353-e492-4119-8890-77569c78bed2"). InnerVolumeSpecName "kube-api-access-gt7fq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.290479 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-scripts" (OuterVolumeSpecName: "scripts") pod "cd83d353-e492-4119-8890-77569c78bed2" (UID: "cd83d353-e492-4119-8890-77569c78bed2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.302847 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cd83d353-e492-4119-8890-77569c78bed2" (UID: "cd83d353-e492-4119-8890-77569c78bed2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.327547 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-config-data" (OuterVolumeSpecName: "config-data") pod "cd83d353-e492-4119-8890-77569c78bed2" (UID: "cd83d353-e492-4119-8890-77569c78bed2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.379100 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.379176 4971 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.379199 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gt7fq\" (UniqueName: \"kubernetes.io/projected/cd83d353-e492-4119-8890-77569c78bed2-kube-api-access-gt7fq\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.379218 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.379236 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd83d353-e492-4119-8890-77569c78bed2-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.747294 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-th4ww" event={"ID":"cd83d353-e492-4119-8890-77569c78bed2","Type":"ContainerDied","Data":"8fc23f424b3a2a9f17017a2aeb44fa99e3c8ba29bb9333d7e8cdc48924ab48cc"} Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.747335 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8fc23f424b3a2a9f17017a2aeb44fa99e3c8ba29bb9333d7e8cdc48924ab48cc" Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.747367 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-th4ww"
Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.750431 4971 generic.go:334] "Generic (PLEG): container finished" podID="b1ab1943-4c64-4083-8947-aa821a7298d2" containerID="16b942a0f93378767fd502e8b27d4e29699ec3623fb1a5f84918b26b2ae2016a" exitCode=0
Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.750459 4971 generic.go:334] "Generic (PLEG): container finished" podID="b1ab1943-4c64-4083-8947-aa821a7298d2" containerID="71f99c90b42bb9aeabd87ed414835968d4a4c3b05fa9dae4f2ccd861e23a5d2f" exitCode=2
Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.750465 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b1ab1943-4c64-4083-8947-aa821a7298d2","Type":"ContainerDied","Data":"16b942a0f93378767fd502e8b27d4e29699ec3623fb1a5f84918b26b2ae2016a"}
Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.750490 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b1ab1943-4c64-4083-8947-aa821a7298d2","Type":"ContainerDied","Data":"71f99c90b42bb9aeabd87ed414835968d4a4c3b05fa9dae4f2ccd861e23a5d2f"}
Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.999433 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Nov 27 07:15:46 crc kubenswrapper[4971]: E1127 07:15:46.999877 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd83d353-e492-4119-8890-77569c78bed2" containerName="cinder-db-sync"
Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.999899 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd83d353-e492-4119-8890-77569c78bed2" containerName="cinder-db-sync"
Nov 27 07:15:46 crc kubenswrapper[4971]: E1127 07:15:46.999924 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="521d9ffc-606c-4aca-a94d-8a667ce541d3" containerName="dnsmasq-dns"
Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.999931 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="521d9ffc-606c-4aca-a94d-8a667ce541d3" containerName="dnsmasq-dns"
Nov 27 07:15:46 crc kubenswrapper[4971]: E1127 07:15:46.999951 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="521d9ffc-606c-4aca-a94d-8a667ce541d3" containerName="init"
Nov 27 07:15:46 crc kubenswrapper[4971]: I1127 07:15:46.999958 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="521d9ffc-606c-4aca-a94d-8a667ce541d3" containerName="init"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.000159 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd83d353-e492-4119-8890-77569c78bed2" containerName="cinder-db-sync"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.000177 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="521d9ffc-606c-4aca-a94d-8a667ce541d3" containerName="dnsmasq-dns"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.001189 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.003824 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.004029 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.004196 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.017750 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-7v5sc"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.018206 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.093361 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-scripts\") pod \"cinder-scheduler-0\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.093409 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.093436 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.093498 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-config-data\") pod \"cinder-scheduler-0\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.093542 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.093584 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrpzb\" (UniqueName: \"kubernetes.io/projected/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-kube-api-access-vrpzb\") pod \"cinder-scheduler-0\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.116151 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f8f75f555-mxcqw"]
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.117850 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.138918 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f8f75f555-mxcqw"]
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.195853 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qzpz\" (UniqueName: \"kubernetes.io/projected/5deb9f08-1de8-4f62-98cc-3e80232f17fc-kube-api-access-7qzpz\") pod \"dnsmasq-dns-7f8f75f555-mxcqw\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.195932 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-config\") pod \"dnsmasq-dns-7f8f75f555-mxcqw\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.195986 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-scripts\") pod \"cinder-scheduler-0\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.196004 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.196068 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.196088 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-ovsdbserver-sb\") pod \"dnsmasq-dns-7f8f75f555-mxcqw\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.196121 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-dns-swift-storage-0\") pod \"dnsmasq-dns-7f8f75f555-mxcqw\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.196162 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-dns-svc\") pod \"dnsmasq-dns-7f8f75f555-mxcqw\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.196181 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-config-data\") pod \"cinder-scheduler-0\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.196203 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-ovsdbserver-nb\") pod \"dnsmasq-dns-7f8f75f555-mxcqw\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.196227 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.196261 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrpzb\" (UniqueName: \"kubernetes.io/projected/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-kube-api-access-vrpzb\") pod \"cinder-scheduler-0\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.201122 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.201217 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.206577 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-config-data\") pod \"cinder-scheduler-0\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.206725 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-scripts\") pod \"cinder-scheduler-0\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.209712 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.224837 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrpzb\" (UniqueName: \"kubernetes.io/projected/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-kube-api-access-vrpzb\") pod \"cinder-scheduler-0\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.245800 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.251093 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.262727 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.282679 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.323077 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.342054 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.342120 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljpf8\" (UniqueName: \"kubernetes.io/projected/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-kube-api-access-ljpf8\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.342167 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-scripts\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.342229 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-dns-swift-storage-0\") pod \"dnsmasq-dns-7f8f75f555-mxcqw\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.342299 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-dns-svc\") pod \"dnsmasq-dns-7f8f75f555-mxcqw\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.342334 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.342363 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-ovsdbserver-nb\") pod \"dnsmasq-dns-7f8f75f555-mxcqw\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.342412 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-config-data-custom\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.342449 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-logs\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.342961 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qzpz\" (UniqueName: \"kubernetes.io/projected/5deb9f08-1de8-4f62-98cc-3e80232f17fc-kube-api-access-7qzpz\") pod \"dnsmasq-dns-7f8f75f555-mxcqw\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.343338 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-config-data\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.343712 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-ovsdbserver-nb\") pod \"dnsmasq-dns-7f8f75f555-mxcqw\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.344047 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-dns-svc\") pod \"dnsmasq-dns-7f8f75f555-mxcqw\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.344053 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-dns-swift-storage-0\") pod \"dnsmasq-dns-7f8f75f555-mxcqw\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.347669 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-config\") pod \"dnsmasq-dns-7f8f75f555-mxcqw\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.348245 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-config\") pod \"dnsmasq-dns-7f8f75f555-mxcqw\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.352731 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-ovsdbserver-sb\") pod \"dnsmasq-dns-7f8f75f555-mxcqw\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.354574 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-ovsdbserver-sb\") pod \"dnsmasq-dns-7f8f75f555-mxcqw\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.374397 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qzpz\" (UniqueName: \"kubernetes.io/projected/5deb9f08-1de8-4f62-98cc-3e80232f17fc-kube-api-access-7qzpz\") pod \"dnsmasq-dns-7f8f75f555-mxcqw\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.454345 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.459692 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-config-data-custom\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.459738 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-logs\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.459838 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-config-data\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.459924 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.459951 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljpf8\" (UniqueName: \"kubernetes.io/projected/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-kube-api-access-ljpf8\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.459989 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-scripts\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.460055 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.460175 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.461035 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-logs\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.464283 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-scripts\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.470858 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.471158 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-config-data\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.471419 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-config-data-custom\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.513486 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljpf8\" (UniqueName: \"kubernetes.io/projected/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-kube-api-access-ljpf8\") pod \"cinder-api-0\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.637815 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.768822 4971 generic.go:334] "Generic (PLEG): container finished" podID="04c6fe9e-2c13-4757-b2f7-237b1b8849f7" containerID="c9e6b5f718d9ab4322f8be2590e93ec413c8974b46389cc43520e7f166ab9a2d" exitCode=0
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.768889 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xpjjx" event={"ID":"04c6fe9e-2c13-4757-b2f7-237b1b8849f7","Type":"ContainerDied","Data":"c9e6b5f718d9ab4322f8be2590e93ec413c8974b46389cc43520e7f166ab9a2d"}
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.856080 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Nov 27 07:15:47 crc kubenswrapper[4971]: W1127 07:15:47.862182 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podffb1ef88_bf93_4f0c_a64a_952aeef1269d.slice/crio-1a4719af35d9259f65324555390b30dbacfe2fd15a264d66d7e26c814fb76e99 WatchSource:0}: Error finding container 1a4719af35d9259f65324555390b30dbacfe2fd15a264d66d7e26c814fb76e99: Status 404 returned error can't find the container with id 1a4719af35d9259f65324555390b30dbacfe2fd15a264d66d7e26c814fb76e99
Nov 27 07:15:47 crc kubenswrapper[4971]: I1127 07:15:47.970135 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f8f75f555-mxcqw"]
Nov 27 07:15:48 crc kubenswrapper[4971]: I1127 07:15:48.171174 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Nov 27 07:15:48 crc kubenswrapper[4971]: W1127 07:15:48.177848 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35f86c0e_94fb_4ffa_babb_fca5b707fb8f.slice/crio-caa2b30635953c295efee54d82012310cc17b8c0fc73ef25dc41debc0e535f49 WatchSource:0}: Error finding container caa2b30635953c295efee54d82012310cc17b8c0fc73ef25dc41debc0e535f49: Status 404 returned error can't find the container with id caa2b30635953c295efee54d82012310cc17b8c0fc73ef25dc41debc0e535f49
Nov 27 07:15:48 crc kubenswrapper[4971]: I1127 07:15:48.780396 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ffb1ef88-bf93-4f0c-a64a-952aeef1269d","Type":"ContainerStarted","Data":"1a4719af35d9259f65324555390b30dbacfe2fd15a264d66d7e26c814fb76e99"}
Nov 27 07:15:48 crc kubenswrapper[4971]: I1127 07:15:48.782868 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"35f86c0e-94fb-4ffa-babb-fca5b707fb8f","Type":"ContainerStarted","Data":"caa2b30635953c295efee54d82012310cc17b8c0fc73ef25dc41debc0e535f49"}
Nov 27 07:15:48 crc kubenswrapper[4971]: I1127 07:15:48.785833 4971 generic.go:334] "Generic (PLEG): container finished" podID="5deb9f08-1de8-4f62-98cc-3e80232f17fc" containerID="4c977a1427eb8b3559a566dcfb48d34f77dc023ce8853e1620d08e7ec0f36948" exitCode=0
Nov 27 07:15:48 crc kubenswrapper[4971]: I1127 07:15:48.786085 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw" event={"ID":"5deb9f08-1de8-4f62-98cc-3e80232f17fc","Type":"ContainerDied","Data":"4c977a1427eb8b3559a566dcfb48d34f77dc023ce8853e1620d08e7ec0f36948"}
Nov 27 07:15:48 crc kubenswrapper[4971]: I1127 07:15:48.786116 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw" event={"ID":"5deb9f08-1de8-4f62-98cc-3e80232f17fc","Type":"ContainerStarted","Data":"e8d2ae1fe2167f63776769eb4b33e19a0188b768e31b9a67009731d24e63d58e"}
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.104098 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-xpjjx"
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.199316 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04c6fe9e-2c13-4757-b2f7-237b1b8849f7-combined-ca-bundle\") pod \"04c6fe9e-2c13-4757-b2f7-237b1b8849f7\" (UID: \"04c6fe9e-2c13-4757-b2f7-237b1b8849f7\") "
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.199488 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/04c6fe9e-2c13-4757-b2f7-237b1b8849f7-db-sync-config-data\") pod \"04c6fe9e-2c13-4757-b2f7-237b1b8849f7\" (UID: \"04c6fe9e-2c13-4757-b2f7-237b1b8849f7\") "
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.199665 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpt96\" (UniqueName: \"kubernetes.io/projected/04c6fe9e-2c13-4757-b2f7-237b1b8849f7-kube-api-access-fpt96\") pod \"04c6fe9e-2c13-4757-b2f7-237b1b8849f7\" (UID: \"04c6fe9e-2c13-4757-b2f7-237b1b8849f7\") "
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.211779 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04c6fe9e-2c13-4757-b2f7-237b1b8849f7-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "04c6fe9e-2c13-4757-b2f7-237b1b8849f7" (UID: "04c6fe9e-2c13-4757-b2f7-237b1b8849f7"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.211814 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04c6fe9e-2c13-4757-b2f7-237b1b8849f7-kube-api-access-fpt96" (OuterVolumeSpecName: "kube-api-access-fpt96") pod "04c6fe9e-2c13-4757-b2f7-237b1b8849f7" (UID: "04c6fe9e-2c13-4757-b2f7-237b1b8849f7"). InnerVolumeSpecName "kube-api-access-fpt96". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.227199 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04c6fe9e-2c13-4757-b2f7-237b1b8849f7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "04c6fe9e-2c13-4757-b2f7-237b1b8849f7" (UID: "04c6fe9e-2c13-4757-b2f7-237b1b8849f7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.306171 4971 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/04c6fe9e-2c13-4757-b2f7-237b1b8849f7-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.306223 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpt96\" (UniqueName: \"kubernetes.io/projected/04c6fe9e-2c13-4757-b2f7-237b1b8849f7-kube-api-access-fpt96\") on node \"crc\" DevicePath \"\""
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.306236 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04c6fe9e-2c13-4757-b2f7-237b1b8849f7-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.704702 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.824014 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xpjjx" event={"ID":"04c6fe9e-2c13-4757-b2f7-237b1b8849f7","Type":"ContainerDied","Data":"c9fa7351073f38c54dc1aa77c83bae318f8851533db7181c69600eca571a2dfb"}
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.824320 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9fa7351073f38c54dc1aa77c83bae318f8851533db7181c69600eca571a2dfb"
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.824404 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-xpjjx"
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.828792 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.838004 4971 generic.go:334] "Generic (PLEG): container finished" podID="b1ab1943-4c64-4083-8947-aa821a7298d2" containerID="61e437269c525a53ab08083843781fe1ad1d8d262e56a60813bfea706731a494" exitCode=0
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.838081 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b1ab1943-4c64-4083-8947-aa821a7298d2","Type":"ContainerDied","Data":"61e437269c525a53ab08083843781fe1ad1d8d262e56a60813bfea706731a494"}
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.838133 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b1ab1943-4c64-4083-8947-aa821a7298d2","Type":"ContainerDied","Data":"af98c203026756aee7f3cd6d423aeb5cac208a2a5c1f8c70cf974e218b63bdda"}
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.838153 4971 scope.go:117] "RemoveContainer" containerID="16b942a0f93378767fd502e8b27d4e29699ec3623fb1a5f84918b26b2ae2016a"
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.846494 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw" event={"ID":"5deb9f08-1de8-4f62-98cc-3e80232f17fc","Type":"ContainerStarted","Data":"81ea14a50102475c525ee547f9979162aa0063a316664e1bda1b771fb01c40a9"}
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.846662 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw"
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.848324 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ffb1ef88-bf93-4f0c-a64a-952aeef1269d","Type":"ContainerStarted","Data":"b095cff7a1a753b4b2004211a2c617ec125f18ff9ea89f9ba9efac82419fff32"}
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.850437 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"35f86c0e-94fb-4ffa-babb-fca5b707fb8f","Type":"ContainerStarted","Data":"7cdecf9002a281a8d97c90cbc163f619be16412047c89726876d471270804577"}
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.882008 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw" podStartSLOduration=2.881979529 podStartE2EDuration="2.881979529s" podCreationTimestamp="2025-11-27 07:15:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:15:49.876285716 +0000 UTC m=+1388.068329654" watchObservedRunningTime="2025-11-27 07:15:49.881979529 +0000 UTC m=+1388.074023457"
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.882734 4971 scope.go:117] "RemoveContainer" containerID="71f99c90b42bb9aeabd87ed414835968d4a4c3b05fa9dae4f2ccd861e23a5d2f"
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.918100 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-scripts\") pod \"b1ab1943-4c64-4083-8947-aa821a7298d2\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") "
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.918179 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b1ab1943-4c64-4083-8947-aa821a7298d2-log-httpd\") pod \"b1ab1943-4c64-4083-8947-aa821a7298d2\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") "
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.918220 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-combined-ca-bundle\") pod \"b1ab1943-4c64-4083-8947-aa821a7298d2\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") "
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.918280 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b1ab1943-4c64-4083-8947-aa821a7298d2-run-httpd\") pod \"b1ab1943-4c64-4083-8947-aa821a7298d2\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") "
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.918381 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-sg-core-conf-yaml\") pod \"b1ab1943-4c64-4083-8947-aa821a7298d2\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") "
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.918483 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncglj\" (UniqueName: \"kubernetes.io/projected/b1ab1943-4c64-4083-8947-aa821a7298d2-kube-api-access-ncglj\") pod \"b1ab1943-4c64-4083-8947-aa821a7298d2\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") "
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.918849 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1ab1943-4c64-4083-8947-aa821a7298d2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b1ab1943-4c64-4083-8947-aa821a7298d2" (UID: "b1ab1943-4c64-4083-8947-aa821a7298d2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.919150 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1ab1943-4c64-4083-8947-aa821a7298d2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b1ab1943-4c64-4083-8947-aa821a7298d2" (UID: "b1ab1943-4c64-4083-8947-aa821a7298d2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.920017 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-config-data\") pod \"b1ab1943-4c64-4083-8947-aa821a7298d2\" (UID: \"b1ab1943-4c64-4083-8947-aa821a7298d2\") "
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.920775 4971 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b1ab1943-4c64-4083-8947-aa821a7298d2-log-httpd\") on node \"crc\" DevicePath \"\""
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.920793 4971 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b1ab1943-4c64-4083-8947-aa821a7298d2-run-httpd\") on node \"crc\" DevicePath \"\""
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.926596 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1ab1943-4c64-4083-8947-aa821a7298d2-kube-api-access-ncglj" (OuterVolumeSpecName: "kube-api-access-ncglj") pod "b1ab1943-4c64-4083-8947-aa821a7298d2" (UID: "b1ab1943-4c64-4083-8947-aa821a7298d2"). InnerVolumeSpecName "kube-api-access-ncglj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.927823 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-scripts" (OuterVolumeSpecName: "scripts") pod "b1ab1943-4c64-4083-8947-aa821a7298d2" (UID: "b1ab1943-4c64-4083-8947-aa821a7298d2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:15:49 crc kubenswrapper[4971]: I1127 07:15:49.955729 4971 scope.go:117] "RemoveContainer" containerID="61e437269c525a53ab08083843781fe1ad1d8d262e56a60813bfea706731a494"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:49.996792 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b1ab1943-4c64-4083-8947-aa821a7298d2" (UID: "b1ab1943-4c64-4083-8947-aa821a7298d2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.022361 4971 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.022393 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncglj\" (UniqueName: \"kubernetes.io/projected/b1ab1943-4c64-4083-8947-aa821a7298d2-kube-api-access-ncglj\") on node \"crc\" DevicePath \"\""
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.022405 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-scripts\") on node \"crc\" DevicePath \"\""
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.049804 4971 scope.go:117] "RemoveContainer" containerID="16b942a0f93378767fd502e8b27d4e29699ec3623fb1a5f84918b26b2ae2016a"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.051936 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-7865dbd7d9-zp55h"]
Nov 27 07:15:50 crc kubenswrapper[4971]: E1127 07:15:50.052620 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1ab1943-4c64-4083-8947-aa821a7298d2" containerName="proxy-httpd"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.052663 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1ab1943-4c64-4083-8947-aa821a7298d2" containerName="proxy-httpd"
Nov 27 07:15:50 crc kubenswrapper[4971]: E1127 07:15:50.052722 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1ab1943-4c64-4083-8947-aa821a7298d2" containerName="sg-core"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.052732 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1ab1943-4c64-4083-8947-aa821a7298d2" containerName="sg-core"
Nov 27 07:15:50 crc kubenswrapper[4971]: E1127 07:15:50.052773 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04c6fe9e-2c13-4757-b2f7-237b1b8849f7" containerName="barbican-db-sync"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.052782 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="04c6fe9e-2c13-4757-b2f7-237b1b8849f7" containerName="barbican-db-sync"
Nov 27 07:15:50 crc kubenswrapper[4971]: E1127 07:15:50.052811 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1ab1943-4c64-4083-8947-aa821a7298d2" containerName="ceilometer-notification-agent"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.052820 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1ab1943-4c64-4083-8947-aa821a7298d2" containerName="ceilometer-notification-agent"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.053351 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="04c6fe9e-2c13-4757-b2f7-237b1b8849f7" containerName="barbican-db-sync"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.053409 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1ab1943-4c64-4083-8947-aa821a7298d2" containerName="sg-core"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.053434 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1ab1943-4c64-4083-8947-aa821a7298d2" containerName="proxy-httpd"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.053445 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1ab1943-4c64-4083-8947-aa821a7298d2" containerName="ceilometer-notification-agent"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.063667 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7865dbd7d9-zp55h"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.102709 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b1ab1943-4c64-4083-8947-aa821a7298d2" (UID: "b1ab1943-4c64-4083-8947-aa821a7298d2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:15:50 crc kubenswrapper[4971]: E1127 07:15:50.103246 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16b942a0f93378767fd502e8b27d4e29699ec3623fb1a5f84918b26b2ae2016a\": container with ID starting with 16b942a0f93378767fd502e8b27d4e29699ec3623fb1a5f84918b26b2ae2016a not found: ID does not exist" containerID="16b942a0f93378767fd502e8b27d4e29699ec3623fb1a5f84918b26b2ae2016a"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.103317 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16b942a0f93378767fd502e8b27d4e29699ec3623fb1a5f84918b26b2ae2016a"} err="failed to get container status \"16b942a0f93378767fd502e8b27d4e29699ec3623fb1a5f84918b26b2ae2016a\": rpc error: code = NotFound desc = could not find container \"16b942a0f93378767fd502e8b27d4e29699ec3623fb1a5f84918b26b2ae2016a\": container with ID starting with 16b942a0f93378767fd502e8b27d4e29699ec3623fb1a5f84918b26b2ae2016a not found: ID does not exist"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.103355 4971 scope.go:117] "RemoveContainer" containerID="71f99c90b42bb9aeabd87ed414835968d4a4c3b05fa9dae4f2ccd861e23a5d2f"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.103744 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.104187 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-2njps"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.104416 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Nov 27 07:15:50 crc kubenswrapper[4971]: E1127 07:15:50.111581 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71f99c90b42bb9aeabd87ed414835968d4a4c3b05fa9dae4f2ccd861e23a5d2f\": container with ID starting with 71f99c90b42bb9aeabd87ed414835968d4a4c3b05fa9dae4f2ccd861e23a5d2f not found: ID does not exist" containerID="71f99c90b42bb9aeabd87ed414835968d4a4c3b05fa9dae4f2ccd861e23a5d2f"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.111644 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71f99c90b42bb9aeabd87ed414835968d4a4c3b05fa9dae4f2ccd861e23a5d2f"} err="failed to get container status \"71f99c90b42bb9aeabd87ed414835968d4a4c3b05fa9dae4f2ccd861e23a5d2f\": rpc error: code = NotFound desc = could not find container \"71f99c90b42bb9aeabd87ed414835968d4a4c3b05fa9dae4f2ccd861e23a5d2f\": container with ID starting with 71f99c90b42bb9aeabd87ed414835968d4a4c3b05fa9dae4f2ccd861e23a5d2f not found: ID does not exist"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.111674 4971 scope.go:117] "RemoveContainer" containerID="61e437269c525a53ab08083843781fe1ad1d8d262e56a60813bfea706731a494"
Nov 27 07:15:50 crc kubenswrapper[4971]: E1127 07:15:50.115508 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61e437269c525a53ab08083843781fe1ad1d8d262e56a60813bfea706731a494\": container with ID starting with 61e437269c525a53ab08083843781fe1ad1d8d262e56a60813bfea706731a494 not found: ID does not exist" containerID="61e437269c525a53ab08083843781fe1ad1d8d262e56a60813bfea706731a494"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.115569 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61e437269c525a53ab08083843781fe1ad1d8d262e56a60813bfea706731a494"} err="failed to get container status \"61e437269c525a53ab08083843781fe1ad1d8d262e56a60813bfea706731a494\": rpc error: code = NotFound desc = could not find container \"61e437269c525a53ab08083843781fe1ad1d8d262e56a60813bfea706731a494\": container with ID starting with 61e437269c525a53ab08083843781fe1ad1d8d262e56a60813bfea706731a494 not found: ID does not exist"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.160399 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b3616559-d640-4b3b-a4b3-b9d9af1d0061-config-data-custom\") pod \"barbican-worker-7865dbd7d9-zp55h\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " pod="openstack/barbican-worker-7865dbd7d9-zp55h"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.160936 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3616559-d640-4b3b-a4b3-b9d9af1d0061-combined-ca-bundle\") pod \"barbican-worker-7865dbd7d9-zp55h\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " pod="openstack/barbican-worker-7865dbd7d9-zp55h"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.173423 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7865dbd7d9-zp55h"]
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.178886 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3616559-d640-4b3b-a4b3-b9d9af1d0061-logs\") pod \"barbican-worker-7865dbd7d9-zp55h\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " pod="openstack/barbican-worker-7865dbd7d9-zp55h"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.179055 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc7ms\" (UniqueName: \"kubernetes.io/projected/b3616559-d640-4b3b-a4b3-b9d9af1d0061-kube-api-access-fc7ms\") pod \"barbican-worker-7865dbd7d9-zp55h\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " pod="openstack/barbican-worker-7865dbd7d9-zp55h"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.179144 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3616559-d640-4b3b-a4b3-b9d9af1d0061-config-data\") pod \"barbican-worker-7865dbd7d9-zp55h\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " pod="openstack/barbican-worker-7865dbd7d9-zp55h"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.179444 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.241476 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-5b7b448d48-2wggc"]
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.243249 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.257193 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.280844 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b3616559-d640-4b3b-a4b3-b9d9af1d0061-config-data-custom\") pod \"barbican-worker-7865dbd7d9-zp55h\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " pod="openstack/barbican-worker-7865dbd7d9-zp55h"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.280910 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-logs\") pod \"barbican-keystone-listener-5b7b448d48-2wggc\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.280939 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-config-data-custom\") pod \"barbican-keystone-listener-5b7b448d48-2wggc\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.280971 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3616559-d640-4b3b-a4b3-b9d9af1d0061-combined-ca-bundle\") pod \"barbican-worker-7865dbd7d9-zp55h\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " pod="openstack/barbican-worker-7865dbd7d9-zp55h"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.281019 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbjj6\" (UniqueName: \"kubernetes.io/projected/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-kube-api-access-qbjj6\") pod \"barbican-keystone-listener-5b7b448d48-2wggc\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.281058 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-config-data\") pod \"barbican-keystone-listener-5b7b448d48-2wggc\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.281089 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3616559-d640-4b3b-a4b3-b9d9af1d0061-logs\") pod \"barbican-worker-7865dbd7d9-zp55h\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " pod="openstack/barbican-worker-7865dbd7d9-zp55h"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.281132 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc7ms\" (UniqueName: \"kubernetes.io/projected/b3616559-d640-4b3b-a4b3-b9d9af1d0061-kube-api-access-fc7ms\") pod \"barbican-worker-7865dbd7d9-zp55h\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " pod="openstack/barbican-worker-7865dbd7d9-zp55h"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.281166 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3616559-d640-4b3b-a4b3-b9d9af1d0061-config-data\") pod \"barbican-worker-7865dbd7d9-zp55h\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " pod="openstack/barbican-worker-7865dbd7d9-zp55h"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.281204 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-combined-ca-bundle\") pod \"barbican-keystone-listener-5b7b448d48-2wggc\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.287445 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3616559-d640-4b3b-a4b3-b9d9af1d0061-logs\") pod \"barbican-worker-7865dbd7d9-zp55h\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " pod="openstack/barbican-worker-7865dbd7d9-zp55h"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.291791 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-config-data" (OuterVolumeSpecName: "config-data") pod "b1ab1943-4c64-4083-8947-aa821a7298d2" (UID: "b1ab1943-4c64-4083-8947-aa821a7298d2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.296641 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5b7b448d48-2wggc"]
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.301018 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b3616559-d640-4b3b-a4b3-b9d9af1d0061-config-data-custom\") pod \"barbican-worker-7865dbd7d9-zp55h\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " pod="openstack/barbican-worker-7865dbd7d9-zp55h"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.307882 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3616559-d640-4b3b-a4b3-b9d9af1d0061-combined-ca-bundle\") pod \"barbican-worker-7865dbd7d9-zp55h\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " pod="openstack/barbican-worker-7865dbd7d9-zp55h"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.309252 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3616559-d640-4b3b-a4b3-b9d9af1d0061-config-data\") pod \"barbican-worker-7865dbd7d9-zp55h\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " pod="openstack/barbican-worker-7865dbd7d9-zp55h"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.320235 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc7ms\" (UniqueName: \"kubernetes.io/projected/b3616559-d640-4b3b-a4b3-b9d9af1d0061-kube-api-access-fc7ms\") pod \"barbican-worker-7865dbd7d9-zp55h\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " pod="openstack/barbican-worker-7865dbd7d9-zp55h"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.331350 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f8f75f555-mxcqw"]
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.342970 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-9895c4665-wr5mf"]
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.344861 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9895c4665-wr5mf"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.350916 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7b57df6474-qsrcc"]
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.352955 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7b57df6474-qsrcc"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.358980 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.371796 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9895c4665-wr5mf"]
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.380081 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7b57df6474-qsrcc"]
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.384564 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0c78fe86-37ff-47e6-8730-afa07e4d1533-config-data-custom\") pod \"barbican-api-7b57df6474-qsrcc\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " pod="openstack/barbican-api-7b57df6474-qsrcc"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.384617 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5mbb\" (UniqueName: \"kubernetes.io/projected/0c78fe86-37ff-47e6-8730-afa07e4d1533-kube-api-access-j5mbb\") pod \"barbican-api-7b57df6474-qsrcc\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " pod="openstack/barbican-api-7b57df6474-qsrcc"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.384655 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-dns-svc\") pod \"dnsmasq-dns-9895c4665-wr5mf\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " pod="openstack/dnsmasq-dns-9895c4665-wr5mf"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.384717 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4bgd\" (UniqueName: \"kubernetes.io/projected/52ac96a5-4ef4-48ce-80d5-ca16e20de960-kube-api-access-z4bgd\") pod \"dnsmasq-dns-9895c4665-wr5mf\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " pod="openstack/dnsmasq-dns-9895c4665-wr5mf"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.384743 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-ovsdbserver-nb\") pod \"dnsmasq-dns-9895c4665-wr5mf\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " pod="openstack/dnsmasq-dns-9895c4665-wr5mf"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.384768 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-dns-swift-storage-0\") pod \"dnsmasq-dns-9895c4665-wr5mf\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " pod="openstack/dnsmasq-dns-9895c4665-wr5mf"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.384798 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-logs\") pod \"barbican-keystone-listener-5b7b448d48-2wggc\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.384816 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-config-data-custom\") pod \"barbican-keystone-listener-5b7b448d48-2wggc\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.384849 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbjj6\" (UniqueName: \"kubernetes.io/projected/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-kube-api-access-qbjj6\") pod \"barbican-keystone-listener-5b7b448d48-2wggc\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.384865 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-config-data\") pod \"barbican-keystone-listener-5b7b448d48-2wggc\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.384915 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c78fe86-37ff-47e6-8730-afa07e4d1533-combined-ca-bundle\") pod \"barbican-api-7b57df6474-qsrcc\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " pod="openstack/barbican-api-7b57df6474-qsrcc"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.384943 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-combined-ca-bundle\") pod \"barbican-keystone-listener-5b7b448d48-2wggc\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.384974 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c78fe86-37ff-47e6-8730-afa07e4d1533-config-data\") pod \"barbican-api-7b57df6474-qsrcc\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " pod="openstack/barbican-api-7b57df6474-qsrcc"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.385001 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-ovsdbserver-sb\") pod \"dnsmasq-dns-9895c4665-wr5mf\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " pod="openstack/dnsmasq-dns-9895c4665-wr5mf"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.385038 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0c78fe86-37ff-47e6-8730-afa07e4d1533-logs\") pod \"barbican-api-7b57df6474-qsrcc\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " pod="openstack/barbican-api-7b57df6474-qsrcc"
Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.385064 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-config\") pod \"dnsmasq-dns-9895c4665-wr5mf\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " 
pod="openstack/dnsmasq-dns-9895c4665-wr5mf" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.385116 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1ab1943-4c64-4083-8947-aa821a7298d2-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.385787 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-logs\") pod \"barbican-keystone-listener-5b7b448d48-2wggc\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.401989 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-combined-ca-bundle\") pod \"barbican-keystone-listener-5b7b448d48-2wggc\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.403106 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-config-data-custom\") pod \"barbican-keystone-listener-5b7b448d48-2wggc\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.417227 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbjj6\" (UniqueName: \"kubernetes.io/projected/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-kube-api-access-qbjj6\") pod \"barbican-keystone-listener-5b7b448d48-2wggc\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.426991 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-config-data\") pod \"barbican-keystone-listener-5b7b448d48-2wggc\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.430210 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7865dbd7d9-zp55h" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.472707 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.486825 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c78fe86-37ff-47e6-8730-afa07e4d1533-combined-ca-bundle\") pod \"barbican-api-7b57df6474-qsrcc\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " pod="openstack/barbican-api-7b57df6474-qsrcc" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.486886 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c78fe86-37ff-47e6-8730-afa07e4d1533-config-data\") pod \"barbican-api-7b57df6474-qsrcc\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " pod="openstack/barbican-api-7b57df6474-qsrcc" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.486926 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-ovsdbserver-sb\") pod \"dnsmasq-dns-9895c4665-wr5mf\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " pod="openstack/dnsmasq-dns-9895c4665-wr5mf" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.486962 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0c78fe86-37ff-47e6-8730-afa07e4d1533-logs\") pod \"barbican-api-7b57df6474-qsrcc\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " pod="openstack/barbican-api-7b57df6474-qsrcc" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.486993 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-config\") pod \"dnsmasq-dns-9895c4665-wr5mf\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " pod="openstack/dnsmasq-dns-9895c4665-wr5mf" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.487014 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0c78fe86-37ff-47e6-8730-afa07e4d1533-config-data-custom\") pod \"barbican-api-7b57df6474-qsrcc\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " pod="openstack/barbican-api-7b57df6474-qsrcc" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.487034 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5mbb\" (UniqueName: \"kubernetes.io/projected/0c78fe86-37ff-47e6-8730-afa07e4d1533-kube-api-access-j5mbb\") pod \"barbican-api-7b57df6474-qsrcc\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " pod="openstack/barbican-api-7b57df6474-qsrcc" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.487054 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-dns-svc\") pod \"dnsmasq-dns-9895c4665-wr5mf\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " pod="openstack/dnsmasq-dns-9895c4665-wr5mf" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.487094 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4bgd\" (UniqueName: \"kubernetes.io/projected/52ac96a5-4ef4-48ce-80d5-ca16e20de960-kube-api-access-z4bgd\") pod \"dnsmasq-dns-9895c4665-wr5mf\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " 
pod="openstack/dnsmasq-dns-9895c4665-wr5mf" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.487113 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-ovsdbserver-nb\") pod \"dnsmasq-dns-9895c4665-wr5mf\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " pod="openstack/dnsmasq-dns-9895c4665-wr5mf" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.487133 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-dns-swift-storage-0\") pod \"dnsmasq-dns-9895c4665-wr5mf\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " pod="openstack/dnsmasq-dns-9895c4665-wr5mf" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.487761 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0c78fe86-37ff-47e6-8730-afa07e4d1533-logs\") pod \"barbican-api-7b57df6474-qsrcc\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " pod="openstack/barbican-api-7b57df6474-qsrcc" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.488762 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-config\") pod \"dnsmasq-dns-9895c4665-wr5mf\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " pod="openstack/dnsmasq-dns-9895c4665-wr5mf" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.488780 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-dns-svc\") pod \"dnsmasq-dns-9895c4665-wr5mf\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " pod="openstack/dnsmasq-dns-9895c4665-wr5mf" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.490601 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-ovsdbserver-nb\") pod \"dnsmasq-dns-9895c4665-wr5mf\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " pod="openstack/dnsmasq-dns-9895c4665-wr5mf" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.490817 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c78fe86-37ff-47e6-8730-afa07e4d1533-combined-ca-bundle\") pod \"barbican-api-7b57df6474-qsrcc\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " pod="openstack/barbican-api-7b57df6474-qsrcc" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.490813 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-dns-swift-storage-0\") pod \"dnsmasq-dns-9895c4665-wr5mf\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " pod="openstack/dnsmasq-dns-9895c4665-wr5mf" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.491887 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-ovsdbserver-sb\") pod \"dnsmasq-dns-9895c4665-wr5mf\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " pod="openstack/dnsmasq-dns-9895c4665-wr5mf" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.492638 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0c78fe86-37ff-47e6-8730-afa07e4d1533-config-data-custom\") pod \"barbican-api-7b57df6474-qsrcc\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " pod="openstack/barbican-api-7b57df6474-qsrcc" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.493803 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c78fe86-37ff-47e6-8730-afa07e4d1533-config-data\") pod \"barbican-api-7b57df6474-qsrcc\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " pod="openstack/barbican-api-7b57df6474-qsrcc" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.508319 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5mbb\" (UniqueName: \"kubernetes.io/projected/0c78fe86-37ff-47e6-8730-afa07e4d1533-kube-api-access-j5mbb\") pod \"barbican-api-7b57df6474-qsrcc\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " pod="openstack/barbican-api-7b57df6474-qsrcc" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.509934 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7b57df6474-qsrcc" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.513276 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4bgd\" (UniqueName: \"kubernetes.io/projected/52ac96a5-4ef4-48ce-80d5-ca16e20de960-kube-api-access-z4bgd\") pod \"dnsmasq-dns-9895c4665-wr5mf\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " pod="openstack/dnsmasq-dns-9895c4665-wr5mf" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.803793 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9895c4665-wr5mf" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.892120 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"35f86c0e-94fb-4ffa-babb-fca5b707fb8f","Type":"ContainerStarted","Data":"450dbd3c6ca0ae203fcc410da643589a8532de0ce0eb828a3c54ec8a875aab0d"} Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.892318 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="35f86c0e-94fb-4ffa-babb-fca5b707fb8f" containerName="cinder-api-log" containerID="cri-o://7cdecf9002a281a8d97c90cbc163f619be16412047c89726876d471270804577" gracePeriod=30 Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.892597 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.892668 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="35f86c0e-94fb-4ffa-babb-fca5b707fb8f" containerName="cinder-api" containerID="cri-o://450dbd3c6ca0ae203fcc410da643589a8532de0ce0eb828a3c54ec8a875aab0d" gracePeriod=30 Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.917968 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.927836 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ffb1ef88-bf93-4f0c-a64a-952aeef1269d","Type":"ContainerStarted","Data":"ccd8989dbcf588a3bebd671e2c0ee20b33142581e8c5b4915db000e4b0c43193"} Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.928909 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.928889033 podStartE2EDuration="3.928889033s" podCreationTimestamp="2025-11-27 07:15:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:15:50.92323829 +0000 UTC m=+1389.115282208" watchObservedRunningTime="2025-11-27 07:15:50.928889033 +0000 UTC m=+1389.120932951" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.976019 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.23104591 podStartE2EDuration="4.976000283s" podCreationTimestamp="2025-11-27 07:15:46 +0000 UTC" firstStartedPulling="2025-11-27 07:15:47.865252181 +0000 UTC m=+1386.057296099" lastFinishedPulling="2025-11-27 07:15:48.610206554 +0000 UTC m=+1386.802250472" observedRunningTime="2025-11-27 07:15:50.951953955 +0000 UTC m=+1389.143997883" watchObservedRunningTime="2025-11-27 07:15:50.976000283 +0000 UTC m=+1389.168044201" Nov 27 07:15:50 crc kubenswrapper[4971]: I1127 07:15:50.994574 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7865dbd7d9-zp55h"] Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.028289 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.041185 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.053476 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.055791 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.065938 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.066167 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.079583 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7b57df6474-qsrcc"] Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.094672 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.102131 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a767b03-47f5-4cf2-b573-f30e3aa922b5-run-httpd\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.102254 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.102291 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-config-data\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.102383 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-scripts\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.102436 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.102495 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wfts\" (UniqueName: \"kubernetes.io/projected/4a767b03-47f5-4cf2-b573-f30e3aa922b5-kube-api-access-8wfts\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.102587 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a767b03-47f5-4cf2-b573-f30e3aa922b5-log-httpd\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: W1127 07:15:51.113187 4971 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c78fe86_37ff_47e6_8730_afa07e4d1533.slice/crio-a419882b6c82ff86d4b92156a4e71f60741c315681fd3e20f521c7d0632a9710 WatchSource:0}: Error finding container a419882b6c82ff86d4b92156a4e71f60741c315681fd3e20f521c7d0632a9710: Status 404 returned error can't find the container with id a419882b6c82ff86d4b92156a4e71f60741c315681fd3e20f521c7d0632a9710 Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.134571 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5b7b448d48-2wggc"] Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.204143 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a767b03-47f5-4cf2-b573-f30e3aa922b5-run-httpd\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.204610 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.204644 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-config-data\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.204718 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-scripts\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.204730 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a767b03-47f5-4cf2-b573-f30e3aa922b5-run-httpd\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.204946 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.205603 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wfts\" (UniqueName: \"kubernetes.io/projected/4a767b03-47f5-4cf2-b573-f30e3aa922b5-kube-api-access-8wfts\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.205655 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a767b03-47f5-4cf2-b573-f30e3aa922b5-log-httpd\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.205996 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a767b03-47f5-4cf2-b573-f30e3aa922b5-log-httpd\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.211682 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.211760 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-scripts\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.213220 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.219714 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-config-data\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.253273 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wfts\" (UniqueName: \"kubernetes.io/projected/4a767b03-47f5-4cf2-b573-f30e3aa922b5-kube-api-access-8wfts\") pod \"ceilometer-0\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.416988 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.443794 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9895c4665-wr5mf"] Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.903800 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.943625 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-scripts\") pod \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.943679 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-etc-machine-id\") pod \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.943759 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-config-data\") pod \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.943806 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-combined-ca-bundle\") pod \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.943826 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "35f86c0e-94fb-4ffa-babb-fca5b707fb8f" (UID: "35f86c0e-94fb-4ffa-babb-fca5b707fb8f"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.943865 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-logs\") pod \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.943932 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljpf8\" (UniqueName: \"kubernetes.io/projected/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-kube-api-access-ljpf8\") pod \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.944922 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-logs" (OuterVolumeSpecName: "logs") pod "35f86c0e-94fb-4ffa-babb-fca5b707fb8f" (UID: "35f86c0e-94fb-4ffa-babb-fca5b707fb8f"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.945075 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-config-data-custom\") pod \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\" (UID: \"35f86c0e-94fb-4ffa-babb-fca5b707fb8f\") " Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.945979 4971 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.945992 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-logs\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.952964 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7865dbd7d9-zp55h" event={"ID":"b3616559-d640-4b3b-a4b3-b9d9af1d0061","Type":"ContainerStarted","Data":"f683a7245c3e4c96e78a29877fed747d3bba11220db6f862793ea02e6733adec"} Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.954122 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-kube-api-access-ljpf8" (OuterVolumeSpecName: "kube-api-access-ljpf8") pod "35f86c0e-94fb-4ffa-babb-fca5b707fb8f" (UID: "35f86c0e-94fb-4ffa-babb-fca5b707fb8f"). InnerVolumeSpecName "kube-api-access-ljpf8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.954324 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc" event={"ID":"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71","Type":"ContainerStarted","Data":"7852e87416c31263e7e00d8e70907a6f92840226e253d0e1be85a175d15e7676"} Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.959648 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-scripts" (OuterVolumeSpecName: "scripts") pod "35f86c0e-94fb-4ffa-babb-fca5b707fb8f" (UID: "35f86c0e-94fb-4ffa-babb-fca5b707fb8f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.959996 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "35f86c0e-94fb-4ffa-babb-fca5b707fb8f" (UID: "35f86c0e-94fb-4ffa-babb-fca5b707fb8f"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.960149 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b57df6474-qsrcc" event={"ID":"0c78fe86-37ff-47e6-8730-afa07e4d1533","Type":"ContainerStarted","Data":"cd735c487fbcf020ffc031404c0dc001b75ae107ca62f1d3ce85525272dc2a21"} Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.960213 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b57df6474-qsrcc" event={"ID":"0c78fe86-37ff-47e6-8730-afa07e4d1533","Type":"ContainerStarted","Data":"76729c51b41b73c92904a13d10f59a263a4bdd28a197a24521c0a656303539ea"} Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.960227 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b57df6474-qsrcc" event={"ID":"0c78fe86-37ff-47e6-8730-afa07e4d1533","Type":"ContainerStarted","Data":"a419882b6c82ff86d4b92156a4e71f60741c315681fd3e20f521c7d0632a9710"} Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.960290 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7b57df6474-qsrcc" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.960966 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7b57df6474-qsrcc" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.967034 4971 generic.go:334] "Generic (PLEG): container finished" podID="52ac96a5-4ef4-48ce-80d5-ca16e20de960" containerID="d3651eeb82ea44564d71dff0627fcdcd88d912d928e661b235d834584973935e" exitCode=0 Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.967153 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9895c4665-wr5mf" event={"ID":"52ac96a5-4ef4-48ce-80d5-ca16e20de960","Type":"ContainerDied","Data":"d3651eeb82ea44564d71dff0627fcdcd88d912d928e661b235d834584973935e"} Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.967197 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9895c4665-wr5mf" event={"ID":"52ac96a5-4ef4-48ce-80d5-ca16e20de960","Type":"ContainerStarted","Data":"ce9bb0536c22314b74059002820957bb2ccdd23833d9d8995cb1d13dbb3e9dbf"} Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.979827 4971 generic.go:334] "Generic (PLEG): container finished" podID="35f86c0e-94fb-4ffa-babb-fca5b707fb8f" containerID="450dbd3c6ca0ae203fcc410da643589a8532de0ce0eb828a3c54ec8a875aab0d" exitCode=0 Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.979896 4971 generic.go:334] "Generic (PLEG): container finished" podID="35f86c0e-94fb-4ffa-babb-fca5b707fb8f" containerID="7cdecf9002a281a8d97c90cbc163f619be16412047c89726876d471270804577" exitCode=143 Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.979983 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.980104 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"35f86c0e-94fb-4ffa-babb-fca5b707fb8f","Type":"ContainerDied","Data":"450dbd3c6ca0ae203fcc410da643589a8532de0ce0eb828a3c54ec8a875aab0d"} Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.980152 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"35f86c0e-94fb-4ffa-babb-fca5b707fb8f","Type":"ContainerDied","Data":"7cdecf9002a281a8d97c90cbc163f619be16412047c89726876d471270804577"} Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.980164 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"35f86c0e-94fb-4ffa-babb-fca5b707fb8f","Type":"ContainerDied","Data":"caa2b30635953c295efee54d82012310cc17b8c0fc73ef25dc41debc0e535f49"} Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.980179 4971 scope.go:117] "RemoveContainer" containerID="450dbd3c6ca0ae203fcc410da643589a8532de0ce0eb828a3c54ec8a875aab0d" Nov 27 07:15:51 crc kubenswrapper[4971]: I1127 07:15:51.980596 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw" podUID="5deb9f08-1de8-4f62-98cc-3e80232f17fc" containerName="dnsmasq-dns" containerID="cri-o://81ea14a50102475c525ee547f9979162aa0063a316664e1bda1b771fb01c40a9" gracePeriod=10 Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.003552 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-config-data" (OuterVolumeSpecName: "config-data") pod "35f86c0e-94fb-4ffa-babb-fca5b707fb8f" (UID: "35f86c0e-94fb-4ffa-babb-fca5b707fb8f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.020839 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "35f86c0e-94fb-4ffa-babb-fca5b707fb8f" (UID: "35f86c0e-94fb-4ffa-babb-fca5b707fb8f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.021211 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7b57df6474-qsrcc" podStartSLOduration=2.02118815 podStartE2EDuration="2.02118815s" podCreationTimestamp="2025-11-27 07:15:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:15:51.981174741 +0000 UTC m=+1390.173218659" watchObservedRunningTime="2025-11-27 07:15:52.02118815 +0000 UTC m=+1390.213232078" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.048169 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.048212 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.048417 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.051764 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljpf8\" (UniqueName: \"kubernetes.io/projected/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-kube-api-access-ljpf8\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.051788 4971 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/35f86c0e-94fb-4ffa-babb-fca5b707fb8f-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.063716 4971 scope.go:117] "RemoveContainer" containerID="7cdecf9002a281a8d97c90cbc163f619be16412047c89726876d471270804577" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.149050 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.162770 4971 scope.go:117] "RemoveContainer" containerID="450dbd3c6ca0ae203fcc410da643589a8532de0ce0eb828a3c54ec8a875aab0d" Nov 27 07:15:52 crc kubenswrapper[4971]: E1127 07:15:52.171713 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"450dbd3c6ca0ae203fcc410da643589a8532de0ce0eb828a3c54ec8a875aab0d\": container with ID starting with 450dbd3c6ca0ae203fcc410da643589a8532de0ce0eb828a3c54ec8a875aab0d not found: ID does not exist" containerID="450dbd3c6ca0ae203fcc410da643589a8532de0ce0eb828a3c54ec8a875aab0d" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.171768 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"450dbd3c6ca0ae203fcc410da643589a8532de0ce0eb828a3c54ec8a875aab0d"} err="failed to get container status \"450dbd3c6ca0ae203fcc410da643589a8532de0ce0eb828a3c54ec8a875aab0d\": rpc error: code = NotFound desc = could not find container \"450dbd3c6ca0ae203fcc410da643589a8532de0ce0eb828a3c54ec8a875aab0d\": container with ID starting with 450dbd3c6ca0ae203fcc410da643589a8532de0ce0eb828a3c54ec8a875aab0d not found: ID does not exist" Nov 27 07:15:52 crc 
kubenswrapper[4971]: I1127 07:15:52.171801 4971 scope.go:117] "RemoveContainer" containerID="7cdecf9002a281a8d97c90cbc163f619be16412047c89726876d471270804577" Nov 27 07:15:52 crc kubenswrapper[4971]: E1127 07:15:52.175687 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7cdecf9002a281a8d97c90cbc163f619be16412047c89726876d471270804577\": container with ID starting with 7cdecf9002a281a8d97c90cbc163f619be16412047c89726876d471270804577 not found: ID does not exist" containerID="7cdecf9002a281a8d97c90cbc163f619be16412047c89726876d471270804577" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.175733 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cdecf9002a281a8d97c90cbc163f619be16412047c89726876d471270804577"} err="failed to get container status \"7cdecf9002a281a8d97c90cbc163f619be16412047c89726876d471270804577\": rpc error: code = NotFound desc = could not find container \"7cdecf9002a281a8d97c90cbc163f619be16412047c89726876d471270804577\": container with ID starting with 7cdecf9002a281a8d97c90cbc163f619be16412047c89726876d471270804577 not found: ID does not exist" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.175762 4971 scope.go:117] "RemoveContainer" containerID="450dbd3c6ca0ae203fcc410da643589a8532de0ce0eb828a3c54ec8a875aab0d" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.176075 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"450dbd3c6ca0ae203fcc410da643589a8532de0ce0eb828a3c54ec8a875aab0d"} err="failed to get container status \"450dbd3c6ca0ae203fcc410da643589a8532de0ce0eb828a3c54ec8a875aab0d\": rpc error: code = NotFound desc = could not find container \"450dbd3c6ca0ae203fcc410da643589a8532de0ce0eb828a3c54ec8a875aab0d\": container with ID starting with 450dbd3c6ca0ae203fcc410da643589a8532de0ce0eb828a3c54ec8a875aab0d not found: ID does not exist" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.176097 4971 scope.go:117] "RemoveContainer" containerID="7cdecf9002a281a8d97c90cbc163f619be16412047c89726876d471270804577" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.176286 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cdecf9002a281a8d97c90cbc163f619be16412047c89726876d471270804577"} err="failed to get container status \"7cdecf9002a281a8d97c90cbc163f619be16412047c89726876d471270804577\": rpc error: code = NotFound desc = could not find container \"7cdecf9002a281a8d97c90cbc163f619be16412047c89726876d471270804577\": container with ID starting with 7cdecf9002a281a8d97c90cbc163f619be16412047c89726876d471270804577 not found: ID does not exist" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.324660 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.398847 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.452879 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.467090 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Nov 27 07:15:52 crc kubenswrapper[4971]: E1127 07:15:52.467556 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35f86c0e-94fb-4ffa-babb-fca5b707fb8f" 
containerName="cinder-api-log" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.467567 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="35f86c0e-94fb-4ffa-babb-fca5b707fb8f" containerName="cinder-api-log" Nov 27 07:15:52 crc kubenswrapper[4971]: E1127 07:15:52.467578 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35f86c0e-94fb-4ffa-babb-fca5b707fb8f" containerName="cinder-api" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.467584 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="35f86c0e-94fb-4ffa-babb-fca5b707fb8f" containerName="cinder-api" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.467752 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="35f86c0e-94fb-4ffa-babb-fca5b707fb8f" containerName="cinder-api" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.467767 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="35f86c0e-94fb-4ffa-babb-fca5b707fb8f" containerName="cinder-api-log" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.468774 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.472030 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.472390 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.472573 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.481267 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.564030 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35f86c0e-94fb-4ffa-babb-fca5b707fb8f" path="/var/lib/kubelet/pods/35f86c0e-94fb-4ffa-babb-fca5b707fb8f/volumes" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.565871 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1ab1943-4c64-4083-8947-aa821a7298d2" path="/var/lib/kubelet/pods/b1ab1943-4c64-4083-8947-aa821a7298d2/volumes" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.577978 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-scripts\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.578030 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-config-data-custom\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.578097 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.578158 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.578187 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.578243 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-config-data\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.578307 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qx8kk\" (UniqueName: \"kubernetes.io/projected/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-kube-api-access-qx8kk\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.578327 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.578409 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-logs\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.679772 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qx8kk\" (UniqueName: \"kubernetes.io/projected/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-kube-api-access-qx8kk\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.679820 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.679905 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-logs\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.679993 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-scripts\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " 
pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.680011 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.680029 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-config-data-custom\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.680043 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.680068 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.680102 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-config-data\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.682221 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-logs\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.684737 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.695480 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.695545 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.700425 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-config-data\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 
crc kubenswrapper[4971]: I1127 07:15:52.700996 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-config-data-custom\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.704400 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.704543 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qx8kk\" (UniqueName: \"kubernetes.io/projected/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-kube-api-access-qx8kk\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.713105 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-scripts\") pod \"cinder-api-0\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.798387 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.854201 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.883145 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-ovsdbserver-nb\") pod \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.883889 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-dns-swift-storage-0\") pod \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.883946 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qzpz\" (UniqueName: \"kubernetes.io/projected/5deb9f08-1de8-4f62-98cc-3e80232f17fc-kube-api-access-7qzpz\") pod \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.884045 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-dns-svc\") pod \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.884165 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-config\") pod \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " Nov 27 07:15:52 crc kubenswrapper[4971]: 
I1127 07:15:52.884245 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-ovsdbserver-sb\") pod \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\" (UID: \"5deb9f08-1de8-4f62-98cc-3e80232f17fc\") " Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.889451 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5deb9f08-1de8-4f62-98cc-3e80232f17fc-kube-api-access-7qzpz" (OuterVolumeSpecName: "kube-api-access-7qzpz") pod "5deb9f08-1de8-4f62-98cc-3e80232f17fc" (UID: "5deb9f08-1de8-4f62-98cc-3e80232f17fc"). InnerVolumeSpecName "kube-api-access-7qzpz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:15:52 crc kubenswrapper[4971]: I1127 07:15:52.987204 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qzpz\" (UniqueName: \"kubernetes.io/projected/5deb9f08-1de8-4f62-98cc-3e80232f17fc-kube-api-access-7qzpz\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.033920 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9895c4665-wr5mf" event={"ID":"52ac96a5-4ef4-48ce-80d5-ca16e20de960","Type":"ContainerStarted","Data":"0ee65125860516ca5db7cb878fef64dec49ccb71ab10d8dca20aaf709802f06b"} Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.035042 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-9895c4665-wr5mf" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.047997 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a767b03-47f5-4cf2-b573-f30e3aa922b5","Type":"ContainerStarted","Data":"7f3bb04a37ee11808c4d59f2a925ac76e9a36af845b03328e0d04568125bc3e8"} Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.052842 4971 generic.go:334] "Generic (PLEG): container finished" podID="5deb9f08-1de8-4f62-98cc-3e80232f17fc" containerID="81ea14a50102475c525ee547f9979162aa0063a316664e1bda1b771fb01c40a9" exitCode=0 Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.053190 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw" event={"ID":"5deb9f08-1de8-4f62-98cc-3e80232f17fc","Type":"ContainerDied","Data":"81ea14a50102475c525ee547f9979162aa0063a316664e1bda1b771fb01c40a9"} Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.053246 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw" event={"ID":"5deb9f08-1de8-4f62-98cc-3e80232f17fc","Type":"ContainerDied","Data":"e8d2ae1fe2167f63776769eb4b33e19a0188b768e31b9a67009731d24e63d58e"} Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.053270 4971 scope.go:117] "RemoveContainer" containerID="81ea14a50102475c525ee547f9979162aa0063a316664e1bda1b771fb01c40a9" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.053441 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f8f75f555-mxcqw" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.056981 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5deb9f08-1de8-4f62-98cc-3e80232f17fc" (UID: "5deb9f08-1de8-4f62-98cc-3e80232f17fc"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.085041 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5deb9f08-1de8-4f62-98cc-3e80232f17fc" (UID: "5deb9f08-1de8-4f62-98cc-3e80232f17fc"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.088964 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-config" (OuterVolumeSpecName: "config") pod "5deb9f08-1de8-4f62-98cc-3e80232f17fc" (UID: "5deb9f08-1de8-4f62-98cc-3e80232f17fc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.090633 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.090659 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.090668 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.111127 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5deb9f08-1de8-4f62-98cc-3e80232f17fc" (UID: "5deb9f08-1de8-4f62-98cc-3e80232f17fc"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.111311 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5deb9f08-1de8-4f62-98cc-3e80232f17fc" (UID: "5deb9f08-1de8-4f62-98cc-3e80232f17fc"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.192303 4971 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.192350 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5deb9f08-1de8-4f62-98cc-3e80232f17fc-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.334063 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-9895c4665-wr5mf" podStartSLOduration=3.334044602 podStartE2EDuration="3.334044602s" podCreationTimestamp="2025-11-27 07:15:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:15:53.062459972 +0000 UTC m=+1391.254503900" watchObservedRunningTime="2025-11-27 07:15:53.334044602 +0000 UTC m=+1391.526088520" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.347312 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.395069 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f8f75f555-mxcqw"] Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.404731 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f8f75f555-mxcqw"] Nov 27 07:15:53 crc kubenswrapper[4971]: W1127 07:15:53.628069 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2988a7d8_1d6b_46d8_b204_8e02d0be3b4d.slice/crio-161d68366ec826553a8bafea685338e9cd1ccc17a39d021ac9e9e24f5ff8d859 WatchSource:0}: Error finding container 161d68366ec826553a8bafea685338e9cd1ccc17a39d021ac9e9e24f5ff8d859: Status 404 returned error can't find the container with id 161d68366ec826553a8bafea685338e9cd1ccc17a39d021ac9e9e24f5ff8d859 Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.635754 4971 scope.go:117] "RemoveContainer" containerID="4c977a1427eb8b3559a566dcfb48d34f77dc023ce8853e1620d08e7ec0f36948" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.966511 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-85d46db856-nzmcq"] Nov 27 07:15:53 crc kubenswrapper[4971]: E1127 07:15:53.967030 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5deb9f08-1de8-4f62-98cc-3e80232f17fc" containerName="init" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.967052 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5deb9f08-1de8-4f62-98cc-3e80232f17fc" containerName="init" Nov 27 07:15:53 crc kubenswrapper[4971]: E1127 07:15:53.967080 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5deb9f08-1de8-4f62-98cc-3e80232f17fc" containerName="dnsmasq-dns" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.967086 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5deb9f08-1de8-4f62-98cc-3e80232f17fc" containerName="dnsmasq-dns" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.967278 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="5deb9f08-1de8-4f62-98cc-3e80232f17fc" containerName="dnsmasq-dns" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.968371 4971 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.974963 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.975774 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Nov 27 07:15:53 crc kubenswrapper[4971]: I1127 07:15:53.992595 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-85d46db856-nzmcq"] Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.007780 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc889790-089f-4007-876f-874880dad975-logs\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.007866 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-config-data\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.007888 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-config-data-custom\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.007932 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-public-tls-certs\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.007961 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqvtl\" (UniqueName: \"kubernetes.io/projected/fc889790-089f-4007-876f-874880dad975-kube-api-access-rqvtl\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.007989 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-internal-tls-certs\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.008014 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-combined-ca-bundle\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 
07:15:54.079435 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d","Type":"ContainerStarted","Data":"161d68366ec826553a8bafea685338e9cd1ccc17a39d021ac9e9e24f5ff8d859"} Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.081708 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a767b03-47f5-4cf2-b573-f30e3aa922b5","Type":"ContainerStarted","Data":"e33500b78ddcfba0deb8dea898fe276e9aa9b11dc5cc25915d38d7c59aa1dff0"} Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.109620 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-combined-ca-bundle\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.109919 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc889790-089f-4007-876f-874880dad975-logs\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.110038 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-config-data\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.110131 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-config-data-custom\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.110225 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-public-tls-certs\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.110305 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqvtl\" (UniqueName: \"kubernetes.io/projected/fc889790-089f-4007-876f-874880dad975-kube-api-access-rqvtl\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.110391 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-internal-tls-certs\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.113074 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc889790-089f-4007-876f-874880dad975-logs\") pod 
\"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.117564 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-config-data-custom\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.117867 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-public-tls-certs\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.118193 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-combined-ca-bundle\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.124474 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-internal-tls-certs\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.125368 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-config-data\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.151045 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqvtl\" (UniqueName: \"kubernetes.io/projected/fc889790-089f-4007-876f-874880dad975-kube-api-access-rqvtl\") pod \"barbican-api-85d46db856-nzmcq\" (UID: \"fc889790-089f-4007-876f-874880dad975\") " pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.160340 4971 scope.go:117] "RemoveContainer" containerID="81ea14a50102475c525ee547f9979162aa0063a316664e1bda1b771fb01c40a9" Nov 27 07:15:54 crc kubenswrapper[4971]: E1127 07:15:54.160863 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81ea14a50102475c525ee547f9979162aa0063a316664e1bda1b771fb01c40a9\": container with ID starting with 81ea14a50102475c525ee547f9979162aa0063a316664e1bda1b771fb01c40a9 not found: ID does not exist" containerID="81ea14a50102475c525ee547f9979162aa0063a316664e1bda1b771fb01c40a9" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.160895 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81ea14a50102475c525ee547f9979162aa0063a316664e1bda1b771fb01c40a9"} err="failed to get container status \"81ea14a50102475c525ee547f9979162aa0063a316664e1bda1b771fb01c40a9\": rpc error: code = NotFound desc = could not find container 
\"81ea14a50102475c525ee547f9979162aa0063a316664e1bda1b771fb01c40a9\": container with ID starting with 81ea14a50102475c525ee547f9979162aa0063a316664e1bda1b771fb01c40a9 not found: ID does not exist" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.160918 4971 scope.go:117] "RemoveContainer" containerID="4c977a1427eb8b3559a566dcfb48d34f77dc023ce8853e1620d08e7ec0f36948" Nov 27 07:15:54 crc kubenswrapper[4971]: E1127 07:15:54.161266 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c977a1427eb8b3559a566dcfb48d34f77dc023ce8853e1620d08e7ec0f36948\": container with ID starting with 4c977a1427eb8b3559a566dcfb48d34f77dc023ce8853e1620d08e7ec0f36948 not found: ID does not exist" containerID="4c977a1427eb8b3559a566dcfb48d34f77dc023ce8853e1620d08e7ec0f36948" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.161360 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c977a1427eb8b3559a566dcfb48d34f77dc023ce8853e1620d08e7ec0f36948"} err="failed to get container status \"4c977a1427eb8b3559a566dcfb48d34f77dc023ce8853e1620d08e7ec0f36948\": rpc error: code = NotFound desc = could not find container \"4c977a1427eb8b3559a566dcfb48d34f77dc023ce8853e1620d08e7ec0f36948\": container with ID starting with 4c977a1427eb8b3559a566dcfb48d34f77dc023ce8853e1620d08e7ec0f36948 not found: ID does not exist" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.296987 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.574051 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5deb9f08-1de8-4f62-98cc-3e80232f17fc" path="/var/lib/kubelet/pods/5deb9f08-1de8-4f62-98cc-3e80232f17fc/volumes" Nov 27 07:15:54 crc kubenswrapper[4971]: I1127 07:15:54.981335 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-85d46db856-nzmcq"] Nov 27 07:15:55 crc kubenswrapper[4971]: I1127 07:15:55.094109 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc" event={"ID":"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71","Type":"ContainerStarted","Data":"cce9aeab95941685da7fd97b4408ffcb6ac7f7c473cf5bbf43edec5fe397efd6"} Nov 27 07:15:55 crc kubenswrapper[4971]: W1127 07:15:55.374710 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc889790_089f_4007_876f_874880dad975.slice/crio-0ba695ab2b4cada687607e751c739c6465567ff29b3737781325a36e76b0ada3 WatchSource:0}: Error finding container 0ba695ab2b4cada687607e751c739c6465567ff29b3737781325a36e76b0ada3: Status 404 returned error can't find the container with id 0ba695ab2b4cada687607e751c739c6465567ff29b3737781325a36e76b0ada3 Nov 27 07:15:56 crc kubenswrapper[4971]: I1127 07:15:56.107787 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc" event={"ID":"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71","Type":"ContainerStarted","Data":"0d9471b432378d52a3ce483c444b3f5beceab405dd8b991eb4ecb32b754851b8"} Nov 27 07:15:56 crc kubenswrapper[4971]: I1127 07:15:56.114881 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a767b03-47f5-4cf2-b573-f30e3aa922b5","Type":"ContainerStarted","Data":"90758f96d363ce7f90865abbaa6ed510ec7f2279d57b85d2451ba4860bd5e8dd"} Nov 27 07:15:56 crc 
kubenswrapper[4971]: I1127 07:15:56.116466 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7865dbd7d9-zp55h" event={"ID":"b3616559-d640-4b3b-a4b3-b9d9af1d0061","Type":"ContainerStarted","Data":"432905efdade8907249e6259f8519c70b607016106a34709d73f373c91efe0a5"} Nov 27 07:15:56 crc kubenswrapper[4971]: I1127 07:15:56.116506 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7865dbd7d9-zp55h" event={"ID":"b3616559-d640-4b3b-a4b3-b9d9af1d0061","Type":"ContainerStarted","Data":"85ed74a43c71f2fd0dc70f5ef0da68149334b289a557cdd8a6923696f588e66b"} Nov 27 07:15:56 crc kubenswrapper[4971]: I1127 07:15:56.127863 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d","Type":"ContainerStarted","Data":"9cbb1e297d7eff42db8e89fa629eec8f4c7fb3f3405eaec525774254aea00153"} Nov 27 07:15:56 crc kubenswrapper[4971]: I1127 07:15:56.143336 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc" podStartSLOduration=3.103461498 podStartE2EDuration="6.143317168s" podCreationTimestamp="2025-11-27 07:15:50 +0000 UTC" firstStartedPulling="2025-11-27 07:15:51.123769837 +0000 UTC m=+1389.315813755" lastFinishedPulling="2025-11-27 07:15:54.163625507 +0000 UTC m=+1392.355669425" observedRunningTime="2025-11-27 07:15:56.141115429 +0000 UTC m=+1394.333159347" watchObservedRunningTime="2025-11-27 07:15:56.143317168 +0000 UTC m=+1394.335361106" Nov 27 07:15:56 crc kubenswrapper[4971]: I1127 07:15:56.148506 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-85d46db856-nzmcq" event={"ID":"fc889790-089f-4007-876f-874880dad975","Type":"ContainerStarted","Data":"5d760ae55be3a486a97462d9bd8576d1f7879d82b2f6d6318d4076d0797b7097"} Nov 27 07:15:56 crc kubenswrapper[4971]: I1127 07:15:56.148584 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-85d46db856-nzmcq" event={"ID":"fc889790-089f-4007-876f-874880dad975","Type":"ContainerStarted","Data":"213b456ae37e6e54ce799e71136d0150d71e3784b58ba6e15c73b1242adc249b"} Nov 27 07:15:56 crc kubenswrapper[4971]: I1127 07:15:56.148597 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-85d46db856-nzmcq" event={"ID":"fc889790-089f-4007-876f-874880dad975","Type":"ContainerStarted","Data":"0ba695ab2b4cada687607e751c739c6465567ff29b3737781325a36e76b0ada3"} Nov 27 07:15:56 crc kubenswrapper[4971]: I1127 07:15:56.148842 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:56 crc kubenswrapper[4971]: I1127 07:15:56.149017 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:15:56 crc kubenswrapper[4971]: I1127 07:15:56.181193 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-7865dbd7d9-zp55h" podStartSLOduration=2.8446010900000003 podStartE2EDuration="7.181171299s" podCreationTimestamp="2025-11-27 07:15:49 +0000 UTC" firstStartedPulling="2025-11-27 07:15:51.038716374 +0000 UTC m=+1389.230760292" lastFinishedPulling="2025-11-27 07:15:55.375286583 +0000 UTC m=+1393.567330501" observedRunningTime="2025-11-27 07:15:56.167989323 +0000 UTC m=+1394.360033251" watchObservedRunningTime="2025-11-27 07:15:56.181171299 +0000 UTC m=+1394.373215217" Nov 27 07:15:56 crc kubenswrapper[4971]: I1127 
07:15:56.207069 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-85d46db856-nzmcq" podStartSLOduration=3.207054477 podStartE2EDuration="3.207054477s" podCreationTimestamp="2025-11-27 07:15:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:15:56.205935876 +0000 UTC m=+1394.397979794" watchObservedRunningTime="2025-11-27 07:15:56.207054477 +0000 UTC m=+1394.399098395" Nov 27 07:15:57 crc kubenswrapper[4971]: I1127 07:15:57.157737 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d","Type":"ContainerStarted","Data":"32b6c0f43018436689e6595a7fbba50fff0b5d5d5f4ec9b1c259ec629b4ab6bc"} Nov 27 07:15:57 crc kubenswrapper[4971]: I1127 07:15:57.158481 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Nov 27 07:15:57 crc kubenswrapper[4971]: I1127 07:15:57.161080 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a767b03-47f5-4cf2-b573-f30e3aa922b5","Type":"ContainerStarted","Data":"4826b19de310d9c87ffd3ed8e71d8044651260f608a892b79b4f24eecd62924a"} Nov 27 07:15:57 crc kubenswrapper[4971]: I1127 07:15:57.206697 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.206679244 podStartE2EDuration="5.206679244s" podCreationTimestamp="2025-11-27 07:15:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:15:57.196326085 +0000 UTC m=+1395.388370003" watchObservedRunningTime="2025-11-27 07:15:57.206679244 +0000 UTC m=+1395.398723162" Nov 27 07:15:57 crc kubenswrapper[4971]: I1127 07:15:57.655734 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Nov 27 07:15:57 crc kubenswrapper[4971]: I1127 07:15:57.699293 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 27 07:15:57 crc kubenswrapper[4971]: I1127 07:15:57.996968 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:15:58 crc kubenswrapper[4971]: I1127 07:15:58.171847 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a767b03-47f5-4cf2-b573-f30e3aa922b5","Type":"ContainerStarted","Data":"8401472fdb05ee764b52e1e4288e43d7d114c36a189f7cff80d4d75bbc639485"} Nov 27 07:15:58 crc kubenswrapper[4971]: I1127 07:15:58.172003 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ffb1ef88-bf93-4f0c-a64a-952aeef1269d" containerName="cinder-scheduler" containerID="cri-o://b095cff7a1a753b4b2004211a2c617ec125f18ff9ea89f9ba9efac82419fff32" gracePeriod=30 Nov 27 07:15:58 crc kubenswrapper[4971]: I1127 07:15:58.172345 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ffb1ef88-bf93-4f0c-a64a-952aeef1269d" containerName="probe" containerID="cri-o://ccd8989dbcf588a3bebd671e2c0ee20b33142581e8c5b4915db000e4b0c43193" gracePeriod=30 Nov 27 07:15:58 crc kubenswrapper[4971]: I1127 07:15:58.172669 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 27 07:15:58 crc kubenswrapper[4971]: I1127 
07:15:58.211989 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.5553253900000001 podStartE2EDuration="7.211969026s" podCreationTimestamp="2025-11-27 07:15:51 +0000 UTC" firstStartedPulling="2025-11-27 07:15:52.162695675 +0000 UTC m=+1390.354739593" lastFinishedPulling="2025-11-27 07:15:57.819339311 +0000 UTC m=+1396.011383229" observedRunningTime="2025-11-27 07:15:58.203800046 +0000 UTC m=+1396.395843964" watchObservedRunningTime="2025-11-27 07:15:58.211969026 +0000 UTC m=+1396.404012954" Nov 27 07:15:59 crc kubenswrapper[4971]: I1127 07:15:59.184284 4971 generic.go:334] "Generic (PLEG): container finished" podID="ffb1ef88-bf93-4f0c-a64a-952aeef1269d" containerID="ccd8989dbcf588a3bebd671e2c0ee20b33142581e8c5b4915db000e4b0c43193" exitCode=0 Nov 27 07:15:59 crc kubenswrapper[4971]: I1127 07:15:59.185448 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ffb1ef88-bf93-4f0c-a64a-952aeef1269d","Type":"ContainerDied","Data":"ccd8989dbcf588a3bebd671e2c0ee20b33142581e8c5b4915db000e4b0c43193"} Nov 27 07:16:00 crc kubenswrapper[4971]: I1127 07:16:00.669858 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:16:00 crc kubenswrapper[4971]: I1127 07:16:00.773675 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-797b967b5b-7wfhc"] Nov 27 07:16:00 crc kubenswrapper[4971]: I1127 07:16:00.774559 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-797b967b5b-7wfhc" podUID="cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d" containerName="neutron-httpd" containerID="cri-o://619b29157e835d81ce9cbe2e7c2420eae8c918ffa9f80d969ea45c6be45769d8" gracePeriod=30 Nov 27 07:16:00 crc kubenswrapper[4971]: I1127 07:16:00.774016 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-797b967b5b-7wfhc" podUID="cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d" containerName="neutron-api" containerID="cri-o://4709702de716b12d745d76424c14b3427d0845081a7da6c4031024fe395ac4ef" gracePeriod=30 Nov 27 07:16:00 crc kubenswrapper[4971]: I1127 07:16:00.805998 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-9895c4665-wr5mf" Nov 27 07:16:00 crc kubenswrapper[4971]: I1127 07:16:00.910656 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-ccf55cbcc-jsxv8"] Nov 27 07:16:00 crc kubenswrapper[4971]: I1127 07:16:00.910982 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" podUID="4794dbe4-7057-46d2-90d6-11a2bda30e6a" containerName="dnsmasq-dns" containerID="cri-o://9277a9bdb5129fc609fd9f09229bde9c450696e154ed58abc17c9bd971e52f93" gracePeriod=10 Nov 27 07:16:01 crc kubenswrapper[4971]: I1127 07:16:01.234707 4971 generic.go:334] "Generic (PLEG): container finished" podID="4794dbe4-7057-46d2-90d6-11a2bda30e6a" containerID="9277a9bdb5129fc609fd9f09229bde9c450696e154ed58abc17c9bd971e52f93" exitCode=0 Nov 27 07:16:01 crc kubenswrapper[4971]: I1127 07:16:01.234751 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" event={"ID":"4794dbe4-7057-46d2-90d6-11a2bda30e6a","Type":"ContainerDied","Data":"9277a9bdb5129fc609fd9f09229bde9c450696e154ed58abc17c9bd971e52f93"} Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.097372 4971 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.607363 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.607821 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-dns-svc\") pod \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.607941 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-ovsdbserver-nb\") pod \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.607974 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxpjz\" (UniqueName: \"kubernetes.io/projected/4794dbe4-7057-46d2-90d6-11a2bda30e6a-kube-api-access-rxpjz\") pod \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.608017 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-ovsdbserver-sb\") pod \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.608042 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-dns-swift-storage-0\") pod \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.608059 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-config\") pod \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\" (UID: \"4794dbe4-7057-46d2-90d6-11a2bda30e6a\") " Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.608365 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" event={"ID":"4794dbe4-7057-46d2-90d6-11a2bda30e6a","Type":"ContainerDied","Data":"f7c89d0b923349e7eed8e259371421a1224afe56de834fdd14e939fa06daa4a8"} Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.608470 4971 scope.go:117] "RemoveContainer" containerID="9277a9bdb5129fc609fd9f09229bde9c450696e154ed58abc17c9bd971e52f93" Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.608698 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-ccf55cbcc-jsxv8" Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.623104 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4794dbe4-7057-46d2-90d6-11a2bda30e6a-kube-api-access-rxpjz" (OuterVolumeSpecName: "kube-api-access-rxpjz") pod "4794dbe4-7057-46d2-90d6-11a2bda30e6a" (UID: "4794dbe4-7057-46d2-90d6-11a2bda30e6a"). InnerVolumeSpecName "kube-api-access-rxpjz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.652664 4971 generic.go:334] "Generic (PLEG): container finished" podID="cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d" containerID="619b29157e835d81ce9cbe2e7c2420eae8c918ffa9f80d969ea45c6be45769d8" exitCode=0 Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.654279 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-797b967b5b-7wfhc" event={"ID":"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d","Type":"ContainerDied","Data":"619b29157e835d81ce9cbe2e7c2420eae8c918ffa9f80d969ea45c6be45769d8"} Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.679239 4971 generic.go:334] "Generic (PLEG): container finished" podID="ffb1ef88-bf93-4f0c-a64a-952aeef1269d" containerID="b095cff7a1a753b4b2004211a2c617ec125f18ff9ea89f9ba9efac82419fff32" exitCode=0 Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.679282 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ffb1ef88-bf93-4f0c-a64a-952aeef1269d","Type":"ContainerDied","Data":"b095cff7a1a753b4b2004211a2c617ec125f18ff9ea89f9ba9efac82419fff32"} Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.711614 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxpjz\" (UniqueName: \"kubernetes.io/projected/4794dbe4-7057-46d2-90d6-11a2bda30e6a-kube-api-access-rxpjz\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.770802 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4794dbe4-7057-46d2-90d6-11a2bda30e6a" (UID: "4794dbe4-7057-46d2-90d6-11a2bda30e6a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.770960 4971 scope.go:117] "RemoveContainer" containerID="bebd79ccf252c4009ab7d34445f34c6b773dd2e45cf68ac93647621c0b23d9b5" Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.821196 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4794dbe4-7057-46d2-90d6-11a2bda30e6a" (UID: "4794dbe4-7057-46d2-90d6-11a2bda30e6a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.823467 4971 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.823499 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.829252 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-config" (OuterVolumeSpecName: "config") pod "4794dbe4-7057-46d2-90d6-11a2bda30e6a" (UID: "4794dbe4-7057-46d2-90d6-11a2bda30e6a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.830906 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4794dbe4-7057-46d2-90d6-11a2bda30e6a" (UID: "4794dbe4-7057-46d2-90d6-11a2bda30e6a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.890172 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4794dbe4-7057-46d2-90d6-11a2bda30e6a" (UID: "4794dbe4-7057-46d2-90d6-11a2bda30e6a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.929214 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.929243 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.929254 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4794dbe4-7057-46d2-90d6-11a2bda30e6a-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.961238 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-ccf55cbcc-jsxv8"] Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.980504 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.980759 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7b57df6474-qsrcc" Nov 27 07:16:02 crc kubenswrapper[4971]: I1127 07:16:02.992417 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-ccf55cbcc-jsxv8"] Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.255305 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.376956 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-etc-machine-id\") pod \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.377416 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-config-data\") pod \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.377507 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-combined-ca-bundle\") pod \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.377555 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-config-data-custom\") pod \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.377655 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrpzb\" (UniqueName: \"kubernetes.io/projected/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-kube-api-access-vrpzb\") pod \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.377693 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-scripts\") pod \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\" (UID: \"ffb1ef88-bf93-4f0c-a64a-952aeef1269d\") " Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.379395 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ffb1ef88-bf93-4f0c-a64a-952aeef1269d" (UID: "ffb1ef88-bf93-4f0c-a64a-952aeef1269d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.404790 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-scripts" (OuterVolumeSpecName: "scripts") pod "ffb1ef88-bf93-4f0c-a64a-952aeef1269d" (UID: "ffb1ef88-bf93-4f0c-a64a-952aeef1269d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.405169 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ffb1ef88-bf93-4f0c-a64a-952aeef1269d" (UID: "ffb1ef88-bf93-4f0c-a64a-952aeef1269d"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.405416 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-kube-api-access-vrpzb" (OuterVolumeSpecName: "kube-api-access-vrpzb") pod "ffb1ef88-bf93-4f0c-a64a-952aeef1269d" (UID: "ffb1ef88-bf93-4f0c-a64a-952aeef1269d"). InnerVolumeSpecName "kube-api-access-vrpzb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.482755 4971 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.482785 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrpzb\" (UniqueName: \"kubernetes.io/projected/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-kube-api-access-vrpzb\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.482796 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.482804 4971 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.526264 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7b57df6474-qsrcc" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.606552 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ffb1ef88-bf93-4f0c-a64a-952aeef1269d" (UID: "ffb1ef88-bf93-4f0c-a64a-952aeef1269d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.618504 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-config-data" (OuterVolumeSpecName: "config-data") pod "ffb1ef88-bf93-4f0c-a64a-952aeef1269d" (UID: "ffb1ef88-bf93-4f0c-a64a-952aeef1269d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.687439 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.687474 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffb1ef88-bf93-4f0c-a64a-952aeef1269d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.724600 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.732661 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ffb1ef88-bf93-4f0c-a64a-952aeef1269d","Type":"ContainerDied","Data":"1a4719af35d9259f65324555390b30dbacfe2fd15a264d66d7e26c814fb76e99"} Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.733003 4971 scope.go:117] "RemoveContainer" containerID="ccd8989dbcf588a3bebd671e2c0ee20b33142581e8c5b4915db000e4b0c43193" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.758955 4971 scope.go:117] "RemoveContainer" containerID="b095cff7a1a753b4b2004211a2c617ec125f18ff9ea89f9ba9efac82419fff32" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.790619 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.823333 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.837018 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Nov 27 07:16:03 crc kubenswrapper[4971]: E1127 07:16:03.837898 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffb1ef88-bf93-4f0c-a64a-952aeef1269d" containerName="cinder-scheduler" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.837985 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffb1ef88-bf93-4f0c-a64a-952aeef1269d" containerName="cinder-scheduler" Nov 27 07:16:03 crc kubenswrapper[4971]: E1127 07:16:03.838110 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4794dbe4-7057-46d2-90d6-11a2bda30e6a" containerName="dnsmasq-dns" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.838195 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4794dbe4-7057-46d2-90d6-11a2bda30e6a" containerName="dnsmasq-dns" Nov 27 07:16:03 crc kubenswrapper[4971]: E1127 07:16:03.838276 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffb1ef88-bf93-4f0c-a64a-952aeef1269d" containerName="probe" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.838351 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffb1ef88-bf93-4f0c-a64a-952aeef1269d" containerName="probe" Nov 27 07:16:03 crc kubenswrapper[4971]: E1127 07:16:03.838431 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4794dbe4-7057-46d2-90d6-11a2bda30e6a" containerName="init" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.838502 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4794dbe4-7057-46d2-90d6-11a2bda30e6a" containerName="init" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.838834 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffb1ef88-bf93-4f0c-a64a-952aeef1269d" containerName="cinder-scheduler" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.838925 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffb1ef88-bf93-4f0c-a64a-952aeef1269d" containerName="probe" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.839011 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="4794dbe4-7057-46d2-90d6-11a2bda30e6a" containerName="dnsmasq-dns" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.840562 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.843424 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.848449 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.893347 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " pod="openstack/cinder-scheduler-0" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.893392 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " pod="openstack/cinder-scheduler-0" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.893418 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjvxt\" (UniqueName: \"kubernetes.io/projected/6754a19e-e024-4b15-8464-49e127bd35ad-kube-api-access-vjvxt\") pod \"cinder-scheduler-0\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " pod="openstack/cinder-scheduler-0" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.893461 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6754a19e-e024-4b15-8464-49e127bd35ad-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " pod="openstack/cinder-scheduler-0" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.893483 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-config-data\") pod \"cinder-scheduler-0\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " pod="openstack/cinder-scheduler-0" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.893510 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-scripts\") pod \"cinder-scheduler-0\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " pod="openstack/cinder-scheduler-0" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.995367 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " pod="openstack/cinder-scheduler-0" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.995417 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " pod="openstack/cinder-scheduler-0" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.995442 4971 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-vjvxt\" (UniqueName: \"kubernetes.io/projected/6754a19e-e024-4b15-8464-49e127bd35ad-kube-api-access-vjvxt\") pod \"cinder-scheduler-0\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " pod="openstack/cinder-scheduler-0" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.995507 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6754a19e-e024-4b15-8464-49e127bd35ad-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " pod="openstack/cinder-scheduler-0" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.995538 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-config-data\") pod \"cinder-scheduler-0\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " pod="openstack/cinder-scheduler-0" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.995572 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-scripts\") pod \"cinder-scheduler-0\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " pod="openstack/cinder-scheduler-0" Nov 27 07:16:03 crc kubenswrapper[4971]: I1127 07:16:03.995960 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6754a19e-e024-4b15-8464-49e127bd35ad-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " pod="openstack/cinder-scheduler-0" Nov 27 07:16:04 crc kubenswrapper[4971]: I1127 07:16:04.000837 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-config-data\") pod \"cinder-scheduler-0\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " pod="openstack/cinder-scheduler-0" Nov 27 07:16:04 crc kubenswrapper[4971]: I1127 07:16:04.001658 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " pod="openstack/cinder-scheduler-0" Nov 27 07:16:04 crc kubenswrapper[4971]: I1127 07:16:04.006692 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-scripts\") pod \"cinder-scheduler-0\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " pod="openstack/cinder-scheduler-0" Nov 27 07:16:04 crc kubenswrapper[4971]: I1127 07:16:04.007333 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " pod="openstack/cinder-scheduler-0" Nov 27 07:16:04 crc kubenswrapper[4971]: I1127 07:16:04.027265 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjvxt\" (UniqueName: \"kubernetes.io/projected/6754a19e-e024-4b15-8464-49e127bd35ad-kube-api-access-vjvxt\") pod \"cinder-scheduler-0\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " pod="openstack/cinder-scheduler-0" Nov 27 07:16:04 crc 
kubenswrapper[4971]: I1127 07:16:04.161438 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 27 07:16:04 crc kubenswrapper[4971]: I1127 07:16:04.563998 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4794dbe4-7057-46d2-90d6-11a2bda30e6a" path="/var/lib/kubelet/pods/4794dbe4-7057-46d2-90d6-11a2bda30e6a/volumes" Nov 27 07:16:04 crc kubenswrapper[4971]: I1127 07:16:04.565046 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffb1ef88-bf93-4f0c-a64a-952aeef1269d" path="/var/lib/kubelet/pods/ffb1ef88-bf93-4f0c-a64a-952aeef1269d/volumes" Nov 27 07:16:04 crc kubenswrapper[4971]: I1127 07:16:04.830384 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 27 07:16:05 crc kubenswrapper[4971]: I1127 07:16:05.755022 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6754a19e-e024-4b15-8464-49e127bd35ad","Type":"ContainerStarted","Data":"f25c15b6849ee4de1357a742e5ff9d74b03bc332580c116f4c0b091d577d4a1d"} Nov 27 07:16:05 crc kubenswrapper[4971]: I1127 07:16:05.755618 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6754a19e-e024-4b15-8464-49e127bd35ad","Type":"ContainerStarted","Data":"0588e324332a29c1829f747049eff940a0369fe3e5f347a71d15feb03a83af60"} Nov 27 07:16:06 crc kubenswrapper[4971]: I1127 07:16:06.041368 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:16:06 crc kubenswrapper[4971]: I1127 07:16:06.139662 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:16:06 crc kubenswrapper[4971]: I1127 07:16:06.201362 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7b57df6474-qsrcc"] Nov 27 07:16:06 crc kubenswrapper[4971]: I1127 07:16:06.201607 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7b57df6474-qsrcc" podUID="0c78fe86-37ff-47e6-8730-afa07e4d1533" containerName="barbican-api-log" containerID="cri-o://76729c51b41b73c92904a13d10f59a263a4bdd28a197a24521c0a656303539ea" gracePeriod=30 Nov 27 07:16:06 crc kubenswrapper[4971]: I1127 07:16:06.201719 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7b57df6474-qsrcc" podUID="0c78fe86-37ff-47e6-8730-afa07e4d1533" containerName="barbican-api" containerID="cri-o://cd735c487fbcf020ffc031404c0dc001b75ae107ca62f1d3ce85525272dc2a21" gracePeriod=30 Nov 27 07:16:06 crc kubenswrapper[4971]: I1127 07:16:06.621238 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Nov 27 07:16:06 crc kubenswrapper[4971]: I1127 07:16:06.772560 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6754a19e-e024-4b15-8464-49e127bd35ad","Type":"ContainerStarted","Data":"11a43f33fa3835d6bc3caba0e4cd634a1bc0dcb8a9fc8879c3ba3bbe47cba2b2"} Nov 27 07:16:06 crc kubenswrapper[4971]: I1127 07:16:06.778974 4971 generic.go:334] "Generic (PLEG): container finished" podID="0c78fe86-37ff-47e6-8730-afa07e4d1533" containerID="76729c51b41b73c92904a13d10f59a263a4bdd28a197a24521c0a656303539ea" exitCode=143 Nov 27 07:16:06 crc kubenswrapper[4971]: I1127 07:16:06.779336 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-api-7b57df6474-qsrcc" event={"ID":"0c78fe86-37ff-47e6-8730-afa07e4d1533","Type":"ContainerDied","Data":"76729c51b41b73c92904a13d10f59a263a4bdd28a197a24521c0a656303539ea"} Nov 27 07:16:06 crc kubenswrapper[4971]: I1127 07:16:06.809832 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.809804343 podStartE2EDuration="3.809804343s" podCreationTimestamp="2025-11-27 07:16:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:16:06.796218977 +0000 UTC m=+1404.988262915" watchObservedRunningTime="2025-11-27 07:16:06.809804343 +0000 UTC m=+1405.001848261" Nov 27 07:16:08 crc kubenswrapper[4971]: I1127 07:16:08.290263 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.162379 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.793170 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7b57df6474-qsrcc" Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.823468 4971 generic.go:334] "Generic (PLEG): container finished" podID="0c78fe86-37ff-47e6-8730-afa07e4d1533" containerID="cd735c487fbcf020ffc031404c0dc001b75ae107ca62f1d3ce85525272dc2a21" exitCode=0 Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.823507 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b57df6474-qsrcc" event={"ID":"0c78fe86-37ff-47e6-8730-afa07e4d1533","Type":"ContainerDied","Data":"cd735c487fbcf020ffc031404c0dc001b75ae107ca62f1d3ce85525272dc2a21"} Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.823548 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7b57df6474-qsrcc" event={"ID":"0c78fe86-37ff-47e6-8730-afa07e4d1533","Type":"ContainerDied","Data":"a419882b6c82ff86d4b92156a4e71f60741c315681fd3e20f521c7d0632a9710"} Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.823566 4971 scope.go:117] "RemoveContainer" containerID="cd735c487fbcf020ffc031404c0dc001b75ae107ca62f1d3ce85525272dc2a21" Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.823705 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7b57df6474-qsrcc" Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.841373 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c78fe86-37ff-47e6-8730-afa07e4d1533-config-data\") pod \"0c78fe86-37ff-47e6-8730-afa07e4d1533\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.842237 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0c78fe86-37ff-47e6-8730-afa07e4d1533-config-data-custom\") pod \"0c78fe86-37ff-47e6-8730-afa07e4d1533\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.842342 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5mbb\" (UniqueName: \"kubernetes.io/projected/0c78fe86-37ff-47e6-8730-afa07e4d1533-kube-api-access-j5mbb\") pod \"0c78fe86-37ff-47e6-8730-afa07e4d1533\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.842431 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0c78fe86-37ff-47e6-8730-afa07e4d1533-logs\") pod \"0c78fe86-37ff-47e6-8730-afa07e4d1533\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.842479 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c78fe86-37ff-47e6-8730-afa07e4d1533-combined-ca-bundle\") pod \"0c78fe86-37ff-47e6-8730-afa07e4d1533\" (UID: \"0c78fe86-37ff-47e6-8730-afa07e4d1533\") " Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.843170 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c78fe86-37ff-47e6-8730-afa07e4d1533-logs" (OuterVolumeSpecName: "logs") pod "0c78fe86-37ff-47e6-8730-afa07e4d1533" (UID: "0c78fe86-37ff-47e6-8730-afa07e4d1533"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.843760 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0c78fe86-37ff-47e6-8730-afa07e4d1533-logs\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.847441 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c78fe86-37ff-47e6-8730-afa07e4d1533-kube-api-access-j5mbb" (OuterVolumeSpecName: "kube-api-access-j5mbb") pod "0c78fe86-37ff-47e6-8730-afa07e4d1533" (UID: "0c78fe86-37ff-47e6-8730-afa07e4d1533"). InnerVolumeSpecName "kube-api-access-j5mbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.848971 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c78fe86-37ff-47e6-8730-afa07e4d1533-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0c78fe86-37ff-47e6-8730-afa07e4d1533" (UID: "0c78fe86-37ff-47e6-8730-afa07e4d1533"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.853038 4971 scope.go:117] "RemoveContainer" containerID="76729c51b41b73c92904a13d10f59a263a4bdd28a197a24521c0a656303539ea" Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.871944 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c78fe86-37ff-47e6-8730-afa07e4d1533-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0c78fe86-37ff-47e6-8730-afa07e4d1533" (UID: "0c78fe86-37ff-47e6-8730-afa07e4d1533"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.887502 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c78fe86-37ff-47e6-8730-afa07e4d1533-config-data" (OuterVolumeSpecName: "config-data") pod "0c78fe86-37ff-47e6-8730-afa07e4d1533" (UID: "0c78fe86-37ff-47e6-8730-afa07e4d1533"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.945326 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c78fe86-37ff-47e6-8730-afa07e4d1533-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.945358 4971 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0c78fe86-37ff-47e6-8730-afa07e4d1533-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.945370 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5mbb\" (UniqueName: \"kubernetes.io/projected/0c78fe86-37ff-47e6-8730-afa07e4d1533-kube-api-access-j5mbb\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.945379 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c78fe86-37ff-47e6-8730-afa07e4d1533-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.948600 4971 scope.go:117] "RemoveContainer" containerID="cd735c487fbcf020ffc031404c0dc001b75ae107ca62f1d3ce85525272dc2a21" Nov 27 07:16:09 crc kubenswrapper[4971]: E1127 07:16:09.948976 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd735c487fbcf020ffc031404c0dc001b75ae107ca62f1d3ce85525272dc2a21\": container with ID starting with cd735c487fbcf020ffc031404c0dc001b75ae107ca62f1d3ce85525272dc2a21 not found: ID does not exist" containerID="cd735c487fbcf020ffc031404c0dc001b75ae107ca62f1d3ce85525272dc2a21" Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.949011 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd735c487fbcf020ffc031404c0dc001b75ae107ca62f1d3ce85525272dc2a21"} err="failed to get container status \"cd735c487fbcf020ffc031404c0dc001b75ae107ca62f1d3ce85525272dc2a21\": rpc error: code = NotFound desc = could not find container \"cd735c487fbcf020ffc031404c0dc001b75ae107ca62f1d3ce85525272dc2a21\": container with ID starting with cd735c487fbcf020ffc031404c0dc001b75ae107ca62f1d3ce85525272dc2a21 not found: ID does not exist" Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.949032 4971 scope.go:117] "RemoveContainer" 
containerID="76729c51b41b73c92904a13d10f59a263a4bdd28a197a24521c0a656303539ea" Nov 27 07:16:09 crc kubenswrapper[4971]: E1127 07:16:09.949311 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76729c51b41b73c92904a13d10f59a263a4bdd28a197a24521c0a656303539ea\": container with ID starting with 76729c51b41b73c92904a13d10f59a263a4bdd28a197a24521c0a656303539ea not found: ID does not exist" containerID="76729c51b41b73c92904a13d10f59a263a4bdd28a197a24521c0a656303539ea" Nov 27 07:16:09 crc kubenswrapper[4971]: I1127 07:16:09.949337 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76729c51b41b73c92904a13d10f59a263a4bdd28a197a24521c0a656303539ea"} err="failed to get container status \"76729c51b41b73c92904a13d10f59a263a4bdd28a197a24521c0a656303539ea\": rpc error: code = NotFound desc = could not find container \"76729c51b41b73c92904a13d10f59a263a4bdd28a197a24521c0a656303539ea\": container with ID starting with 76729c51b41b73c92904a13d10f59a263a4bdd28a197a24521c0a656303539ea not found: ID does not exist" Nov 27 07:16:10 crc kubenswrapper[4971]: I1127 07:16:10.173749 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7b57df6474-qsrcc"] Nov 27 07:16:10 crc kubenswrapper[4971]: I1127 07:16:10.186593 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-7b57df6474-qsrcc"] Nov 27 07:16:10 crc kubenswrapper[4971]: I1127 07:16:10.561729 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c78fe86-37ff-47e6-8730-afa07e4d1533" path="/var/lib/kubelet/pods/0c78fe86-37ff-47e6-8730-afa07e4d1533/volumes" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.187218 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Nov 27 07:16:11 crc kubenswrapper[4971]: E1127 07:16:11.187785 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c78fe86-37ff-47e6-8730-afa07e4d1533" containerName="barbican-api-log" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.187802 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c78fe86-37ff-47e6-8730-afa07e4d1533" containerName="barbican-api-log" Nov 27 07:16:11 crc kubenswrapper[4971]: E1127 07:16:11.187846 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c78fe86-37ff-47e6-8730-afa07e4d1533" containerName="barbican-api" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.187853 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c78fe86-37ff-47e6-8730-afa07e4d1533" containerName="barbican-api" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.188088 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c78fe86-37ff-47e6-8730-afa07e4d1533" containerName="barbican-api" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.188104 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c78fe86-37ff-47e6-8730-afa07e4d1533" containerName="barbican-api-log" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.188972 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.191045 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-q8qjx" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.192168 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.195040 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.198099 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.379751 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfvwf\" (UniqueName: \"kubernetes.io/projected/8651d70b-9655-451a-a038-1e6daf97bb0b-kube-api-access-hfvwf\") pod \"openstackclient\" (UID: \"8651d70b-9655-451a-a038-1e6daf97bb0b\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.379802 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8651d70b-9655-451a-a038-1e6daf97bb0b-openstack-config\") pod \"openstackclient\" (UID: \"8651d70b-9655-451a-a038-1e6daf97bb0b\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.379873 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8651d70b-9655-451a-a038-1e6daf97bb0b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"8651d70b-9655-451a-a038-1e6daf97bb0b\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.379986 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8651d70b-9655-451a-a038-1e6daf97bb0b-openstack-config-secret\") pod \"openstackclient\" (UID: \"8651d70b-9655-451a-a038-1e6daf97bb0b\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.481568 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8651d70b-9655-451a-a038-1e6daf97bb0b-openstack-config-secret\") pod \"openstackclient\" (UID: \"8651d70b-9655-451a-a038-1e6daf97bb0b\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.481888 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfvwf\" (UniqueName: \"kubernetes.io/projected/8651d70b-9655-451a-a038-1e6daf97bb0b-kube-api-access-hfvwf\") pod \"openstackclient\" (UID: \"8651d70b-9655-451a-a038-1e6daf97bb0b\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.481921 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8651d70b-9655-451a-a038-1e6daf97bb0b-openstack-config\") pod \"openstackclient\" (UID: \"8651d70b-9655-451a-a038-1e6daf97bb0b\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.481984 4971 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8651d70b-9655-451a-a038-1e6daf97bb0b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"8651d70b-9655-451a-a038-1e6daf97bb0b\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.483306 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8651d70b-9655-451a-a038-1e6daf97bb0b-openstack-config\") pod \"openstackclient\" (UID: \"8651d70b-9655-451a-a038-1e6daf97bb0b\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: E1127 07:16:11.485431 4971 projected.go:194] Error preparing data for projected volume kube-api-access-hfvwf for pod openstack/openstackclient: failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: User "system:node:crc" cannot create resource "serviceaccounts/token" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Nov 27 07:16:11 crc kubenswrapper[4971]: E1127 07:16:11.485482 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8651d70b-9655-451a-a038-1e6daf97bb0b-kube-api-access-hfvwf podName:8651d70b-9655-451a-a038-1e6daf97bb0b nodeName:}" failed. No retries permitted until 2025-11-27 07:16:11.985466413 +0000 UTC m=+1410.177510331 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-hfvwf" (UniqueName: "kubernetes.io/projected/8651d70b-9655-451a-a038-1e6daf97bb0b-kube-api-access-hfvwf") pod "openstackclient" (UID: "8651d70b-9655-451a-a038-1e6daf97bb0b") : failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: User "system:node:crc" cannot create resource "serviceaccounts/token" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.488361 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8651d70b-9655-451a-a038-1e6daf97bb0b-openstack-config-secret\") pod \"openstackclient\" (UID: \"8651d70b-9655-451a-a038-1e6daf97bb0b\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.494483 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8651d70b-9655-451a-a038-1e6daf97bb0b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"8651d70b-9655-451a-a038-1e6daf97bb0b\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.497474 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Nov 27 07:16:11 crc kubenswrapper[4971]: E1127 07:16:11.498406 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-hfvwf], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/openstackclient" podUID="8651d70b-9655-451a-a038-1e6daf97bb0b" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.499386 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.540742 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.542046 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.575318 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.686145 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/36d86c04-b5e4-4334-832b-10953c1d0b1d-openstack-config-secret\") pod \"openstackclient\" (UID: \"36d86c04-b5e4-4334-832b-10953c1d0b1d\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.686258 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rl9r5\" (UniqueName: \"kubernetes.io/projected/36d86c04-b5e4-4334-832b-10953c1d0b1d-kube-api-access-rl9r5\") pod \"openstackclient\" (UID: \"36d86c04-b5e4-4334-832b-10953c1d0b1d\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.686337 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/36d86c04-b5e4-4334-832b-10953c1d0b1d-openstack-config\") pod \"openstackclient\" (UID: \"36d86c04-b5e4-4334-832b-10953c1d0b1d\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.686398 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d86c04-b5e4-4334-832b-10953c1d0b1d-combined-ca-bundle\") pod \"openstackclient\" (UID: \"36d86c04-b5e4-4334-832b-10953c1d0b1d\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.788341 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/36d86c04-b5e4-4334-832b-10953c1d0b1d-openstack-config-secret\") pod \"openstackclient\" (UID: \"36d86c04-b5e4-4334-832b-10953c1d0b1d\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.788416 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rl9r5\" (UniqueName: \"kubernetes.io/projected/36d86c04-b5e4-4334-832b-10953c1d0b1d-kube-api-access-rl9r5\") pod \"openstackclient\" (UID: \"36d86c04-b5e4-4334-832b-10953c1d0b1d\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.788506 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/36d86c04-b5e4-4334-832b-10953c1d0b1d-openstack-config\") pod \"openstackclient\" (UID: \"36d86c04-b5e4-4334-832b-10953c1d0b1d\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.788557 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d86c04-b5e4-4334-832b-10953c1d0b1d-combined-ca-bundle\") pod \"openstackclient\" (UID: \"36d86c04-b5e4-4334-832b-10953c1d0b1d\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.789778 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/36d86c04-b5e4-4334-832b-10953c1d0b1d-openstack-config\") pod \"openstackclient\" (UID: 
\"36d86c04-b5e4-4334-832b-10953c1d0b1d\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.792204 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d86c04-b5e4-4334-832b-10953c1d0b1d-combined-ca-bundle\") pod \"openstackclient\" (UID: \"36d86c04-b5e4-4334-832b-10953c1d0b1d\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.793860 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/36d86c04-b5e4-4334-832b-10953c1d0b1d-openstack-config-secret\") pod \"openstackclient\" (UID: \"36d86c04-b5e4-4334-832b-10953c1d0b1d\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.808205 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rl9r5\" (UniqueName: \"kubernetes.io/projected/36d86c04-b5e4-4334-832b-10953c1d0b1d-kube-api-access-rl9r5\") pod \"openstackclient\" (UID: \"36d86c04-b5e4-4334-832b-10953c1d0b1d\") " pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.847571 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.852326 4971 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="8651d70b-9655-451a-a038-1e6daf97bb0b" podUID="36d86c04-b5e4-4334-832b-10953c1d0b1d" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.877440 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.896398 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.998164 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8651d70b-9655-451a-a038-1e6daf97bb0b-openstack-config\") pod \"8651d70b-9655-451a-a038-1e6daf97bb0b\" (UID: \"8651d70b-9655-451a-a038-1e6daf97bb0b\") " Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.998208 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8651d70b-9655-451a-a038-1e6daf97bb0b-openstack-config-secret\") pod \"8651d70b-9655-451a-a038-1e6daf97bb0b\" (UID: \"8651d70b-9655-451a-a038-1e6daf97bb0b\") " Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.998240 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8651d70b-9655-451a-a038-1e6daf97bb0b-combined-ca-bundle\") pod \"8651d70b-9655-451a-a038-1e6daf97bb0b\" (UID: \"8651d70b-9655-451a-a038-1e6daf97bb0b\") " Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.998509 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hfvwf\" (UniqueName: \"kubernetes.io/projected/8651d70b-9655-451a-a038-1e6daf97bb0b-kube-api-access-hfvwf\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:11 crc kubenswrapper[4971]: I1127 07:16:11.999680 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8651d70b-9655-451a-a038-1e6daf97bb0b-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "8651d70b-9655-451a-a038-1e6daf97bb0b" (UID: "8651d70b-9655-451a-a038-1e6daf97bb0b"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.004798 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8651d70b-9655-451a-a038-1e6daf97bb0b-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "8651d70b-9655-451a-a038-1e6daf97bb0b" (UID: "8651d70b-9655-451a-a038-1e6daf97bb0b"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.004898 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8651d70b-9655-451a-a038-1e6daf97bb0b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8651d70b-9655-451a-a038-1e6daf97bb0b" (UID: "8651d70b-9655-451a-a038-1e6daf97bb0b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.100646 4971 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8651d70b-9655-451a-a038-1e6daf97bb0b-openstack-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.100684 4971 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8651d70b-9655-451a-a038-1e6daf97bb0b-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.100694 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8651d70b-9655-451a-a038-1e6daf97bb0b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.387174 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 27 07:16:12 crc kubenswrapper[4971]: W1127 07:16:12.388969 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36d86c04_b5e4_4334_832b_10953c1d0b1d.slice/crio-4f944caeffccc2743f5571c0ce0f60199160438ea14fabd30d8720b6c08ab302 WatchSource:0}: Error finding container 4f944caeffccc2743f5571c0ce0f60199160438ea14fabd30d8720b6c08ab302: Status 404 returned error can't find the container with id 4f944caeffccc2743f5571c0ce0f60199160438ea14fabd30d8720b6c08ab302 Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.580149 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8651d70b-9655-451a-a038-1e6daf97bb0b" path="/var/lib/kubelet/pods/8651d70b-9655-451a-a038-1e6daf97bb0b/volumes" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.706653 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.815297 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9ksw\" (UniqueName: \"kubernetes.io/projected/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-kube-api-access-c9ksw\") pod \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.815358 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-httpd-config\") pod \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.815444 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-config\") pod \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.815510 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-combined-ca-bundle\") pod \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.815655 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-ovndb-tls-certs\") pod \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\" (UID: \"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d\") " Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.838444 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-kube-api-access-c9ksw" (OuterVolumeSpecName: "kube-api-access-c9ksw") pod "cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d" (UID: "cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d"). InnerVolumeSpecName "kube-api-access-c9ksw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.838603 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d" (UID: "cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.857178 4971 generic.go:334] "Generic (PLEG): container finished" podID="cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d" containerID="4709702de716b12d745d76424c14b3427d0845081a7da6c4031024fe395ac4ef" exitCode=0 Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.857236 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-797b967b5b-7wfhc" event={"ID":"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d","Type":"ContainerDied","Data":"4709702de716b12d745d76424c14b3427d0845081a7da6c4031024fe395ac4ef"} Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.857263 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-797b967b5b-7wfhc" event={"ID":"cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d","Type":"ContainerDied","Data":"5034f48f0bffa9b460c5305537f686d48abcdf0c4094489ea3c5ce109016f206"} Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.857280 4971 scope.go:117] "RemoveContainer" containerID="619b29157e835d81ce9cbe2e7c2420eae8c918ffa9f80d969ea45c6be45769d8" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.857403 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-797b967b5b-7wfhc" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.860295 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.861130 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"36d86c04-b5e4-4334-832b-10953c1d0b1d","Type":"ContainerStarted","Data":"4f944caeffccc2743f5571c0ce0f60199160438ea14fabd30d8720b6c08ab302"} Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.875154 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d" (UID: "cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.880784 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-config" (OuterVolumeSpecName: "config") pod "cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d" (UID: "cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.912878 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d" (UID: "cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.917666 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9ksw\" (UniqueName: \"kubernetes.io/projected/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-kube-api-access-c9ksw\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.917730 4971 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-httpd-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.917750 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.917765 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.917779 4971 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.931011 4971 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="8651d70b-9655-451a-a038-1e6daf97bb0b" podUID="36d86c04-b5e4-4334-832b-10953c1d0b1d" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.942818 4971 scope.go:117] "RemoveContainer" containerID="4709702de716b12d745d76424c14b3427d0845081a7da6c4031024fe395ac4ef" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.967998 4971 scope.go:117] "RemoveContainer" containerID="619b29157e835d81ce9cbe2e7c2420eae8c918ffa9f80d969ea45c6be45769d8" Nov 27 07:16:12 crc kubenswrapper[4971]: E1127 07:16:12.969013 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"619b29157e835d81ce9cbe2e7c2420eae8c918ffa9f80d969ea45c6be45769d8\": container with ID starting with 619b29157e835d81ce9cbe2e7c2420eae8c918ffa9f80d969ea45c6be45769d8 not found: ID does not exist" containerID="619b29157e835d81ce9cbe2e7c2420eae8c918ffa9f80d969ea45c6be45769d8" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.969052 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"619b29157e835d81ce9cbe2e7c2420eae8c918ffa9f80d969ea45c6be45769d8"} err="failed to get container status \"619b29157e835d81ce9cbe2e7c2420eae8c918ffa9f80d969ea45c6be45769d8\": rpc error: code = NotFound desc = could not find container \"619b29157e835d81ce9cbe2e7c2420eae8c918ffa9f80d969ea45c6be45769d8\": container with ID starting with 619b29157e835d81ce9cbe2e7c2420eae8c918ffa9f80d969ea45c6be45769d8 not found: ID does not exist" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.969077 4971 scope.go:117] "RemoveContainer" containerID="4709702de716b12d745d76424c14b3427d0845081a7da6c4031024fe395ac4ef" Nov 27 07:16:12 crc kubenswrapper[4971]: E1127 07:16:12.969641 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4709702de716b12d745d76424c14b3427d0845081a7da6c4031024fe395ac4ef\": container with ID starting with 
4709702de716b12d745d76424c14b3427d0845081a7da6c4031024fe395ac4ef not found: ID does not exist" containerID="4709702de716b12d745d76424c14b3427d0845081a7da6c4031024fe395ac4ef" Nov 27 07:16:12 crc kubenswrapper[4971]: I1127 07:16:12.969694 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4709702de716b12d745d76424c14b3427d0845081a7da6c4031024fe395ac4ef"} err="failed to get container status \"4709702de716b12d745d76424c14b3427d0845081a7da6c4031024fe395ac4ef\": rpc error: code = NotFound desc = could not find container \"4709702de716b12d745d76424c14b3427d0845081a7da6c4031024fe395ac4ef\": container with ID starting with 4709702de716b12d745d76424c14b3427d0845081a7da6c4031024fe395ac4ef not found: ID does not exist" Nov 27 07:16:13 crc kubenswrapper[4971]: I1127 07:16:13.197833 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-797b967b5b-7wfhc"] Nov 27 07:16:13 crc kubenswrapper[4971]: I1127 07:16:13.212172 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-797b967b5b-7wfhc"] Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.439455 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.561099 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d" path="/var/lib/kubelet/pods/cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d/volumes" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.644223 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-66674dfb5f-52hcq"] Nov 27 07:16:14 crc kubenswrapper[4971]: E1127 07:16:14.644687 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d" containerName="neutron-api" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.644705 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d" containerName="neutron-api" Nov 27 07:16:14 crc kubenswrapper[4971]: E1127 07:16:14.644724 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d" containerName="neutron-httpd" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.644732 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d" containerName="neutron-httpd" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.644916 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d" containerName="neutron-api" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.644956 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf24ef8f-fdd5-4a28-b9a2-e44c69ace38d" containerName="neutron-httpd" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.646037 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.649707 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.650049 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.653465 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.676856 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-66674dfb5f-52hcq"] Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.769633 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/82a33cc1-32fe-464f-ac33-b802fd32a4c1-etc-swift\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.769676 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-public-tls-certs\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.769720 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jj97k\" (UniqueName: \"kubernetes.io/projected/82a33cc1-32fe-464f-ac33-b802fd32a4c1-kube-api-access-jj97k\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.769741 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-combined-ca-bundle\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.769775 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/82a33cc1-32fe-464f-ac33-b802fd32a4c1-run-httpd\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.769802 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/82a33cc1-32fe-464f-ac33-b802fd32a4c1-log-httpd\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.769848 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-internal-tls-certs\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " 
pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.769906 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-config-data\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.871456 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-config-data\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.871515 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/82a33cc1-32fe-464f-ac33-b802fd32a4c1-etc-swift\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.871548 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-public-tls-certs\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.871590 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jj97k\" (UniqueName: \"kubernetes.io/projected/82a33cc1-32fe-464f-ac33-b802fd32a4c1-kube-api-access-jj97k\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.871612 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-combined-ca-bundle\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.871640 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/82a33cc1-32fe-464f-ac33-b802fd32a4c1-run-httpd\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.871667 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/82a33cc1-32fe-464f-ac33-b802fd32a4c1-log-httpd\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.871726 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-internal-tls-certs\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 
07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.872099 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/82a33cc1-32fe-464f-ac33-b802fd32a4c1-run-httpd\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.874707 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/82a33cc1-32fe-464f-ac33-b802fd32a4c1-log-httpd\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.883414 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-internal-tls-certs\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.886944 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-config-data\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.887940 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jj97k\" (UniqueName: \"kubernetes.io/projected/82a33cc1-32fe-464f-ac33-b802fd32a4c1-kube-api-access-jj97k\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.889188 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-public-tls-certs\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.896248 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/82a33cc1-32fe-464f-ac33-b802fd32a4c1-etc-swift\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.896302 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-combined-ca-bundle\") pod \"swift-proxy-66674dfb5f-52hcq\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:14 crc kubenswrapper[4971]: I1127 07:16:14.970030 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:15 crc kubenswrapper[4971]: I1127 07:16:15.514319 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-66674dfb5f-52hcq"] Nov 27 07:16:15 crc kubenswrapper[4971]: I1127 07:16:15.915980 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-66674dfb5f-52hcq" event={"ID":"82a33cc1-32fe-464f-ac33-b802fd32a4c1","Type":"ContainerStarted","Data":"22d3d2192ae3c18ce743c1ddda10575c592232c7adf55747fe07b64dfb765bb4"} Nov 27 07:16:15 crc kubenswrapper[4971]: I1127 07:16:15.916312 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-66674dfb5f-52hcq" event={"ID":"82a33cc1-32fe-464f-ac33-b802fd32a4c1","Type":"ContainerStarted","Data":"e5e757909b07d75f7894463f911274815946df5ca2fcfd472e16750d3eb2c73d"} Nov 27 07:16:16 crc kubenswrapper[4971]: I1127 07:16:16.344013 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:16 crc kubenswrapper[4971]: I1127 07:16:16.344326 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerName="ceilometer-central-agent" containerID="cri-o://e33500b78ddcfba0deb8dea898fe276e9aa9b11dc5cc25915d38d7c59aa1dff0" gracePeriod=30 Nov 27 07:16:16 crc kubenswrapper[4971]: I1127 07:16:16.344390 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerName="proxy-httpd" containerID="cri-o://8401472fdb05ee764b52e1e4288e43d7d114c36a189f7cff80d4d75bbc639485" gracePeriod=30 Nov 27 07:16:16 crc kubenswrapper[4971]: I1127 07:16:16.344401 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerName="sg-core" containerID="cri-o://4826b19de310d9c87ffd3ed8e71d8044651260f608a892b79b4f24eecd62924a" gracePeriod=30 Nov 27 07:16:16 crc kubenswrapper[4971]: I1127 07:16:16.344416 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerName="ceilometer-notification-agent" containerID="cri-o://90758f96d363ce7f90865abbaa6ed510ec7f2279d57b85d2451ba4860bd5e8dd" gracePeriod=30 Nov 27 07:16:16 crc kubenswrapper[4971]: I1127 07:16:16.351939 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 27 07:16:16 crc kubenswrapper[4971]: I1127 07:16:16.927618 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-66674dfb5f-52hcq" event={"ID":"82a33cc1-32fe-464f-ac33-b802fd32a4c1","Type":"ContainerStarted","Data":"bce205f17147516a60414770d105f40e40e7784c6e25ddf99d12115151c6a716"} Nov 27 07:16:16 crc kubenswrapper[4971]: I1127 07:16:16.928030 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:16 crc kubenswrapper[4971]: I1127 07:16:16.936617 4971 generic.go:334] "Generic (PLEG): container finished" podID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerID="8401472fdb05ee764b52e1e4288e43d7d114c36a189f7cff80d4d75bbc639485" exitCode=0 Nov 27 07:16:16 crc kubenswrapper[4971]: I1127 07:16:16.936660 4971 generic.go:334] "Generic (PLEG): container finished" podID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" 
containerID="4826b19de310d9c87ffd3ed8e71d8044651260f608a892b79b4f24eecd62924a" exitCode=2 Nov 27 07:16:16 crc kubenswrapper[4971]: I1127 07:16:16.936674 4971 generic.go:334] "Generic (PLEG): container finished" podID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerID="e33500b78ddcfba0deb8dea898fe276e9aa9b11dc5cc25915d38d7c59aa1dff0" exitCode=0 Nov 27 07:16:16 crc kubenswrapper[4971]: I1127 07:16:16.936676 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a767b03-47f5-4cf2-b573-f30e3aa922b5","Type":"ContainerDied","Data":"8401472fdb05ee764b52e1e4288e43d7d114c36a189f7cff80d4d75bbc639485"} Nov 27 07:16:16 crc kubenswrapper[4971]: I1127 07:16:16.936708 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a767b03-47f5-4cf2-b573-f30e3aa922b5","Type":"ContainerDied","Data":"4826b19de310d9c87ffd3ed8e71d8044651260f608a892b79b4f24eecd62924a"} Nov 27 07:16:16 crc kubenswrapper[4971]: I1127 07:16:16.936717 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a767b03-47f5-4cf2-b573-f30e3aa922b5","Type":"ContainerDied","Data":"e33500b78ddcfba0deb8dea898fe276e9aa9b11dc5cc25915d38d7c59aa1dff0"} Nov 27 07:16:16 crc kubenswrapper[4971]: I1127 07:16:16.954877 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-66674dfb5f-52hcq" podStartSLOduration=2.954854201 podStartE2EDuration="2.954854201s" podCreationTimestamp="2025-11-27 07:16:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:16:16.946343691 +0000 UTC m=+1415.138387609" watchObservedRunningTime="2025-11-27 07:16:16.954854201 +0000 UTC m=+1415.146898119" Nov 27 07:16:17 crc kubenswrapper[4971]: I1127 07:16:17.948502 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:19 crc kubenswrapper[4971]: I1127 07:16:19.980662 4971 generic.go:334] "Generic (PLEG): container finished" podID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerID="90758f96d363ce7f90865abbaa6ed510ec7f2279d57b85d2451ba4860bd5e8dd" exitCode=0 Nov 27 07:16:19 crc kubenswrapper[4971]: I1127 07:16:19.980727 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a767b03-47f5-4cf2-b573-f30e3aa922b5","Type":"ContainerDied","Data":"90758f96d363ce7f90865abbaa6ed510ec7f2279d57b85d2451ba4860bd5e8dd"} Nov 27 07:16:21 crc kubenswrapper[4971]: I1127 07:16:21.418111 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.162:3000/\": dial tcp 10.217.0.162:3000: connect: connection refused" Nov 27 07:16:21 crc kubenswrapper[4971]: I1127 07:16:21.649807 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 07:16:21 crc kubenswrapper[4971]: I1127 07:16:21.650266 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="02f2a8a6-cfc5-4973-9f54-e4647a8e4552" containerName="glance-log" containerID="cri-o://77ce808b9ead19bdd862809257befb60bc498510116c4ba5427f504d52563215" gracePeriod=30 Nov 27 07:16:21 crc kubenswrapper[4971]: I1127 07:16:21.650499 4971 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/glance-default-external-api-0" podUID="02f2a8a6-cfc5-4973-9f54-e4647a8e4552" containerName="glance-httpd" containerID="cri-o://295d83fb85f340aebaee5fe652faaafa4bfaa16381ff8087ec2330b20ed3fd1d" gracePeriod=30 Nov 27 07:16:21 crc kubenswrapper[4971]: I1127 07:16:21.803120 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 27 07:16:21 crc kubenswrapper[4971]: I1127 07:16:21.803619 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="95491a98-26dd-4fe9-95a8-09e1c3f7d3f6" containerName="kube-state-metrics" containerID="cri-o://5f69ff59df757c223b52667faed26c43c2f9179d089f63d894199a3c579b0475" gracePeriod=30 Nov 27 07:16:21 crc kubenswrapper[4971]: I1127 07:16:21.835591 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/kube-state-metrics-0" podUID="95491a98-26dd-4fe9-95a8-09e1c3f7d3f6" containerName="kube-state-metrics" probeResult="failure" output="Get \"http://10.217.0.105:8081/readyz\": dial tcp 10.217.0.105:8081: connect: connection refused" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.004170 4971 generic.go:334] "Generic (PLEG): container finished" podID="95491a98-26dd-4fe9-95a8-09e1c3f7d3f6" containerID="5f69ff59df757c223b52667faed26c43c2f9179d089f63d894199a3c579b0475" exitCode=2 Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.004246 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"95491a98-26dd-4fe9-95a8-09e1c3f7d3f6","Type":"ContainerDied","Data":"5f69ff59df757c223b52667faed26c43c2f9179d089f63d894199a3c579b0475"} Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.008199 4971 generic.go:334] "Generic (PLEG): container finished" podID="02f2a8a6-cfc5-4973-9f54-e4647a8e4552" containerID="77ce808b9ead19bdd862809257befb60bc498510116c4ba5427f504d52563215" exitCode=143 Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.008293 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"02f2a8a6-cfc5-4973-9f54-e4647a8e4552","Type":"ContainerDied","Data":"77ce808b9ead19bdd862809257befb60bc498510116c4ba5427f504d52563215"} Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.041304 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-b4zt8"] Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.043287 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-b4zt8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.052307 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-b4zt8"] Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.138498 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glkvs\" (UniqueName: \"kubernetes.io/projected/eb3b1410-ced5-4047-8aee-eb6e28fd5fb5-kube-api-access-glkvs\") pod \"nova-api-db-create-b4zt8\" (UID: \"eb3b1410-ced5-4047-8aee-eb6e28fd5fb5\") " pod="openstack/nova-api-db-create-b4zt8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.138629 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb3b1410-ced5-4047-8aee-eb6e28fd5fb5-operator-scripts\") pod \"nova-api-db-create-b4zt8\" (UID: \"eb3b1410-ced5-4047-8aee-eb6e28fd5fb5\") " pod="openstack/nova-api-db-create-b4zt8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.198599 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-6022-account-create-update-hwnvl"] Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.200273 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-6022-account-create-update-hwnvl" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.205165 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.240481 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glkvs\" (UniqueName: \"kubernetes.io/projected/eb3b1410-ced5-4047-8aee-eb6e28fd5fb5-kube-api-access-glkvs\") pod \"nova-api-db-create-b4zt8\" (UID: \"eb3b1410-ced5-4047-8aee-eb6e28fd5fb5\") " pod="openstack/nova-api-db-create-b4zt8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.240906 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb3b1410-ced5-4047-8aee-eb6e28fd5fb5-operator-scripts\") pod \"nova-api-db-create-b4zt8\" (UID: \"eb3b1410-ced5-4047-8aee-eb6e28fd5fb5\") " pod="openstack/nova-api-db-create-b4zt8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.240967 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59x2g\" (UniqueName: \"kubernetes.io/projected/f379bced-5e57-4106-86b4-437cf8bce9b4-kube-api-access-59x2g\") pod \"nova-api-6022-account-create-update-hwnvl\" (UID: \"f379bced-5e57-4106-86b4-437cf8bce9b4\") " pod="openstack/nova-api-6022-account-create-update-hwnvl" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.240998 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f379bced-5e57-4106-86b4-437cf8bce9b4-operator-scripts\") pod \"nova-api-6022-account-create-update-hwnvl\" (UID: \"f379bced-5e57-4106-86b4-437cf8bce9b4\") " pod="openstack/nova-api-6022-account-create-update-hwnvl" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.242372 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb3b1410-ced5-4047-8aee-eb6e28fd5fb5-operator-scripts\") pod \"nova-api-db-create-b4zt8\" (UID: 
\"eb3b1410-ced5-4047-8aee-eb6e28fd5fb5\") " pod="openstack/nova-api-db-create-b4zt8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.276578 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-6022-account-create-update-hwnvl"] Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.283096 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glkvs\" (UniqueName: \"kubernetes.io/projected/eb3b1410-ced5-4047-8aee-eb6e28fd5fb5-kube-api-access-glkvs\") pod \"nova-api-db-create-b4zt8\" (UID: \"eb3b1410-ced5-4047-8aee-eb6e28fd5fb5\") " pod="openstack/nova-api-db-create-b4zt8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.298116 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-vpqrm"] Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.299928 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-vpqrm" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.319738 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-vpqrm"] Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.344882 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f379bced-5e57-4106-86b4-437cf8bce9b4-operator-scripts\") pod \"nova-api-6022-account-create-update-hwnvl\" (UID: \"f379bced-5e57-4106-86b4-437cf8bce9b4\") " pod="openstack/nova-api-6022-account-create-update-hwnvl" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.345243 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf3a9110-6490-49f0-9bcc-bbd3bd595d58-operator-scripts\") pod \"nova-cell0-db-create-vpqrm\" (UID: \"bf3a9110-6490-49f0-9bcc-bbd3bd595d58\") " pod="openstack/nova-cell0-db-create-vpqrm" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.345419 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksvnd\" (UniqueName: \"kubernetes.io/projected/bf3a9110-6490-49f0-9bcc-bbd3bd595d58-kube-api-access-ksvnd\") pod \"nova-cell0-db-create-vpqrm\" (UID: \"bf3a9110-6490-49f0-9bcc-bbd3bd595d58\") " pod="openstack/nova-cell0-db-create-vpqrm" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.345598 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59x2g\" (UniqueName: \"kubernetes.io/projected/f379bced-5e57-4106-86b4-437cf8bce9b4-kube-api-access-59x2g\") pod \"nova-api-6022-account-create-update-hwnvl\" (UID: \"f379bced-5e57-4106-86b4-437cf8bce9b4\") " pod="openstack/nova-api-6022-account-create-update-hwnvl" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.345998 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f379bced-5e57-4106-86b4-437cf8bce9b4-operator-scripts\") pod \"nova-api-6022-account-create-update-hwnvl\" (UID: \"f379bced-5e57-4106-86b4-437cf8bce9b4\") " pod="openstack/nova-api-6022-account-create-update-hwnvl" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.402989 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-b4zt8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.407598 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-91e4-account-create-update-qxzp8"] Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.409247 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-91e4-account-create-update-qxzp8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.414507 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.428637 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-91e4-account-create-update-qxzp8"] Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.429996 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59x2g\" (UniqueName: \"kubernetes.io/projected/f379bced-5e57-4106-86b4-437cf8bce9b4-kube-api-access-59x2g\") pod \"nova-api-6022-account-create-update-hwnvl\" (UID: \"f379bced-5e57-4106-86b4-437cf8bce9b4\") " pod="openstack/nova-api-6022-account-create-update-hwnvl" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.454065 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf3a9110-6490-49f0-9bcc-bbd3bd595d58-operator-scripts\") pod \"nova-cell0-db-create-vpqrm\" (UID: \"bf3a9110-6490-49f0-9bcc-bbd3bd595d58\") " pod="openstack/nova-cell0-db-create-vpqrm" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.478843 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksvnd\" (UniqueName: \"kubernetes.io/projected/bf3a9110-6490-49f0-9bcc-bbd3bd595d58-kube-api-access-ksvnd\") pod \"nova-cell0-db-create-vpqrm\" (UID: \"bf3a9110-6490-49f0-9bcc-bbd3bd595d58\") " pod="openstack/nova-cell0-db-create-vpqrm" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.479979 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc08a45a-a721-4947-97d4-1592a8547091-operator-scripts\") pod \"nova-cell0-91e4-account-create-update-qxzp8\" (UID: \"fc08a45a-a721-4947-97d4-1592a8547091\") " pod="openstack/nova-cell0-91e4-account-create-update-qxzp8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.458126 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf3a9110-6490-49f0-9bcc-bbd3bd595d58-operator-scripts\") pod \"nova-cell0-db-create-vpqrm\" (UID: \"bf3a9110-6490-49f0-9bcc-bbd3bd595d58\") " pod="openstack/nova-cell0-db-create-vpqrm" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.482039 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7s6nf\" (UniqueName: \"kubernetes.io/projected/fc08a45a-a721-4947-97d4-1592a8547091-kube-api-access-7s6nf\") pod \"nova-cell0-91e4-account-create-update-qxzp8\" (UID: \"fc08a45a-a721-4947-97d4-1592a8547091\") " pod="openstack/nova-cell0-91e4-account-create-update-qxzp8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.495604 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-p4742"] Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.496988 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-p4742" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.516176 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksvnd\" (UniqueName: \"kubernetes.io/projected/bf3a9110-6490-49f0-9bcc-bbd3bd595d58-kube-api-access-ksvnd\") pod \"nova-cell0-db-create-vpqrm\" (UID: \"bf3a9110-6490-49f0-9bcc-bbd3bd595d58\") " pod="openstack/nova-cell0-db-create-vpqrm" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.522114 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-6022-account-create-update-hwnvl" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.529389 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-p4742"] Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.590907 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-884zm\" (UniqueName: \"kubernetes.io/projected/66f97b8c-cee1-4869-b034-c66f2830b14e-kube-api-access-884zm\") pod \"nova-cell1-db-create-p4742\" (UID: \"66f97b8c-cee1-4869-b034-c66f2830b14e\") " pod="openstack/nova-cell1-db-create-p4742" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.591052 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc08a45a-a721-4947-97d4-1592a8547091-operator-scripts\") pod \"nova-cell0-91e4-account-create-update-qxzp8\" (UID: \"fc08a45a-a721-4947-97d4-1592a8547091\") " pod="openstack/nova-cell0-91e4-account-create-update-qxzp8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.591113 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66f97b8c-cee1-4869-b034-c66f2830b14e-operator-scripts\") pod \"nova-cell1-db-create-p4742\" (UID: \"66f97b8c-cee1-4869-b034-c66f2830b14e\") " pod="openstack/nova-cell1-db-create-p4742" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.591177 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7s6nf\" (UniqueName: \"kubernetes.io/projected/fc08a45a-a721-4947-97d4-1592a8547091-kube-api-access-7s6nf\") pod \"nova-cell0-91e4-account-create-update-qxzp8\" (UID: \"fc08a45a-a721-4947-97d4-1592a8547091\") " pod="openstack/nova-cell0-91e4-account-create-update-qxzp8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.606749 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc08a45a-a721-4947-97d4-1592a8547091-operator-scripts\") pod \"nova-cell0-91e4-account-create-update-qxzp8\" (UID: \"fc08a45a-a721-4947-97d4-1592a8547091\") " pod="openstack/nova-cell0-91e4-account-create-update-qxzp8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.623067 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-6991-account-create-update-flxv8"] Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.624422 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-6991-account-create-update-flxv8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.626037 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7s6nf\" (UniqueName: \"kubernetes.io/projected/fc08a45a-a721-4947-97d4-1592a8547091-kube-api-access-7s6nf\") pod \"nova-cell0-91e4-account-create-update-qxzp8\" (UID: \"fc08a45a-a721-4947-97d4-1592a8547091\") " pod="openstack/nova-cell0-91e4-account-create-update-qxzp8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.626807 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.627821 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-6991-account-create-update-flxv8"] Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.678145 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-vpqrm" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.694869 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10d9f99e-353e-4988-a85f-0173312c0a24-operator-scripts\") pod \"nova-cell1-6991-account-create-update-flxv8\" (UID: \"10d9f99e-353e-4988-a85f-0173312c0a24\") " pod="openstack/nova-cell1-6991-account-create-update-flxv8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.694958 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zpvx\" (UniqueName: \"kubernetes.io/projected/10d9f99e-353e-4988-a85f-0173312c0a24-kube-api-access-7zpvx\") pod \"nova-cell1-6991-account-create-update-flxv8\" (UID: \"10d9f99e-353e-4988-a85f-0173312c0a24\") " pod="openstack/nova-cell1-6991-account-create-update-flxv8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.695059 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-884zm\" (UniqueName: \"kubernetes.io/projected/66f97b8c-cee1-4869-b034-c66f2830b14e-kube-api-access-884zm\") pod \"nova-cell1-db-create-p4742\" (UID: \"66f97b8c-cee1-4869-b034-c66f2830b14e\") " pod="openstack/nova-cell1-db-create-p4742" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.695421 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66f97b8c-cee1-4869-b034-c66f2830b14e-operator-scripts\") pod \"nova-cell1-db-create-p4742\" (UID: \"66f97b8c-cee1-4869-b034-c66f2830b14e\") " pod="openstack/nova-cell1-db-create-p4742" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.696345 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66f97b8c-cee1-4869-b034-c66f2830b14e-operator-scripts\") pod \"nova-cell1-db-create-p4742\" (UID: \"66f97b8c-cee1-4869-b034-c66f2830b14e\") " pod="openstack/nova-cell1-db-create-p4742" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.715142 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-884zm\" (UniqueName: \"kubernetes.io/projected/66f97b8c-cee1-4869-b034-c66f2830b14e-kube-api-access-884zm\") pod \"nova-cell1-db-create-p4742\" (UID: \"66f97b8c-cee1-4869-b034-c66f2830b14e\") " pod="openstack/nova-cell1-db-create-p4742" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.787791 
4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-91e4-account-create-update-qxzp8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.797670 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10d9f99e-353e-4988-a85f-0173312c0a24-operator-scripts\") pod \"nova-cell1-6991-account-create-update-flxv8\" (UID: \"10d9f99e-353e-4988-a85f-0173312c0a24\") " pod="openstack/nova-cell1-6991-account-create-update-flxv8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.797731 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zpvx\" (UniqueName: \"kubernetes.io/projected/10d9f99e-353e-4988-a85f-0173312c0a24-kube-api-access-7zpvx\") pod \"nova-cell1-6991-account-create-update-flxv8\" (UID: \"10d9f99e-353e-4988-a85f-0173312c0a24\") " pod="openstack/nova-cell1-6991-account-create-update-flxv8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.798333 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10d9f99e-353e-4988-a85f-0173312c0a24-operator-scripts\") pod \"nova-cell1-6991-account-create-update-flxv8\" (UID: \"10d9f99e-353e-4988-a85f-0173312c0a24\") " pod="openstack/nova-cell1-6991-account-create-update-flxv8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.814652 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zpvx\" (UniqueName: \"kubernetes.io/projected/10d9f99e-353e-4988-a85f-0173312c0a24-kube-api-access-7zpvx\") pod \"nova-cell1-6991-account-create-update-flxv8\" (UID: \"10d9f99e-353e-4988-a85f-0173312c0a24\") " pod="openstack/nova-cell1-6991-account-create-update-flxv8" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.909622 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-p4742" Nov 27 07:16:22 crc kubenswrapper[4971]: I1127 07:16:22.978282 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-6991-account-create-update-flxv8" Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.007428 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.008925 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8cbb2c33-b8a0-4220-916a-ea0a8d738755" containerName="glance-log" containerID="cri-o://aa0a8caacb07764852e7600c71557c1dc12f7c2b4791c9c93c1bf945ac233ecc" gracePeriod=30 Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.009499 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8cbb2c33-b8a0-4220-916a-ea0a8d738755" containerName="glance-httpd" containerID="cri-o://1cfd5131dc75f97aea2ccb4abbcadf7829f00887f56b2a68f2cc459a21f9b7d0" gracePeriod=30 Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.240379 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.308590 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6mrh\" (UniqueName: \"kubernetes.io/projected/95491a98-26dd-4fe9-95a8-09e1c3f7d3f6-kube-api-access-g6mrh\") pod \"95491a98-26dd-4fe9-95a8-09e1c3f7d3f6\" (UID: \"95491a98-26dd-4fe9-95a8-09e1c3f7d3f6\") " Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.343857 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95491a98-26dd-4fe9-95a8-09e1c3f7d3f6-kube-api-access-g6mrh" (OuterVolumeSpecName: "kube-api-access-g6mrh") pod "95491a98-26dd-4fe9-95a8-09e1c3f7d3f6" (UID: "95491a98-26dd-4fe9-95a8-09e1c3f7d3f6"). InnerVolumeSpecName "kube-api-access-g6mrh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.410592 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6mrh\" (UniqueName: \"kubernetes.io/projected/95491a98-26dd-4fe9-95a8-09e1c3f7d3f6-kube-api-access-g6mrh\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.493235 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.592921 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-91e4-account-create-update-qxzp8"] Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.615567 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wfts\" (UniqueName: \"kubernetes.io/projected/4a767b03-47f5-4cf2-b573-f30e3aa922b5-kube-api-access-8wfts\") pod \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.615689 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a767b03-47f5-4cf2-b573-f30e3aa922b5-log-httpd\") pod \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.615758 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-combined-ca-bundle\") pod \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.615861 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-config-data\") pod \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.615964 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-scripts\") pod \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.616010 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a767b03-47f5-4cf2-b573-f30e3aa922b5-run-httpd\") pod 
\"4a767b03-47f5-4cf2-b573-f30e3aa922b5\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.616037 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-sg-core-conf-yaml\") pod \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\" (UID: \"4a767b03-47f5-4cf2-b573-f30e3aa922b5\") " Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.616830 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a767b03-47f5-4cf2-b573-f30e3aa922b5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4a767b03-47f5-4cf2-b573-f30e3aa922b5" (UID: "4a767b03-47f5-4cf2-b573-f30e3aa922b5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.617711 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a767b03-47f5-4cf2-b573-f30e3aa922b5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4a767b03-47f5-4cf2-b573-f30e3aa922b5" (UID: "4a767b03-47f5-4cf2-b573-f30e3aa922b5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.633214 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-scripts" (OuterVolumeSpecName: "scripts") pod "4a767b03-47f5-4cf2-b573-f30e3aa922b5" (UID: "4a767b03-47f5-4cf2-b573-f30e3aa922b5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.633239 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a767b03-47f5-4cf2-b573-f30e3aa922b5-kube-api-access-8wfts" (OuterVolumeSpecName: "kube-api-access-8wfts") pod "4a767b03-47f5-4cf2-b573-f30e3aa922b5" (UID: "4a767b03-47f5-4cf2-b573-f30e3aa922b5"). InnerVolumeSpecName "kube-api-access-8wfts". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.664180 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "4a767b03-47f5-4cf2-b573-f30e3aa922b5" (UID: "4a767b03-47f5-4cf2-b573-f30e3aa922b5"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.719221 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.719255 4971 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a767b03-47f5-4cf2-b573-f30e3aa922b5-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.719270 4971 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.719282 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wfts\" (UniqueName: \"kubernetes.io/projected/4a767b03-47f5-4cf2-b573-f30e3aa922b5-kube-api-access-8wfts\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.719291 4971 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4a767b03-47f5-4cf2-b573-f30e3aa922b5-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.744109 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-vpqrm"] Nov 27 07:16:23 crc kubenswrapper[4971]: W1127 07:16:23.751017 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf3a9110_6490_49f0_9bcc_bbd3bd595d58.slice/crio-1a71cffb604fdba8ec6e068fb701dca89293bd76382123f8cbcbe9cc8660e96d WatchSource:0}: Error finding container 1a71cffb604fdba8ec6e068fb701dca89293bd76382123f8cbcbe9cc8660e96d: Status 404 returned error can't find the container with id 1a71cffb604fdba8ec6e068fb701dca89293bd76382123f8cbcbe9cc8660e96d Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.758499 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4a767b03-47f5-4cf2-b573-f30e3aa922b5" (UID: "4a767b03-47f5-4cf2-b573-f30e3aa922b5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.766817 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-b4zt8"] Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.795836 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-config-data" (OuterVolumeSpecName: "config-data") pod "4a767b03-47f5-4cf2-b573-f30e3aa922b5" (UID: "4a767b03-47f5-4cf2-b573-f30e3aa922b5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.823004 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.823057 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a767b03-47f5-4cf2-b573-f30e3aa922b5-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:23 crc kubenswrapper[4971]: I1127 07:16:23.890725 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-6022-account-create-update-hwnvl"] Nov 27 07:16:23 crc kubenswrapper[4971]: W1127 07:16:23.894068 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf379bced_5e57_4106_86b4_437cf8bce9b4.slice/crio-322c0902e687c51e7822adfc23299e21fb17afbb3b31906568102f43f9ff23df WatchSource:0}: Error finding container 322c0902e687c51e7822adfc23299e21fb17afbb3b31906568102f43f9ff23df: Status 404 returned error can't find the container with id 322c0902e687c51e7822adfc23299e21fb17afbb3b31906568102f43f9ff23df Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.005437 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-6991-account-create-update-flxv8"] Nov 27 07:16:24 crc kubenswrapper[4971]: W1127 07:16:24.022830 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66f97b8c_cee1_4869_b034_c66f2830b14e.slice/crio-b588086e01fecb7c4f1eeb3abbe1d3a3bbec4d950d49548a9948d24b1379c13d WatchSource:0}: Error finding container b588086e01fecb7c4f1eeb3abbe1d3a3bbec4d950d49548a9948d24b1379c13d: Status 404 returned error can't find the container with id b588086e01fecb7c4f1eeb3abbe1d3a3bbec4d950d49548a9948d24b1379c13d Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.030716 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-p4742"] Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.053911 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"95491a98-26dd-4fe9-95a8-09e1c3f7d3f6","Type":"ContainerDied","Data":"bd52c95c5ffc86358bbf0fa7678fb985bf8fe272f9ce498f25ceb2043a80dd1b"} Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.054028 4971 scope.go:117] "RemoveContainer" containerID="5f69ff59df757c223b52667faed26c43c2f9179d089f63d894199a3c579b0475" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.054207 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.061409 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"36d86c04-b5e4-4334-832b-10953c1d0b1d","Type":"ContainerStarted","Data":"605fc6ed04781d6ae79576290eb45958f2fcc0fff09021399050e8cc200f2306"} Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.071696 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-b4zt8" event={"ID":"eb3b1410-ced5-4047-8aee-eb6e28fd5fb5","Type":"ContainerStarted","Data":"f8c04a3373e562bc809e9e4fc35383c8fef8faa16f99ddf1854711cb4b91495c"} Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.071840 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-b4zt8" event={"ID":"eb3b1410-ced5-4047-8aee-eb6e28fd5fb5","Type":"ContainerStarted","Data":"a700186b7a38a3f198188d40d846503bc2bfd278ff9d7510f573b7d73d7da463"} Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.108494 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.420345615 podStartE2EDuration="13.108472383s" podCreationTimestamp="2025-11-27 07:16:11 +0000 UTC" firstStartedPulling="2025-11-27 07:16:12.391842338 +0000 UTC m=+1410.583886256" lastFinishedPulling="2025-11-27 07:16:23.079969106 +0000 UTC m=+1421.272013024" observedRunningTime="2025-11-27 07:16:24.084126257 +0000 UTC m=+1422.276170175" watchObservedRunningTime="2025-11-27 07:16:24.108472383 +0000 UTC m=+1422.300516301" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.113405 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-91e4-account-create-update-qxzp8" event={"ID":"fc08a45a-a721-4947-97d4-1592a8547091","Type":"ContainerStarted","Data":"19a65f12d0fa7695d3edd4168e5a8ef629cdc60be7faa6681d12e8d04a8cf497"} Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.113456 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-91e4-account-create-update-qxzp8" event={"ID":"fc08a45a-a721-4947-97d4-1592a8547091","Type":"ContainerStarted","Data":"fb377194116965acfb5aa959d204d4a8718f7506c8da5a352538c5706ad9f0bf"} Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.145843 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4a767b03-47f5-4cf2-b573-f30e3aa922b5","Type":"ContainerDied","Data":"7f3bb04a37ee11808c4d59f2a925ac76e9a36af845b03328e0d04568125bc3e8"} Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.145871 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.147042 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.161491 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.165058 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-b4zt8" podStartSLOduration=2.165040078 podStartE2EDuration="2.165040078s" podCreationTimestamp="2025-11-27 07:16:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:16:24.124375682 +0000 UTC m=+1422.316419590" watchObservedRunningTime="2025-11-27 07:16:24.165040078 +0000 UTC m=+1422.357083996" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.167084 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6991-account-create-update-flxv8" event={"ID":"10d9f99e-353e-4988-a85f-0173312c0a24","Type":"ContainerStarted","Data":"674a438191e1af4ab93630e1e36a381092a72696dad46af52de98a84e39b444e"} Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.168110 4971 scope.go:117] "RemoveContainer" containerID="8401472fdb05ee764b52e1e4288e43d7d114c36a189f7cff80d4d75bbc639485" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.195487 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-vpqrm" event={"ID":"bf3a9110-6490-49f0-9bcc-bbd3bd595d58","Type":"ContainerStarted","Data":"61d2130ec7c4d80ade741707d5c73217fac765808c16c9e87c4e84949f761922"} Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.195562 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-vpqrm" event={"ID":"bf3a9110-6490-49f0-9bcc-bbd3bd595d58","Type":"ContainerStarted","Data":"1a71cffb604fdba8ec6e068fb701dca89293bd76382123f8cbcbe9cc8660e96d"} Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.197134 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Nov 27 07:16:24 crc kubenswrapper[4971]: E1127 07:16:24.198258 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerName="proxy-httpd" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.198286 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerName="proxy-httpd" Nov 27 07:16:24 crc kubenswrapper[4971]: E1127 07:16:24.198317 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerName="sg-core" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.198325 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerName="sg-core" Nov 27 07:16:24 crc kubenswrapper[4971]: E1127 07:16:24.198349 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerName="ceilometer-notification-agent" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.198357 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerName="ceilometer-notification-agent" Nov 27 07:16:24 crc kubenswrapper[4971]: E1127 07:16:24.198389 4971 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="95491a98-26dd-4fe9-95a8-09e1c3f7d3f6" containerName="kube-state-metrics" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.198398 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="95491a98-26dd-4fe9-95a8-09e1c3f7d3f6" containerName="kube-state-metrics" Nov 27 07:16:24 crc kubenswrapper[4971]: E1127 07:16:24.198449 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerName="ceilometer-central-agent" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.198457 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerName="ceilometer-central-agent" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.204264 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="95491a98-26dd-4fe9-95a8-09e1c3f7d3f6" containerName="kube-state-metrics" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.204305 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerName="proxy-httpd" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.204333 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerName="ceilometer-notification-agent" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.204344 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerName="ceilometer-central-agent" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.204377 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" containerName="sg-core" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.205287 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.207772 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-9x6zw" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.210283 4971 generic.go:334] "Generic (PLEG): container finished" podID="8cbb2c33-b8a0-4220-916a-ea0a8d738755" containerID="aa0a8caacb07764852e7600c71557c1dc12f7c2b4791c9c93c1bf945ac233ecc" exitCode=143 Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.210381 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8cbb2c33-b8a0-4220-916a-ea0a8d738755","Type":"ContainerDied","Data":"aa0a8caacb07764852e7600c71557c1dc12f7c2b4791c9c93c1bf945ac233ecc"} Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.212643 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.212937 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.226235 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-p4742" event={"ID":"66f97b8c-cee1-4869-b034-c66f2830b14e","Type":"ContainerStarted","Data":"b588086e01fecb7c4f1eeb3abbe1d3a3bbec4d950d49548a9948d24b1379c13d"} Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.229919 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-91e4-account-create-update-qxzp8" podStartSLOduration=2.229886896 podStartE2EDuration="2.229886896s" podCreationTimestamp="2025-11-27 07:16:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:16:24.139049828 +0000 UTC m=+1422.331093746" watchObservedRunningTime="2025-11-27 07:16:24.229886896 +0000 UTC m=+1422.421930814" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.247709 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6022-account-create-update-hwnvl" event={"ID":"f379bced-5e57-4106-86b4-437cf8bce9b4","Type":"ContainerStarted","Data":"322c0902e687c51e7822adfc23299e21fb17afbb3b31906568102f43f9ff23df"} Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.278586 4971 scope.go:117] "RemoveContainer" containerID="4826b19de310d9c87ffd3ed8e71d8044651260f608a892b79b4f24eecd62924a" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.285974 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.294354 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-vpqrm" podStartSLOduration=2.2943355739999998 podStartE2EDuration="2.294335574s" podCreationTimestamp="2025-11-27 07:16:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:16:24.213174876 +0000 UTC m=+1422.405218794" watchObservedRunningTime="2025-11-27 07:16:24.294335574 +0000 UTC m=+1422.486379492" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.327890 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.336524 4971 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.347044 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2b1d941-1738-4967-a97c-6a7b2c36531c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"a2b1d941-1738-4967-a97c-6a7b2c36531c\") " pod="openstack/kube-state-metrics-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.347193 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/a2b1d941-1738-4967-a97c-6a7b2c36531c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"a2b1d941-1738-4967-a97c-6a7b2c36531c\") " pod="openstack/kube-state-metrics-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.347271 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2b1d941-1738-4967-a97c-6a7b2c36531c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"a2b1d941-1738-4967-a97c-6a7b2c36531c\") " pod="openstack/kube-state-metrics-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.347312 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcr6v\" (UniqueName: \"kubernetes.io/projected/a2b1d941-1738-4967-a97c-6a7b2c36531c-kube-api-access-mcr6v\") pod \"kube-state-metrics-0\" (UID: \"a2b1d941-1738-4967-a97c-6a7b2c36531c\") " pod="openstack/kube-state-metrics-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.347298 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-6022-account-create-update-hwnvl" podStartSLOduration=2.347272691 podStartE2EDuration="2.347272691s" podCreationTimestamp="2025-11-27 07:16:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:16:24.272441214 +0000 UTC m=+1422.464485132" watchObservedRunningTime="2025-11-27 07:16:24.347272691 +0000 UTC m=+1422.539316619" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.361176 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.364126 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.366330 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.372175 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.373466 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:24 crc kubenswrapper[4971]: E1127 07:16:24.401196 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95491a98_26dd_4fe9_95a8_09e1c3f7d3f6.slice/crio-bd52c95c5ffc86358bbf0fa7678fb985bf8fe272f9ce498f25ceb2043a80dd1b\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95491a98_26dd_4fe9_95a8_09e1c3f7d3f6.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc08a45a_a721_4947_97d4_1592a8547091.slice/crio-19a65f12d0fa7695d3edd4168e5a8ef629cdc60be7faa6681d12e8d04a8cf497.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a767b03_47f5_4cf2_b573_f30e3aa922b5.slice/crio-7f3bb04a37ee11808c4d59f2a925ac76e9a36af845b03328e0d04568125bc3e8\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a767b03_47f5_4cf2_b573_f30e3aa922b5.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc08a45a_a721_4947_97d4_1592a8547091.slice/crio-conmon-19a65f12d0fa7695d3edd4168e5a8ef629cdc60be7faa6681d12e8d04a8cf497.scope\": RecentStats: unable to find data in memory cache]" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.449164 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/a2b1d941-1738-4967-a97c-6a7b2c36531c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"a2b1d941-1738-4967-a97c-6a7b2c36531c\") " pod="openstack/kube-state-metrics-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.449218 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0c866847-6859-4897-8c84-c12182259560-log-httpd\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.449251 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.449280 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc 
kubenswrapper[4971]: I1127 07:16:24.449331 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2b1d941-1738-4967-a97c-6a7b2c36531c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"a2b1d941-1738-4967-a97c-6a7b2c36531c\") " pod="openstack/kube-state-metrics-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.449359 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4p4k\" (UniqueName: \"kubernetes.io/projected/0c866847-6859-4897-8c84-c12182259560-kube-api-access-r4p4k\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.449383 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcr6v\" (UniqueName: \"kubernetes.io/projected/a2b1d941-1738-4967-a97c-6a7b2c36531c-kube-api-access-mcr6v\") pod \"kube-state-metrics-0\" (UID: \"a2b1d941-1738-4967-a97c-6a7b2c36531c\") " pod="openstack/kube-state-metrics-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.449454 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-scripts\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.449475 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-config-data\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.449492 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2b1d941-1738-4967-a97c-6a7b2c36531c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"a2b1d941-1738-4967-a97c-6a7b2c36531c\") " pod="openstack/kube-state-metrics-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.449545 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0c866847-6859-4897-8c84-c12182259560-run-httpd\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.456060 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2b1d941-1738-4967-a97c-6a7b2c36531c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"a2b1d941-1738-4967-a97c-6a7b2c36531c\") " pod="openstack/kube-state-metrics-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.457110 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2b1d941-1738-4967-a97c-6a7b2c36531c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"a2b1d941-1738-4967-a97c-6a7b2c36531c\") " pod="openstack/kube-state-metrics-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.457236 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" 
(UniqueName: \"kubernetes.io/secret/a2b1d941-1738-4967-a97c-6a7b2c36531c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"a2b1d941-1738-4967-a97c-6a7b2c36531c\") " pod="openstack/kube-state-metrics-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.470875 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcr6v\" (UniqueName: \"kubernetes.io/projected/a2b1d941-1738-4967-a97c-6a7b2c36531c-kube-api-access-mcr6v\") pod \"kube-state-metrics-0\" (UID: \"a2b1d941-1738-4967-a97c-6a7b2c36531c\") " pod="openstack/kube-state-metrics-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.537036 4971 scope.go:117] "RemoveContainer" containerID="90758f96d363ce7f90865abbaa6ed510ec7f2279d57b85d2451ba4860bd5e8dd" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.537054 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.553500 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-scripts\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.553844 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-config-data\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.553889 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0c866847-6859-4897-8c84-c12182259560-run-httpd\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.554014 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0c866847-6859-4897-8c84-c12182259560-log-httpd\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.554033 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.554054 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.554096 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4p4k\" (UniqueName: \"kubernetes.io/projected/0c866847-6859-4897-8c84-c12182259560-kube-api-access-r4p4k\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.554938 4971 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0c866847-6859-4897-8c84-c12182259560-log-httpd\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.556005 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0c866847-6859-4897-8c84-c12182259560-run-httpd\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.558447 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.560278 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.560420 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-scripts\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.562629 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-config-data\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.569709 4971 scope.go:117] "RemoveContainer" containerID="e33500b78ddcfba0deb8dea898fe276e9aa9b11dc5cc25915d38d7c59aa1dff0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.574284 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4p4k\" (UniqueName: \"kubernetes.io/projected/0c866847-6859-4897-8c84-c12182259560-kube-api-access-r4p4k\") pod \"ceilometer-0\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.578756 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a767b03-47f5-4cf2-b573-f30e3aa922b5" path="/var/lib/kubelet/pods/4a767b03-47f5-4cf2-b573-f30e3aa922b5/volumes" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.579442 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95491a98-26dd-4fe9-95a8-09e1c3f7d3f6" path="/var/lib/kubelet/pods/95491a98-26dd-4fe9-95a8-09e1c3f7d3f6/volumes" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.801145 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="02f2a8a6-cfc5-4973-9f54-e4647a8e4552" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.149:9292/healthcheck\": read tcp 10.217.0.2:39684->10.217.0.149:9292: read: connection reset by peer" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.801519 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" 
podUID="02f2a8a6-cfc5-4973-9f54-e4647a8e4552" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.149:9292/healthcheck\": read tcp 10.217.0.2:39670->10.217.0.149:9292: read: connection reset by peer" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.840259 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.845034 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:16:24 crc kubenswrapper[4971]: W1127 07:16:24.879195 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda2b1d941_1738_4967_a97c_6a7b2c36531c.slice/crio-2a4188f91760c23554f5eac7dfb3f6ac1589fb2bbdd197bfbdf22259d93bade4 WatchSource:0}: Error finding container 2a4188f91760c23554f5eac7dfb3f6ac1589fb2bbdd197bfbdf22259d93bade4: Status 404 returned error can't find the container with id 2a4188f91760c23554f5eac7dfb3f6ac1589fb2bbdd197bfbdf22259d93bade4 Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.979496 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:24 crc kubenswrapper[4971]: I1127 07:16:24.994894 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.138879 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.281906 4971 generic.go:334] "Generic (PLEG): container finished" podID="02f2a8a6-cfc5-4973-9f54-e4647a8e4552" containerID="295d83fb85f340aebaee5fe652faaafa4bfaa16381ff8087ec2330b20ed3fd1d" exitCode=0 Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.281980 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"02f2a8a6-cfc5-4973-9f54-e4647a8e4552","Type":"ContainerDied","Data":"295d83fb85f340aebaee5fe652faaafa4bfaa16381ff8087ec2330b20ed3fd1d"} Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.283835 4971 generic.go:334] "Generic (PLEG): container finished" podID="fc08a45a-a721-4947-97d4-1592a8547091" containerID="19a65f12d0fa7695d3edd4168e5a8ef629cdc60be7faa6681d12e8d04a8cf497" exitCode=0 Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.283887 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-91e4-account-create-update-qxzp8" event={"ID":"fc08a45a-a721-4947-97d4-1592a8547091","Type":"ContainerDied","Data":"19a65f12d0fa7695d3edd4168e5a8ef629cdc60be7faa6681d12e8d04a8cf497"} Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.290006 4971 generic.go:334] "Generic (PLEG): container finished" podID="bf3a9110-6490-49f0-9bcc-bbd3bd595d58" containerID="61d2130ec7c4d80ade741707d5c73217fac765808c16c9e87c4e84949f761922" exitCode=0 Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.290126 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-vpqrm" event={"ID":"bf3a9110-6490-49f0-9bcc-bbd3bd595d58","Type":"ContainerDied","Data":"61d2130ec7c4d80ade741707d5c73217fac765808c16c9e87c4e84949f761922"} Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.291774 4971 generic.go:334] "Generic (PLEG): container finished" podID="66f97b8c-cee1-4869-b034-c66f2830b14e" 
containerID="7ac204abd5fcebddae836cc6aa15172601127932e17bded94537e79b8097d011" exitCode=0 Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.291843 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-p4742" event={"ID":"66f97b8c-cee1-4869-b034-c66f2830b14e","Type":"ContainerDied","Data":"7ac204abd5fcebddae836cc6aa15172601127932e17bded94537e79b8097d011"} Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.293413 4971 generic.go:334] "Generic (PLEG): container finished" podID="f379bced-5e57-4106-86b4-437cf8bce9b4" containerID="24c84e1eb9991d718d731ee29d65ce90cc0654b42f107c87f521aa3fcf209447" exitCode=0 Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.293454 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6022-account-create-update-hwnvl" event={"ID":"f379bced-5e57-4106-86b4-437cf8bce9b4","Type":"ContainerDied","Data":"24c84e1eb9991d718d731ee29d65ce90cc0654b42f107c87f521aa3fcf209447"} Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.294872 4971 generic.go:334] "Generic (PLEG): container finished" podID="10d9f99e-353e-4988-a85f-0173312c0a24" containerID="8c44ec84f8a34017d255eeeb24780b4156d325f048da346d4c067d9aa0b36d07" exitCode=0 Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.294935 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6991-account-create-update-flxv8" event={"ID":"10d9f99e-353e-4988-a85f-0173312c0a24","Type":"ContainerDied","Data":"8c44ec84f8a34017d255eeeb24780b4156d325f048da346d4c067d9aa0b36d07"} Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.296409 4971 generic.go:334] "Generic (PLEG): container finished" podID="eb3b1410-ced5-4047-8aee-eb6e28fd5fb5" containerID="f8c04a3373e562bc809e9e4fc35383c8fef8faa16f99ddf1854711cb4b91495c" exitCode=0 Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.296479 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-b4zt8" event={"ID":"eb3b1410-ced5-4047-8aee-eb6e28fd5fb5","Type":"ContainerDied","Data":"f8c04a3373e562bc809e9e4fc35383c8fef8faa16f99ddf1854711cb4b91495c"} Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.301266 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"a2b1d941-1738-4967-a97c-6a7b2c36531c","Type":"ContainerStarted","Data":"2a4188f91760c23554f5eac7dfb3f6ac1589fb2bbdd197bfbdf22259d93bade4"} Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.440407 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 27 07:16:25 crc kubenswrapper[4971]: W1127 07:16:25.473342 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c866847_6859_4897_8c84_c12182259560.slice/crio-a65b4f1a0843ab40b691ae33a70d99d7a10ad6df28ce9022a56452fb487909f1 WatchSource:0}: Error finding container a65b4f1a0843ab40b691ae33a70d99d7a10ad6df28ce9022a56452fb487909f1: Status 404 returned error can't find the container with id a65b4f1a0843ab40b691ae33a70d99d7a10ad6df28ce9022a56452fb487909f1 Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.499767 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.575414 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.575558 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-config-data\") pod \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.575685 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-logs\") pod \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.575821 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-combined-ca-bundle\") pod \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.575849 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-scripts\") pod \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.575876 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-public-tls-certs\") pod \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.575924 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-httpd-run\") pod \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.575960 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gkwt5\" (UniqueName: \"kubernetes.io/projected/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-kube-api-access-gkwt5\") pod \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\" (UID: \"02f2a8a6-cfc5-4973-9f54-e4647a8e4552\") " Nov 27 07:16:25 crc 
kubenswrapper[4971]: I1127 07:16:25.577148 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-logs" (OuterVolumeSpecName: "logs") pod "02f2a8a6-cfc5-4973-9f54-e4647a8e4552" (UID: "02f2a8a6-cfc5-4973-9f54-e4647a8e4552"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.577291 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "02f2a8a6-cfc5-4973-9f54-e4647a8e4552" (UID: "02f2a8a6-cfc5-4973-9f54-e4647a8e4552"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.581654 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-scripts" (OuterVolumeSpecName: "scripts") pod "02f2a8a6-cfc5-4973-9f54-e4647a8e4552" (UID: "02f2a8a6-cfc5-4973-9f54-e4647a8e4552"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.582861 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-kube-api-access-gkwt5" (OuterVolumeSpecName: "kube-api-access-gkwt5") pod "02f2a8a6-cfc5-4973-9f54-e4647a8e4552" (UID: "02f2a8a6-cfc5-4973-9f54-e4647a8e4552"). InnerVolumeSpecName "kube-api-access-gkwt5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.585695 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "02f2a8a6-cfc5-4973-9f54-e4647a8e4552" (UID: "02f2a8a6-cfc5-4973-9f54-e4647a8e4552"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.615029 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "02f2a8a6-cfc5-4973-9f54-e4647a8e4552" (UID: "02f2a8a6-cfc5-4973-9f54-e4647a8e4552"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.637227 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "02f2a8a6-cfc5-4973-9f54-e4647a8e4552" (UID: "02f2a8a6-cfc5-4973-9f54-e4647a8e4552"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.656348 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-config-data" (OuterVolumeSpecName: "config-data") pod "02f2a8a6-cfc5-4973-9f54-e4647a8e4552" (UID: "02f2a8a6-cfc5-4973-9f54-e4647a8e4552"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.678493 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.678801 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.679190 4971 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.679292 4971 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.679376 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gkwt5\" (UniqueName: \"kubernetes.io/projected/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-kube-api-access-gkwt5\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.679482 4971 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.679588 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.679690 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02f2a8a6-cfc5-4973-9f54-e4647a8e4552-logs\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.714077 4971 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Nov 27 07:16:25 crc kubenswrapper[4971]: I1127 07:16:25.781724 4971 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.331419 4971 generic.go:334] "Generic (PLEG): container finished" podID="8cbb2c33-b8a0-4220-916a-ea0a8d738755" containerID="1cfd5131dc75f97aea2ccb4abbcadf7829f00887f56b2a68f2cc459a21f9b7d0" exitCode=0 Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.331489 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8cbb2c33-b8a0-4220-916a-ea0a8d738755","Type":"ContainerDied","Data":"1cfd5131dc75f97aea2ccb4abbcadf7829f00887f56b2a68f2cc459a21f9b7d0"} Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.333527 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"a2b1d941-1738-4967-a97c-6a7b2c36531c","Type":"ContainerStarted","Data":"b06e49f36aeb9385737ef57f67601c2cecfa59d6329f3deb519661b9fff43052"} Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 
07:16:26.333675 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.334620 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0c866847-6859-4897-8c84-c12182259560","Type":"ContainerStarted","Data":"a65b4f1a0843ab40b691ae33a70d99d7a10ad6df28ce9022a56452fb487909f1"} Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.338715 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.339078 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"02f2a8a6-cfc5-4973-9f54-e4647a8e4552","Type":"ContainerDied","Data":"29966680f44886cf8df7f44cf45c781f5b1f7f9c1991c08deb7e4bf4901a8a69"} Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.339125 4971 scope.go:117] "RemoveContainer" containerID="295d83fb85f340aebaee5fe652faaafa4bfaa16381ff8087ec2330b20ed3fd1d" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.394725 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.7358789369999998 podStartE2EDuration="2.394703547s" podCreationTimestamp="2025-11-27 07:16:24 +0000 UTC" firstStartedPulling="2025-11-27 07:16:24.893785555 +0000 UTC m=+1423.085829473" lastFinishedPulling="2025-11-27 07:16:25.552610165 +0000 UTC m=+1423.744654083" observedRunningTime="2025-11-27 07:16:26.366968949 +0000 UTC m=+1424.559012887" watchObservedRunningTime="2025-11-27 07:16:26.394703547 +0000 UTC m=+1424.586747475" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.437395 4971 scope.go:117] "RemoveContainer" containerID="77ce808b9ead19bdd862809257befb60bc498510116c4ba5427f504d52563215" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.442500 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.468197 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.482518 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 07:16:26 crc kubenswrapper[4971]: E1127 07:16:26.483167 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02f2a8a6-cfc5-4973-9f54-e4647a8e4552" containerName="glance-log" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.483183 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="02f2a8a6-cfc5-4973-9f54-e4647a8e4552" containerName="glance-log" Nov 27 07:16:26 crc kubenswrapper[4971]: E1127 07:16:26.483213 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02f2a8a6-cfc5-4973-9f54-e4647a8e4552" containerName="glance-httpd" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.483223 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="02f2a8a6-cfc5-4973-9f54-e4647a8e4552" containerName="glance-httpd" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.483476 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="02f2a8a6-cfc5-4973-9f54-e4647a8e4552" containerName="glance-httpd" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.483498 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="02f2a8a6-cfc5-4973-9f54-e4647a8e4552" 
containerName="glance-log" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.484787 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.492691 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.493236 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.493444 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.568064 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02f2a8a6-cfc5-4973-9f54-e4647a8e4552" path="/var/lib/kubelet/pods/02f2a8a6-cfc5-4973-9f54-e4647a8e4552/volumes" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.597163 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.597221 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.597282 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58j4x\" (UniqueName: \"kubernetes.io/projected/1206c914-fbe7-4e8f-8470-861b0ebf75de-kube-api-access-58j4x\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.597373 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1206c914-fbe7-4e8f-8470-861b0ebf75de-logs\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.597395 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-scripts\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.597437 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-config-data\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.597458 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.597477 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1206c914-fbe7-4e8f-8470-861b0ebf75de-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.699768 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.700185 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58j4x\" (UniqueName: \"kubernetes.io/projected/1206c914-fbe7-4e8f-8470-861b0ebf75de-kube-api-access-58j4x\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.700305 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1206c914-fbe7-4e8f-8470-861b0ebf75de-logs\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.700341 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-scripts\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.700405 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-config-data\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.700440 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.700463 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1206c914-fbe7-4e8f-8470-861b0ebf75de-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.700570 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.706209 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.706662 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1206c914-fbe7-4e8f-8470-861b0ebf75de-logs\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.706878 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1206c914-fbe7-4e8f-8470-861b0ebf75de-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.716953 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.718744 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-scripts\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.718769 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-config-data\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.724795 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.730090 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58j4x\" (UniqueName: \"kubernetes.io/projected/1206c914-fbe7-4e8f-8470-861b0ebf75de-kube-api-access-58j4x\") pod \"glance-default-external-api-0\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.739990 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: 
\"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.830460 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.831648 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-vpqrm" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.904604 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ksvnd\" (UniqueName: \"kubernetes.io/projected/bf3a9110-6490-49f0-9bcc-bbd3bd595d58-kube-api-access-ksvnd\") pod \"bf3a9110-6490-49f0-9bcc-bbd3bd595d58\" (UID: \"bf3a9110-6490-49f0-9bcc-bbd3bd595d58\") " Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.904814 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf3a9110-6490-49f0-9bcc-bbd3bd595d58-operator-scripts\") pod \"bf3a9110-6490-49f0-9bcc-bbd3bd595d58\" (UID: \"bf3a9110-6490-49f0-9bcc-bbd3bd595d58\") " Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.905684 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf3a9110-6490-49f0-9bcc-bbd3bd595d58-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bf3a9110-6490-49f0-9bcc-bbd3bd595d58" (UID: "bf3a9110-6490-49f0-9bcc-bbd3bd595d58"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:16:26 crc kubenswrapper[4971]: I1127 07:16:26.917700 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf3a9110-6490-49f0-9bcc-bbd3bd595d58-kube-api-access-ksvnd" (OuterVolumeSpecName: "kube-api-access-ksvnd") pod "bf3a9110-6490-49f0-9bcc-bbd3bd595d58" (UID: "bf3a9110-6490-49f0-9bcc-bbd3bd595d58"). InnerVolumeSpecName "kube-api-access-ksvnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.008628 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf3a9110-6490-49f0-9bcc-bbd3bd595d58-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.008659 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ksvnd\" (UniqueName: \"kubernetes.io/projected/bf3a9110-6490-49f0-9bcc-bbd3bd595d58-kube-api-access-ksvnd\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.138596 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-6991-account-create-update-flxv8" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.150976 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-6022-account-create-update-hwnvl" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.176254 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-91e4-account-create-update-qxzp8" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.196630 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-b4zt8" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.212436 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10d9f99e-353e-4988-a85f-0173312c0a24-operator-scripts\") pod \"10d9f99e-353e-4988-a85f-0173312c0a24\" (UID: \"10d9f99e-353e-4988-a85f-0173312c0a24\") " Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.212729 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7zpvx\" (UniqueName: \"kubernetes.io/projected/10d9f99e-353e-4988-a85f-0173312c0a24-kube-api-access-7zpvx\") pod \"10d9f99e-353e-4988-a85f-0173312c0a24\" (UID: \"10d9f99e-353e-4988-a85f-0173312c0a24\") " Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.216053 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10d9f99e-353e-4988-a85f-0173312c0a24-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "10d9f99e-353e-4988-a85f-0173312c0a24" (UID: "10d9f99e-353e-4988-a85f-0173312c0a24"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.216973 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.221836 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10d9f99e-353e-4988-a85f-0173312c0a24-kube-api-access-7zpvx" (OuterVolumeSpecName: "kube-api-access-7zpvx") pod "10d9f99e-353e-4988-a85f-0173312c0a24" (UID: "10d9f99e-353e-4988-a85f-0173312c0a24"). InnerVolumeSpecName "kube-api-access-7zpvx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.238555 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-p4742" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.314723 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59x2g\" (UniqueName: \"kubernetes.io/projected/f379bced-5e57-4106-86b4-437cf8bce9b4-kube-api-access-59x2g\") pod \"f379bced-5e57-4106-86b4-437cf8bce9b4\" (UID: \"f379bced-5e57-4106-86b4-437cf8bce9b4\") " Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.314767 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-glkvs\" (UniqueName: \"kubernetes.io/projected/eb3b1410-ced5-4047-8aee-eb6e28fd5fb5-kube-api-access-glkvs\") pod \"eb3b1410-ced5-4047-8aee-eb6e28fd5fb5\" (UID: \"eb3b1410-ced5-4047-8aee-eb6e28fd5fb5\") " Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.314790 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66f97b8c-cee1-4869-b034-c66f2830b14e-operator-scripts\") pod \"66f97b8c-cee1-4869-b034-c66f2830b14e\" (UID: \"66f97b8c-cee1-4869-b034-c66f2830b14e\") " Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.314820 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f379bced-5e57-4106-86b4-437cf8bce9b4-operator-scripts\") pod \"f379bced-5e57-4106-86b4-437cf8bce9b4\" (UID: \"f379bced-5e57-4106-86b4-437cf8bce9b4\") " Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.314846 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8cbb2c33-b8a0-4220-916a-ea0a8d738755-httpd-run\") pod \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.314934 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.314996 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-scripts\") pod \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.315041 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzvtg\" (UniqueName: \"kubernetes.io/projected/8cbb2c33-b8a0-4220-916a-ea0a8d738755-kube-api-access-lzvtg\") pod \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.315080 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-internal-tls-certs\") pod \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.315116 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-config-data\") pod \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\" (UID: 
\"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.315144 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc08a45a-a721-4947-97d4-1592a8547091-operator-scripts\") pod \"fc08a45a-a721-4947-97d4-1592a8547091\" (UID: \"fc08a45a-a721-4947-97d4-1592a8547091\") " Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.315165 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb3b1410-ced5-4047-8aee-eb6e28fd5fb5-operator-scripts\") pod \"eb3b1410-ced5-4047-8aee-eb6e28fd5fb5\" (UID: \"eb3b1410-ced5-4047-8aee-eb6e28fd5fb5\") " Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.315185 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-combined-ca-bundle\") pod \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.315221 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7s6nf\" (UniqueName: \"kubernetes.io/projected/fc08a45a-a721-4947-97d4-1592a8547091-kube-api-access-7s6nf\") pod \"fc08a45a-a721-4947-97d4-1592a8547091\" (UID: \"fc08a45a-a721-4947-97d4-1592a8547091\") " Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.315265 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cbb2c33-b8a0-4220-916a-ea0a8d738755-logs\") pod \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\" (UID: \"8cbb2c33-b8a0-4220-916a-ea0a8d738755\") " Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.315358 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-884zm\" (UniqueName: \"kubernetes.io/projected/66f97b8c-cee1-4869-b034-c66f2830b14e-kube-api-access-884zm\") pod \"66f97b8c-cee1-4869-b034-c66f2830b14e\" (UID: \"66f97b8c-cee1-4869-b034-c66f2830b14e\") " Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.315829 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7zpvx\" (UniqueName: \"kubernetes.io/projected/10d9f99e-353e-4988-a85f-0173312c0a24-kube-api-access-7zpvx\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.315842 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10d9f99e-353e-4988-a85f-0173312c0a24-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.321317 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-scripts" (OuterVolumeSpecName: "scripts") pod "8cbb2c33-b8a0-4220-916a-ea0a8d738755" (UID: "8cbb2c33-b8a0-4220-916a-ea0a8d738755"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.322188 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66f97b8c-cee1-4869-b034-c66f2830b14e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "66f97b8c-cee1-4869-b034-c66f2830b14e" (UID: "66f97b8c-cee1-4869-b034-c66f2830b14e"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.322215 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb3b1410-ced5-4047-8aee-eb6e28fd5fb5-kube-api-access-glkvs" (OuterVolumeSpecName: "kube-api-access-glkvs") pod "eb3b1410-ced5-4047-8aee-eb6e28fd5fb5" (UID: "eb3b1410-ced5-4047-8aee-eb6e28fd5fb5"). InnerVolumeSpecName "kube-api-access-glkvs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.322582 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f379bced-5e57-4106-86b4-437cf8bce9b4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f379bced-5e57-4106-86b4-437cf8bce9b4" (UID: "f379bced-5e57-4106-86b4-437cf8bce9b4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.322875 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cbb2c33-b8a0-4220-916a-ea0a8d738755-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8cbb2c33-b8a0-4220-916a-ea0a8d738755" (UID: "8cbb2c33-b8a0-4220-916a-ea0a8d738755"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.323149 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66f97b8c-cee1-4869-b034-c66f2830b14e-kube-api-access-884zm" (OuterVolumeSpecName: "kube-api-access-884zm") pod "66f97b8c-cee1-4869-b034-c66f2830b14e" (UID: "66f97b8c-cee1-4869-b034-c66f2830b14e"). InnerVolumeSpecName "kube-api-access-884zm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.323352 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc08a45a-a721-4947-97d4-1592a8547091-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fc08a45a-a721-4947-97d4-1592a8547091" (UID: "fc08a45a-a721-4947-97d4-1592a8547091"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.323756 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb3b1410-ced5-4047-8aee-eb6e28fd5fb5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eb3b1410-ced5-4047-8aee-eb6e28fd5fb5" (UID: "eb3b1410-ced5-4047-8aee-eb6e28fd5fb5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.326589 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f379bced-5e57-4106-86b4-437cf8bce9b4-kube-api-access-59x2g" (OuterVolumeSpecName: "kube-api-access-59x2g") pod "f379bced-5e57-4106-86b4-437cf8bce9b4" (UID: "f379bced-5e57-4106-86b4-437cf8bce9b4"). InnerVolumeSpecName "kube-api-access-59x2g". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.327320 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cbb2c33-b8a0-4220-916a-ea0a8d738755-logs" (OuterVolumeSpecName: "logs") pod "8cbb2c33-b8a0-4220-916a-ea0a8d738755" (UID: "8cbb2c33-b8a0-4220-916a-ea0a8d738755"). 
InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.330033 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc08a45a-a721-4947-97d4-1592a8547091-kube-api-access-7s6nf" (OuterVolumeSpecName: "kube-api-access-7s6nf") pod "fc08a45a-a721-4947-97d4-1592a8547091" (UID: "fc08a45a-a721-4947-97d4-1592a8547091"). InnerVolumeSpecName "kube-api-access-7s6nf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.342906 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "8cbb2c33-b8a0-4220-916a-ea0a8d738755" (UID: "8cbb2c33-b8a0-4220-916a-ea0a8d738755"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.346602 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cbb2c33-b8a0-4220-916a-ea0a8d738755-kube-api-access-lzvtg" (OuterVolumeSpecName: "kube-api-access-lzvtg") pod "8cbb2c33-b8a0-4220-916a-ea0a8d738755" (UID: "8cbb2c33-b8a0-4220-916a-ea0a8d738755"). InnerVolumeSpecName "kube-api-access-lzvtg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.357718 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8cbb2c33-b8a0-4220-916a-ea0a8d738755" (UID: "8cbb2c33-b8a0-4220-916a-ea0a8d738755"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.365096 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0c866847-6859-4897-8c84-c12182259560","Type":"ContainerStarted","Data":"b36c06b8b5b3ba2f7537d58b4fbf007996a34f9f62eb5c879206923518859e9a"} Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.365139 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0c866847-6859-4897-8c84-c12182259560","Type":"ContainerStarted","Data":"ea6edaa2ec3284a3b69171167488b5e98c04a7e803e9cc993860eb2e2633b2ca"} Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.371367 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6991-account-create-update-flxv8" event={"ID":"10d9f99e-353e-4988-a85f-0173312c0a24","Type":"ContainerDied","Data":"674a438191e1af4ab93630e1e36a381092a72696dad46af52de98a84e39b444e"} Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.371408 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="674a438191e1af4ab93630e1e36a381092a72696dad46af52de98a84e39b444e" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.371479 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-6991-account-create-update-flxv8" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.400986 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-vpqrm" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.401046 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-vpqrm" event={"ID":"bf3a9110-6490-49f0-9bcc-bbd3bd595d58","Type":"ContainerDied","Data":"1a71cffb604fdba8ec6e068fb701dca89293bd76382123f8cbcbe9cc8660e96d"} Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.401192 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a71cffb604fdba8ec6e068fb701dca89293bd76382123f8cbcbe9cc8660e96d" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.410548 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-p4742" event={"ID":"66f97b8c-cee1-4869-b034-c66f2830b14e","Type":"ContainerDied","Data":"b588086e01fecb7c4f1eeb3abbe1d3a3bbec4d950d49548a9948d24b1379c13d"} Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.410597 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b588086e01fecb7c4f1eeb3abbe1d3a3bbec4d950d49548a9948d24b1379c13d" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.410713 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-p4742" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.416582 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-b4zt8" event={"ID":"eb3b1410-ced5-4047-8aee-eb6e28fd5fb5","Type":"ContainerDied","Data":"a700186b7a38a3f198188d40d846503bc2bfd278ff9d7510f573b7d73d7da463"} Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.416729 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a700186b7a38a3f198188d40d846503bc2bfd278ff9d7510f573b7d73d7da463" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.416609 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-b4zt8" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.426960 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-91e4-account-create-update-qxzp8" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.436744 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8cbb2c33-b8a0-4220-916a-ea0a8d738755" (UID: "8cbb2c33-b8a0-4220-916a-ea0a8d738755"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.441047 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-91e4-account-create-update-qxzp8" event={"ID":"fc08a45a-a721-4947-97d4-1592a8547091","Type":"ContainerDied","Data":"fb377194116965acfb5aa959d204d4a8718f7506c8da5a352538c5706ad9f0bf"} Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.441138 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fb377194116965acfb5aa959d204d4a8718f7506c8da5a352538c5706ad9f0bf" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.463851 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.465768 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8cbb2c33-b8a0-4220-916a-ea0a8d738755","Type":"ContainerDied","Data":"c7a47c0dd688f261ce9c99e58714d85b6aae87df2552888de94bfbe81c4c217f"} Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.465843 4971 scope.go:117] "RemoveContainer" containerID="1cfd5131dc75f97aea2ccb4abbcadf7829f00887f56b2a68f2cc459a21f9b7d0" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.469136 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-884zm\" (UniqueName: \"kubernetes.io/projected/66f97b8c-cee1-4869-b034-c66f2830b14e-kube-api-access-884zm\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.469175 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59x2g\" (UniqueName: \"kubernetes.io/projected/f379bced-5e57-4106-86b4-437cf8bce9b4-kube-api-access-59x2g\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.469187 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-glkvs\" (UniqueName: \"kubernetes.io/projected/eb3b1410-ced5-4047-8aee-eb6e28fd5fb5-kube-api-access-glkvs\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.469199 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66f97b8c-cee1-4869-b034-c66f2830b14e-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.469209 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f379bced-5e57-4106-86b4-437cf8bce9b4-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.469222 4971 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8cbb2c33-b8a0-4220-916a-ea0a8d738755-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.469252 4971 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.469266 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.469276 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzvtg\" (UniqueName: \"kubernetes.io/projected/8cbb2c33-b8a0-4220-916a-ea0a8d738755-kube-api-access-lzvtg\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.469285 4971 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.469295 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fc08a45a-a721-4947-97d4-1592a8547091-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 
07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.469310 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb3b1410-ced5-4047-8aee-eb6e28fd5fb5-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.469321 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.469331 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7s6nf\" (UniqueName: \"kubernetes.io/projected/fc08a45a-a721-4947-97d4-1592a8547091-kube-api-access-7s6nf\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.469339 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8cbb2c33-b8a0-4220-916a-ea0a8d738755-logs\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.476229 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-config-data" (OuterVolumeSpecName: "config-data") pod "8cbb2c33-b8a0-4220-916a-ea0a8d738755" (UID: "8cbb2c33-b8a0-4220-916a-ea0a8d738755"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.492362 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-6022-account-create-update-hwnvl" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.493203 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6022-account-create-update-hwnvl" event={"ID":"f379bced-5e57-4106-86b4-437cf8bce9b4","Type":"ContainerDied","Data":"322c0902e687c51e7822adfc23299e21fb17afbb3b31906568102f43f9ff23df"} Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.493254 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="322c0902e687c51e7822adfc23299e21fb17afbb3b31906568102f43f9ff23df" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.514314 4971 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.571442 4971 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.571480 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8cbb2c33-b8a0-4220-916a-ea0a8d738755-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.575777 4971 scope.go:117] "RemoveContainer" containerID="aa0a8caacb07764852e7600c71557c1dc12f7c2b4791c9c93c1bf945ac233ecc" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.680462 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.819753 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.837445 4971 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.901513 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 07:16:27 crc kubenswrapper[4971]: E1127 07:16:27.902161 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc08a45a-a721-4947-97d4-1592a8547091" containerName="mariadb-account-create-update" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.902185 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc08a45a-a721-4947-97d4-1592a8547091" containerName="mariadb-account-create-update" Nov 27 07:16:27 crc kubenswrapper[4971]: E1127 07:16:27.902196 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f379bced-5e57-4106-86b4-437cf8bce9b4" containerName="mariadb-account-create-update" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.902207 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f379bced-5e57-4106-86b4-437cf8bce9b4" containerName="mariadb-account-create-update" Nov 27 07:16:27 crc kubenswrapper[4971]: E1127 07:16:27.902230 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cbb2c33-b8a0-4220-916a-ea0a8d738755" containerName="glance-httpd" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.902238 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cbb2c33-b8a0-4220-916a-ea0a8d738755" containerName="glance-httpd" Nov 27 07:16:27 crc kubenswrapper[4971]: E1127 07:16:27.902259 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cbb2c33-b8a0-4220-916a-ea0a8d738755" containerName="glance-log" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.902269 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cbb2c33-b8a0-4220-916a-ea0a8d738755" containerName="glance-log" Nov 27 07:16:27 crc kubenswrapper[4971]: E1127 07:16:27.902280 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb3b1410-ced5-4047-8aee-eb6e28fd5fb5" containerName="mariadb-database-create" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.902290 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb3b1410-ced5-4047-8aee-eb6e28fd5fb5" containerName="mariadb-database-create" Nov 27 07:16:27 crc kubenswrapper[4971]: E1127 07:16:27.902305 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66f97b8c-cee1-4869-b034-c66f2830b14e" containerName="mariadb-database-create" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.902313 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="66f97b8c-cee1-4869-b034-c66f2830b14e" containerName="mariadb-database-create" Nov 27 07:16:27 crc kubenswrapper[4971]: E1127 07:16:27.902328 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf3a9110-6490-49f0-9bcc-bbd3bd595d58" containerName="mariadb-database-create" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.902336 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf3a9110-6490-49f0-9bcc-bbd3bd595d58" containerName="mariadb-database-create" Nov 27 07:16:27 crc kubenswrapper[4971]: E1127 07:16:27.902363 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10d9f99e-353e-4988-a85f-0173312c0a24" containerName="mariadb-account-create-update" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.902371 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="10d9f99e-353e-4988-a85f-0173312c0a24" containerName="mariadb-account-create-update" Nov 
27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.902606 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cbb2c33-b8a0-4220-916a-ea0a8d738755" containerName="glance-httpd" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.902628 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf3a9110-6490-49f0-9bcc-bbd3bd595d58" containerName="mariadb-database-create" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.902639 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="10d9f99e-353e-4988-a85f-0173312c0a24" containerName="mariadb-account-create-update" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.902653 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="66f97b8c-cee1-4869-b034-c66f2830b14e" containerName="mariadb-database-create" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.902668 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb3b1410-ced5-4047-8aee-eb6e28fd5fb5" containerName="mariadb-database-create" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.902684 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cbb2c33-b8a0-4220-916a-ea0a8d738755" containerName="glance-log" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.902692 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc08a45a-a721-4947-97d4-1592a8547091" containerName="mariadb-account-create-update" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.902700 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f379bced-5e57-4106-86b4-437cf8bce9b4" containerName="mariadb-account-create-update" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.903993 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.909064 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.909263 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Nov 27 07:16:27 crc kubenswrapper[4971]: I1127 07:16:27.915742 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.081073 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c2e2055e-1200-46e8-a49e-c6b490702c9b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.081458 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2e2055e-1200-46e8-a49e-c6b490702c9b-logs\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.081484 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.081565 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpdz5\" (UniqueName: \"kubernetes.io/projected/c2e2055e-1200-46e8-a49e-c6b490702c9b-kube-api-access-xpdz5\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.081598 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.081765 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.081932 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.082004 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.183562 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpdz5\" (UniqueName: \"kubernetes.io/projected/c2e2055e-1200-46e8-a49e-c6b490702c9b-kube-api-access-xpdz5\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.183617 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.183682 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.183735 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.183771 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.183830 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c2e2055e-1200-46e8-a49e-c6b490702c9b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.183878 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2e2055e-1200-46e8-a49e-c6b490702c9b-logs\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.183902 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.186207 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/c2e2055e-1200-46e8-a49e-c6b490702c9b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.186576 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.186910 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2e2055e-1200-46e8-a49e-c6b490702c9b-logs\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.195832 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.195872 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.196342 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.198182 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.233400 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpdz5\" (UniqueName: \"kubernetes.io/projected/c2e2055e-1200-46e8-a49e-c6b490702c9b-kube-api-access-xpdz5\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.260831 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") " pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.501606 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0c866847-6859-4897-8c84-c12182259560","Type":"ContainerStarted","Data":"9355088936fda35d62710a9838328c39cde61ad03d110279a6958deea76a995b"} Nov 
27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.504568 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1206c914-fbe7-4e8f-8470-861b0ebf75de","Type":"ContainerStarted","Data":"80065ffd1cc70ab9f5e46cbb1026eb2c1f6926d1938126123dd083ba516fd507"} Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.504618 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1206c914-fbe7-4e8f-8470-861b0ebf75de","Type":"ContainerStarted","Data":"b377f64f7799f517297f59bd0335b9e821a1d5096028224130d2cd37237cd078"} Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.532401 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 27 07:16:28 crc kubenswrapper[4971]: I1127 07:16:28.573892 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cbb2c33-b8a0-4220-916a-ea0a8d738755" path="/var/lib/kubelet/pods/8cbb2c33-b8a0-4220-916a-ea0a8d738755/volumes" Nov 27 07:16:29 crc kubenswrapper[4971]: I1127 07:16:29.144433 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 07:16:29 crc kubenswrapper[4971]: W1127 07:16:29.146012 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2e2055e_1200_46e8_a49e_c6b490702c9b.slice/crio-1fbc398a0c3a5b193a2d87ab95eda033cbc5cace8dbae7113e580a4b7fa2860b WatchSource:0}: Error finding container 1fbc398a0c3a5b193a2d87ab95eda033cbc5cace8dbae7113e580a4b7fa2860b: Status 404 returned error can't find the container with id 1fbc398a0c3a5b193a2d87ab95eda033cbc5cace8dbae7113e580a4b7fa2860b Nov 27 07:16:29 crc kubenswrapper[4971]: I1127 07:16:29.520279 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1206c914-fbe7-4e8f-8470-861b0ebf75de","Type":"ContainerStarted","Data":"5668516f8e5f98d27ae8ef1c9b2857419b64f8bc06f8aeffeddbb87691114627"} Nov 27 07:16:29 crc kubenswrapper[4971]: I1127 07:16:29.525741 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c2e2055e-1200-46e8-a49e-c6b490702c9b","Type":"ContainerStarted","Data":"1fbc398a0c3a5b193a2d87ab95eda033cbc5cace8dbae7113e580a4b7fa2860b"} Nov 27 07:16:30 crc kubenswrapper[4971]: I1127 07:16:30.553259 4971 generic.go:334] "Generic (PLEG): container finished" podID="0c866847-6859-4897-8c84-c12182259560" containerID="2437579390cce3b766c3e2a0396bce99060dd2ee617e397d1a1875c743a9bebb" exitCode=1 Nov 27 07:16:30 crc kubenswrapper[4971]: I1127 07:16:30.553504 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0c866847-6859-4897-8c84-c12182259560" containerName="ceilometer-central-agent" containerID="cri-o://ea6edaa2ec3284a3b69171167488b5e98c04a7e803e9cc993860eb2e2633b2ca" gracePeriod=30 Nov 27 07:16:30 crc kubenswrapper[4971]: I1127 07:16:30.553574 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0c866847-6859-4897-8c84-c12182259560" containerName="sg-core" containerID="cri-o://9355088936fda35d62710a9838328c39cde61ad03d110279a6958deea76a995b" gracePeriod=30 Nov 27 07:16:30 crc kubenswrapper[4971]: I1127 07:16:30.553597 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="0c866847-6859-4897-8c84-c12182259560" containerName="ceilometer-notification-agent" containerID="cri-o://b36c06b8b5b3ba2f7537d58b4fbf007996a34f9f62eb5c879206923518859e9a" gracePeriod=30 Nov 27 07:16:30 crc kubenswrapper[4971]: I1127 07:16:30.564029 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0c866847-6859-4897-8c84-c12182259560","Type":"ContainerDied","Data":"2437579390cce3b766c3e2a0396bce99060dd2ee617e397d1a1875c743a9bebb"} Nov 27 07:16:30 crc kubenswrapper[4971]: I1127 07:16:30.564073 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c2e2055e-1200-46e8-a49e-c6b490702c9b","Type":"ContainerStarted","Data":"c813e7acb7f4a74ba8b74e4f55535ea0e0307a8468b0623d9ec10735c4807fad"} Nov 27 07:16:30 crc kubenswrapper[4971]: I1127 07:16:30.564086 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c2e2055e-1200-46e8-a49e-c6b490702c9b","Type":"ContainerStarted","Data":"8dbea1c5d8961b687109edee2265a2311adecbc668e362357af80dca1ef37927"} Nov 27 07:16:30 crc kubenswrapper[4971]: I1127 07:16:30.582227 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.5822078269999995 podStartE2EDuration="4.582207827s" podCreationTimestamp="2025-11-27 07:16:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:16:29.58081334 +0000 UTC m=+1427.772857268" watchObservedRunningTime="2025-11-27 07:16:30.582207827 +0000 UTC m=+1428.774251745" Nov 27 07:16:30 crc kubenswrapper[4971]: I1127 07:16:30.606438 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.6064171590000003 podStartE2EDuration="3.606417159s" podCreationTimestamp="2025-11-27 07:16:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:16:30.595596918 +0000 UTC m=+1428.787640836" watchObservedRunningTime="2025-11-27 07:16:30.606417159 +0000 UTC m=+1428.798461077" Nov 27 07:16:31 crc kubenswrapper[4971]: I1127 07:16:31.573035 4971 generic.go:334] "Generic (PLEG): container finished" podID="0c866847-6859-4897-8c84-c12182259560" containerID="9355088936fda35d62710a9838328c39cde61ad03d110279a6958deea76a995b" exitCode=2 Nov 27 07:16:31 crc kubenswrapper[4971]: I1127 07:16:31.573112 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0c866847-6859-4897-8c84-c12182259560","Type":"ContainerDied","Data":"9355088936fda35d62710a9838328c39cde61ad03d110279a6958deea76a995b"} Nov 27 07:16:32 crc kubenswrapper[4971]: I1127 07:16:32.584695 4971 generic.go:334] "Generic (PLEG): container finished" podID="0c866847-6859-4897-8c84-c12182259560" containerID="b36c06b8b5b3ba2f7537d58b4fbf007996a34f9f62eb5c879206923518859e9a" exitCode=0 Nov 27 07:16:32 crc kubenswrapper[4971]: I1127 07:16:32.584754 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0c866847-6859-4897-8c84-c12182259560","Type":"ContainerDied","Data":"b36c06b8b5b3ba2f7537d58b4fbf007996a34f9f62eb5c879206923518859e9a"} Nov 27 07:16:32 crc kubenswrapper[4971]: I1127 07:16:32.841199 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dj8cq"] Nov 27 
07:16:32 crc kubenswrapper[4971]: I1127 07:16:32.843351 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-dj8cq" Nov 27 07:16:32 crc kubenswrapper[4971]: I1127 07:16:32.847251 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-cgmvk" Nov 27 07:16:32 crc kubenswrapper[4971]: I1127 07:16:32.847330 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Nov 27 07:16:32 crc kubenswrapper[4971]: I1127 07:16:32.847421 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 27 07:16:32 crc kubenswrapper[4971]: I1127 07:16:32.851177 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dj8cq"] Nov 27 07:16:32 crc kubenswrapper[4971]: I1127 07:16:32.993985 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/146ec507-b566-4056-92f6-2f12b2d6f11c-config-data\") pod \"nova-cell0-conductor-db-sync-dj8cq\" (UID: \"146ec507-b566-4056-92f6-2f12b2d6f11c\") " pod="openstack/nova-cell0-conductor-db-sync-dj8cq" Nov 27 07:16:32 crc kubenswrapper[4971]: I1127 07:16:32.994056 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96sc9\" (UniqueName: \"kubernetes.io/projected/146ec507-b566-4056-92f6-2f12b2d6f11c-kube-api-access-96sc9\") pod \"nova-cell0-conductor-db-sync-dj8cq\" (UID: \"146ec507-b566-4056-92f6-2f12b2d6f11c\") " pod="openstack/nova-cell0-conductor-db-sync-dj8cq" Nov 27 07:16:32 crc kubenswrapper[4971]: I1127 07:16:32.994171 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/146ec507-b566-4056-92f6-2f12b2d6f11c-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-dj8cq\" (UID: \"146ec507-b566-4056-92f6-2f12b2d6f11c\") " pod="openstack/nova-cell0-conductor-db-sync-dj8cq" Nov 27 07:16:32 crc kubenswrapper[4971]: I1127 07:16:32.994270 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/146ec507-b566-4056-92f6-2f12b2d6f11c-scripts\") pod \"nova-cell0-conductor-db-sync-dj8cq\" (UID: \"146ec507-b566-4056-92f6-2f12b2d6f11c\") " pod="openstack/nova-cell0-conductor-db-sync-dj8cq" Nov 27 07:16:33 crc kubenswrapper[4971]: I1127 07:16:33.096607 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/146ec507-b566-4056-92f6-2f12b2d6f11c-config-data\") pod \"nova-cell0-conductor-db-sync-dj8cq\" (UID: \"146ec507-b566-4056-92f6-2f12b2d6f11c\") " pod="openstack/nova-cell0-conductor-db-sync-dj8cq" Nov 27 07:16:33 crc kubenswrapper[4971]: I1127 07:16:33.097163 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96sc9\" (UniqueName: \"kubernetes.io/projected/146ec507-b566-4056-92f6-2f12b2d6f11c-kube-api-access-96sc9\") pod \"nova-cell0-conductor-db-sync-dj8cq\" (UID: \"146ec507-b566-4056-92f6-2f12b2d6f11c\") " pod="openstack/nova-cell0-conductor-db-sync-dj8cq" Nov 27 07:16:33 crc kubenswrapper[4971]: I1127 07:16:33.097203 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/146ec507-b566-4056-92f6-2f12b2d6f11c-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-dj8cq\" (UID: \"146ec507-b566-4056-92f6-2f12b2d6f11c\") " pod="openstack/nova-cell0-conductor-db-sync-dj8cq" Nov 27 07:16:33 crc kubenswrapper[4971]: I1127 07:16:33.097252 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/146ec507-b566-4056-92f6-2f12b2d6f11c-scripts\") pod \"nova-cell0-conductor-db-sync-dj8cq\" (UID: \"146ec507-b566-4056-92f6-2f12b2d6f11c\") " pod="openstack/nova-cell0-conductor-db-sync-dj8cq" Nov 27 07:16:33 crc kubenswrapper[4971]: I1127 07:16:33.104844 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/146ec507-b566-4056-92f6-2f12b2d6f11c-scripts\") pod \"nova-cell0-conductor-db-sync-dj8cq\" (UID: \"146ec507-b566-4056-92f6-2f12b2d6f11c\") " pod="openstack/nova-cell0-conductor-db-sync-dj8cq" Nov 27 07:16:33 crc kubenswrapper[4971]: I1127 07:16:33.104879 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/146ec507-b566-4056-92f6-2f12b2d6f11c-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-dj8cq\" (UID: \"146ec507-b566-4056-92f6-2f12b2d6f11c\") " pod="openstack/nova-cell0-conductor-db-sync-dj8cq" Nov 27 07:16:33 crc kubenswrapper[4971]: I1127 07:16:33.111356 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/146ec507-b566-4056-92f6-2f12b2d6f11c-config-data\") pod \"nova-cell0-conductor-db-sync-dj8cq\" (UID: \"146ec507-b566-4056-92f6-2f12b2d6f11c\") " pod="openstack/nova-cell0-conductor-db-sync-dj8cq" Nov 27 07:16:33 crc kubenswrapper[4971]: I1127 07:16:33.115382 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96sc9\" (UniqueName: \"kubernetes.io/projected/146ec507-b566-4056-92f6-2f12b2d6f11c-kube-api-access-96sc9\") pod \"nova-cell0-conductor-db-sync-dj8cq\" (UID: \"146ec507-b566-4056-92f6-2f12b2d6f11c\") " pod="openstack/nova-cell0-conductor-db-sync-dj8cq" Nov 27 07:16:33 crc kubenswrapper[4971]: I1127 07:16:33.172197 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-dj8cq" Nov 27 07:16:33 crc kubenswrapper[4971]: I1127 07:16:33.693678 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dj8cq"] Nov 27 07:16:34 crc kubenswrapper[4971]: I1127 07:16:34.565763 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Nov 27 07:16:34 crc kubenswrapper[4971]: I1127 07:16:34.621055 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-dj8cq" event={"ID":"146ec507-b566-4056-92f6-2f12b2d6f11c","Type":"ContainerStarted","Data":"8d60b9b824532010a0ea2ccc5c08f04d0a42fa9ef20a90f1bb325338b7d522b0"} Nov 27 07:16:35 crc kubenswrapper[4971]: I1127 07:16:35.633410 4971 generic.go:334] "Generic (PLEG): container finished" podID="0c866847-6859-4897-8c84-c12182259560" containerID="ea6edaa2ec3284a3b69171167488b5e98c04a7e803e9cc993860eb2e2633b2ca" exitCode=0 Nov 27 07:16:35 crc kubenswrapper[4971]: I1127 07:16:35.633849 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0c866847-6859-4897-8c84-c12182259560","Type":"ContainerDied","Data":"ea6edaa2ec3284a3b69171167488b5e98c04a7e803e9cc993860eb2e2633b2ca"} Nov 27 07:16:35 crc kubenswrapper[4971]: I1127 07:16:35.829730 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:16:35 crc kubenswrapper[4971]: I1127 07:16:35.960246 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0c866847-6859-4897-8c84-c12182259560-log-httpd\") pod \"0c866847-6859-4897-8c84-c12182259560\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " Nov 27 07:16:35 crc kubenswrapper[4971]: I1127 07:16:35.960675 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c866847-6859-4897-8c84-c12182259560-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0c866847-6859-4897-8c84-c12182259560" (UID: "0c866847-6859-4897-8c84-c12182259560"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:16:35 crc kubenswrapper[4971]: I1127 07:16:35.961216 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-combined-ca-bundle\") pod \"0c866847-6859-4897-8c84-c12182259560\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " Nov 27 07:16:35 crc kubenswrapper[4971]: I1127 07:16:35.961269 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-sg-core-conf-yaml\") pod \"0c866847-6859-4897-8c84-c12182259560\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " Nov 27 07:16:35 crc kubenswrapper[4971]: I1127 07:16:35.961382 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0c866847-6859-4897-8c84-c12182259560-run-httpd\") pod \"0c866847-6859-4897-8c84-c12182259560\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " Nov 27 07:16:35 crc kubenswrapper[4971]: I1127 07:16:35.961417 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-scripts\") pod \"0c866847-6859-4897-8c84-c12182259560\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " Nov 27 07:16:35 crc kubenswrapper[4971]: I1127 07:16:35.961449 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r4p4k\" (UniqueName: \"kubernetes.io/projected/0c866847-6859-4897-8c84-c12182259560-kube-api-access-r4p4k\") pod \"0c866847-6859-4897-8c84-c12182259560\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " Nov 27 07:16:35 crc kubenswrapper[4971]: I1127 07:16:35.962362 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-config-data\") pod \"0c866847-6859-4897-8c84-c12182259560\" (UID: \"0c866847-6859-4897-8c84-c12182259560\") " Nov 27 07:16:35 crc kubenswrapper[4971]: I1127 07:16:35.961643 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c866847-6859-4897-8c84-c12182259560-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0c866847-6859-4897-8c84-c12182259560" (UID: "0c866847-6859-4897-8c84-c12182259560"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:16:35 crc kubenswrapper[4971]: I1127 07:16:35.963326 4971 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0c866847-6859-4897-8c84-c12182259560-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:35 crc kubenswrapper[4971]: I1127 07:16:35.963350 4971 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0c866847-6859-4897-8c84-c12182259560-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:35 crc kubenswrapper[4971]: I1127 07:16:35.968123 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-scripts" (OuterVolumeSpecName: "scripts") pod "0c866847-6859-4897-8c84-c12182259560" (UID: "0c866847-6859-4897-8c84-c12182259560"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:35 crc kubenswrapper[4971]: I1127 07:16:35.968333 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c866847-6859-4897-8c84-c12182259560-kube-api-access-r4p4k" (OuterVolumeSpecName: "kube-api-access-r4p4k") pod "0c866847-6859-4897-8c84-c12182259560" (UID: "0c866847-6859-4897-8c84-c12182259560"). InnerVolumeSpecName "kube-api-access-r4p4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:16:35 crc kubenswrapper[4971]: I1127 07:16:35.995678 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0c866847-6859-4897-8c84-c12182259560" (UID: "0c866847-6859-4897-8c84-c12182259560"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.038660 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0c866847-6859-4897-8c84-c12182259560" (UID: "0c866847-6859-4897-8c84-c12182259560"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.064924 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.064951 4971 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.064961 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.064969 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r4p4k\" (UniqueName: \"kubernetes.io/projected/0c866847-6859-4897-8c84-c12182259560-kube-api-access-r4p4k\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.075939 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-config-data" (OuterVolumeSpecName: "config-data") pod "0c866847-6859-4897-8c84-c12182259560" (UID: "0c866847-6859-4897-8c84-c12182259560"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.172099 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c866847-6859-4897-8c84-c12182259560-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.650218 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0c866847-6859-4897-8c84-c12182259560","Type":"ContainerDied","Data":"a65b4f1a0843ab40b691ae33a70d99d7a10ad6df28ce9022a56452fb487909f1"} Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.650280 4971 scope.go:117] "RemoveContainer" containerID="2437579390cce3b766c3e2a0396bce99060dd2ee617e397d1a1875c743a9bebb" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.650304 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.676314 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.689915 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.703738 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:36 crc kubenswrapper[4971]: E1127 07:16:36.704601 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c866847-6859-4897-8c84-c12182259560" containerName="proxy-httpd" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.704670 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c866847-6859-4897-8c84-c12182259560" containerName="proxy-httpd" Nov 27 07:16:36 crc kubenswrapper[4971]: E1127 07:16:36.704769 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c866847-6859-4897-8c84-c12182259560" containerName="ceilometer-notification-agent" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.704822 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c866847-6859-4897-8c84-c12182259560" containerName="ceilometer-notification-agent" Nov 27 07:16:36 crc kubenswrapper[4971]: E1127 07:16:36.704876 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c866847-6859-4897-8c84-c12182259560" containerName="ceilometer-central-agent" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.704928 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c866847-6859-4897-8c84-c12182259560" containerName="ceilometer-central-agent" Nov 27 07:16:36 crc kubenswrapper[4971]: E1127 07:16:36.705012 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c866847-6859-4897-8c84-c12182259560" containerName="sg-core" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.705063 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c866847-6859-4897-8c84-c12182259560" containerName="sg-core" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.705308 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c866847-6859-4897-8c84-c12182259560" containerName="ceilometer-central-agent" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.705375 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c866847-6859-4897-8c84-c12182259560" containerName="sg-core" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.705439 4971 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="0c866847-6859-4897-8c84-c12182259560" containerName="proxy-httpd" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.705499 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c866847-6859-4897-8c84-c12182259560" containerName="ceilometer-notification-agent" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.708391 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.710473 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.710792 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.710974 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.718320 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.783145 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-scripts\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.783243 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kb4wj\" (UniqueName: \"kubernetes.io/projected/62899a94-028e-4eb3-810e-33e04fb66eb1-kube-api-access-kb4wj\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.783377 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.783668 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62899a94-028e-4eb3-810e-33e04fb66eb1-run-httpd\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.783766 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.783888 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-config-data\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.784289 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/62899a94-028e-4eb3-810e-33e04fb66eb1-log-httpd\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.784430 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.831363 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.831411 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.863898 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.873364 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.888762 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-scripts\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.888845 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kb4wj\" (UniqueName: \"kubernetes.io/projected/62899a94-028e-4eb3-810e-33e04fb66eb1-kube-api-access-kb4wj\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.888876 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.888931 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62899a94-028e-4eb3-810e-33e04fb66eb1-run-httpd\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.888960 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.888977 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-config-data\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.889009 4971 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62899a94-028e-4eb3-810e-33e04fb66eb1-log-httpd\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.889035 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.889495 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62899a94-028e-4eb3-810e-33e04fb66eb1-log-httpd\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.889751 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62899a94-028e-4eb3-810e-33e04fb66eb1-run-httpd\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.894000 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.896943 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.902323 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.902728 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-scripts\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.908625 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-config-data\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:36 crc kubenswrapper[4971]: I1127 07:16:36.920377 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kb4wj\" (UniqueName: \"kubernetes.io/projected/62899a94-028e-4eb3-810e-33e04fb66eb1-kube-api-access-kb4wj\") pod \"ceilometer-0\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " pod="openstack/ceilometer-0" Nov 27 07:16:37 crc kubenswrapper[4971]: I1127 07:16:37.033221 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:16:37 crc kubenswrapper[4971]: I1127 07:16:37.659582 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 27 07:16:37 crc kubenswrapper[4971]: I1127 07:16:37.659632 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 27 07:16:38 crc kubenswrapper[4971]: I1127 07:16:38.533326 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 27 07:16:38 crc kubenswrapper[4971]: I1127 07:16:38.533628 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 27 07:16:38 crc kubenswrapper[4971]: I1127 07:16:38.565246 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c866847-6859-4897-8c84-c12182259560" path="/var/lib/kubelet/pods/0c866847-6859-4897-8c84-c12182259560/volumes" Nov 27 07:16:38 crc kubenswrapper[4971]: I1127 07:16:38.569653 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 27 07:16:38 crc kubenswrapper[4971]: I1127 07:16:38.591865 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 27 07:16:38 crc kubenswrapper[4971]: I1127 07:16:38.669242 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 27 07:16:38 crc kubenswrapper[4971]: I1127 07:16:38.670256 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 27 07:16:39 crc kubenswrapper[4971]: I1127 07:16:39.714341 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 27 07:16:39 crc kubenswrapper[4971]: I1127 07:16:39.714469 4971 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 27 07:16:39 crc kubenswrapper[4971]: I1127 07:16:39.718662 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 27 07:16:40 crc kubenswrapper[4971]: I1127 07:16:40.684487 4971 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 27 07:16:40 crc kubenswrapper[4971]: I1127 07:16:40.684827 4971 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 27 07:16:40 crc kubenswrapper[4971]: I1127 07:16:40.793126 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 27 07:16:40 crc kubenswrapper[4971]: I1127 07:16:40.828645 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 27 07:16:41 crc kubenswrapper[4971]: I1127 07:16:41.211647 4971 scope.go:117] "RemoveContainer" containerID="9355088936fda35d62710a9838328c39cde61ad03d110279a6958deea76a995b" Nov 27 07:16:41 crc kubenswrapper[4971]: I1127 07:16:41.272263 4971 scope.go:117] "RemoveContainer" containerID="b36c06b8b5b3ba2f7537d58b4fbf007996a34f9f62eb5c879206923518859e9a" Nov 27 07:16:41 crc kubenswrapper[4971]: I1127 07:16:41.669065 4971 scope.go:117] "RemoveContainer" containerID="ea6edaa2ec3284a3b69171167488b5e98c04a7e803e9cc993860eb2e2633b2ca" Nov 27 07:16:41 crc kubenswrapper[4971]: W1127 07:16:41.743629 4971 manager.go:1169] Failed to process 
watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62899a94_028e_4eb3_810e_33e04fb66eb1.slice/crio-b93435106c2862a8ec43840673dcf5a98f526f6509af891a45c9f02579b27ca4 WatchSource:0}: Error finding container b93435106c2862a8ec43840673dcf5a98f526f6509af891a45c9f02579b27ca4: Status 404 returned error can't find the container with id b93435106c2862a8ec43840673dcf5a98f526f6509af891a45c9f02579b27ca4 Nov 27 07:16:41 crc kubenswrapper[4971]: I1127 07:16:41.747254 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:42 crc kubenswrapper[4971]: I1127 07:16:42.730242 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-dj8cq" event={"ID":"146ec507-b566-4056-92f6-2f12b2d6f11c","Type":"ContainerStarted","Data":"311e0d520fd8e54093bc7d5a472d0e50638192c04dc79858d7a39b858f1c705d"} Nov 27 07:16:42 crc kubenswrapper[4971]: I1127 07:16:42.731930 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62899a94-028e-4eb3-810e-33e04fb66eb1","Type":"ContainerStarted","Data":"e9300ca155bdecafbe1487e3e83745c1e51f92cda3b78b46b8e9361b7907aff7"} Nov 27 07:16:42 crc kubenswrapper[4971]: I1127 07:16:42.731959 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62899a94-028e-4eb3-810e-33e04fb66eb1","Type":"ContainerStarted","Data":"b93435106c2862a8ec43840673dcf5a98f526f6509af891a45c9f02579b27ca4"} Nov 27 07:16:42 crc kubenswrapper[4971]: I1127 07:16:42.779942 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-dj8cq" podStartSLOduration=3.2011937 podStartE2EDuration="10.779923483s" podCreationTimestamp="2025-11-27 07:16:32 +0000 UTC" firstStartedPulling="2025-11-27 07:16:33.705322522 +0000 UTC m=+1431.897366440" lastFinishedPulling="2025-11-27 07:16:41.284052305 +0000 UTC m=+1439.476096223" observedRunningTime="2025-11-27 07:16:42.773250263 +0000 UTC m=+1440.965294181" watchObservedRunningTime="2025-11-27 07:16:42.779923483 +0000 UTC m=+1440.971967401" Nov 27 07:16:44 crc kubenswrapper[4971]: I1127 07:16:44.485093 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:44 crc kubenswrapper[4971]: I1127 07:16:44.758367 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62899a94-028e-4eb3-810e-33e04fb66eb1","Type":"ContainerStarted","Data":"2a8f63f30e2227a559b64cce3a0b558855c1d16151162e61f6563b5071a40882"} Nov 27 07:16:45 crc kubenswrapper[4971]: I1127 07:16:45.767795 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62899a94-028e-4eb3-810e-33e04fb66eb1","Type":"ContainerStarted","Data":"e8285844545d24c4db8c275d5baa24cea88dd206bd052a43ba0038ee9bfe057f"} Nov 27 07:16:46 crc kubenswrapper[4971]: I1127 07:16:46.781609 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62899a94-028e-4eb3-810e-33e04fb66eb1","Type":"ContainerStarted","Data":"11d27bcdcf37cc2dba6eebda83d3b9b018e269d0014f0acdb7c5f1a75eb88dc9"} Nov 27 07:16:46 crc kubenswrapper[4971]: I1127 07:16:46.781796 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerName="ceilometer-central-agent" containerID="cri-o://e9300ca155bdecafbe1487e3e83745c1e51f92cda3b78b46b8e9361b7907aff7" gracePeriod=30 Nov 27 07:16:46 
crc kubenswrapper[4971]: I1127 07:16:46.781803 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerName="sg-core" containerID="cri-o://e8285844545d24c4db8c275d5baa24cea88dd206bd052a43ba0038ee9bfe057f" gracePeriod=30 Nov 27 07:16:46 crc kubenswrapper[4971]: I1127 07:16:46.782091 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 27 07:16:46 crc kubenswrapper[4971]: I1127 07:16:46.781872 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerName="ceilometer-notification-agent" containerID="cri-o://2a8f63f30e2227a559b64cce3a0b558855c1d16151162e61f6563b5071a40882" gracePeriod=30 Nov 27 07:16:46 crc kubenswrapper[4971]: I1127 07:16:46.781825 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerName="proxy-httpd" containerID="cri-o://11d27bcdcf37cc2dba6eebda83d3b9b018e269d0014f0acdb7c5f1a75eb88dc9" gracePeriod=30 Nov 27 07:16:46 crc kubenswrapper[4971]: I1127 07:16:46.814508 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=6.332442289 podStartE2EDuration="10.81448426s" podCreationTimestamp="2025-11-27 07:16:36 +0000 UTC" firstStartedPulling="2025-11-27 07:16:41.748375253 +0000 UTC m=+1439.940419161" lastFinishedPulling="2025-11-27 07:16:46.230417214 +0000 UTC m=+1444.422461132" observedRunningTime="2025-11-27 07:16:46.806510315 +0000 UTC m=+1444.998554243" watchObservedRunningTime="2025-11-27 07:16:46.81448426 +0000 UTC m=+1445.006528178" Nov 27 07:16:47 crc kubenswrapper[4971]: I1127 07:16:47.793791 4971 generic.go:334] "Generic (PLEG): container finished" podID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerID="11d27bcdcf37cc2dba6eebda83d3b9b018e269d0014f0acdb7c5f1a75eb88dc9" exitCode=0 Nov 27 07:16:47 crc kubenswrapper[4971]: I1127 07:16:47.794116 4971 generic.go:334] "Generic (PLEG): container finished" podID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerID="e8285844545d24c4db8c275d5baa24cea88dd206bd052a43ba0038ee9bfe057f" exitCode=2 Nov 27 07:16:47 crc kubenswrapper[4971]: I1127 07:16:47.794135 4971 generic.go:334] "Generic (PLEG): container finished" podID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerID="2a8f63f30e2227a559b64cce3a0b558855c1d16151162e61f6563b5071a40882" exitCode=0 Nov 27 07:16:47 crc kubenswrapper[4971]: I1127 07:16:47.793912 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62899a94-028e-4eb3-810e-33e04fb66eb1","Type":"ContainerDied","Data":"11d27bcdcf37cc2dba6eebda83d3b9b018e269d0014f0acdb7c5f1a75eb88dc9"} Nov 27 07:16:47 crc kubenswrapper[4971]: I1127 07:16:47.794198 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62899a94-028e-4eb3-810e-33e04fb66eb1","Type":"ContainerDied","Data":"e8285844545d24c4db8c275d5baa24cea88dd206bd052a43ba0038ee9bfe057f"} Nov 27 07:16:47 crc kubenswrapper[4971]: I1127 07:16:47.794218 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62899a94-028e-4eb3-810e-33e04fb66eb1","Type":"ContainerDied","Data":"2a8f63f30e2227a559b64cce3a0b558855c1d16151162e61f6563b5071a40882"} Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.663895 4971 util.go:48] "No ready sandbox for 
pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.831275 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62899a94-028e-4eb3-810e-33e04fb66eb1-log-httpd\") pod \"62899a94-028e-4eb3-810e-33e04fb66eb1\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.831394 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kb4wj\" (UniqueName: \"kubernetes.io/projected/62899a94-028e-4eb3-810e-33e04fb66eb1-kube-api-access-kb4wj\") pod \"62899a94-028e-4eb3-810e-33e04fb66eb1\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.831418 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62899a94-028e-4eb3-810e-33e04fb66eb1-run-httpd\") pod \"62899a94-028e-4eb3-810e-33e04fb66eb1\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.831516 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-combined-ca-bundle\") pod \"62899a94-028e-4eb3-810e-33e04fb66eb1\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.831572 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-config-data\") pod \"62899a94-028e-4eb3-810e-33e04fb66eb1\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.831656 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-sg-core-conf-yaml\") pod \"62899a94-028e-4eb3-810e-33e04fb66eb1\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.831936 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62899a94-028e-4eb3-810e-33e04fb66eb1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "62899a94-028e-4eb3-810e-33e04fb66eb1" (UID: "62899a94-028e-4eb3-810e-33e04fb66eb1"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.832369 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-scripts\") pod \"62899a94-028e-4eb3-810e-33e04fb66eb1\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.832402 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-ceilometer-tls-certs\") pod \"62899a94-028e-4eb3-810e-33e04fb66eb1\" (UID: \"62899a94-028e-4eb3-810e-33e04fb66eb1\") " Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.832578 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62899a94-028e-4eb3-810e-33e04fb66eb1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "62899a94-028e-4eb3-810e-33e04fb66eb1" (UID: "62899a94-028e-4eb3-810e-33e04fb66eb1"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.832907 4971 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62899a94-028e-4eb3-810e-33e04fb66eb1-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.832922 4971 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/62899a94-028e-4eb3-810e-33e04fb66eb1-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.838910 4971 generic.go:334] "Generic (PLEG): container finished" podID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerID="e9300ca155bdecafbe1487e3e83745c1e51f92cda3b78b46b8e9361b7907aff7" exitCode=0 Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.838956 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62899a94-028e-4eb3-810e-33e04fb66eb1","Type":"ContainerDied","Data":"e9300ca155bdecafbe1487e3e83745c1e51f92cda3b78b46b8e9361b7907aff7"} Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.839000 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"62899a94-028e-4eb3-810e-33e04fb66eb1","Type":"ContainerDied","Data":"b93435106c2862a8ec43840673dcf5a98f526f6509af891a45c9f02579b27ca4"} Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.839020 4971 scope.go:117] "RemoveContainer" containerID="11d27bcdcf37cc2dba6eebda83d3b9b018e269d0014f0acdb7c5f1a75eb88dc9" Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.839160 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-scripts" (OuterVolumeSpecName: "scripts") pod "62899a94-028e-4eb3-810e-33e04fb66eb1" (UID: "62899a94-028e-4eb3-810e-33e04fb66eb1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.839167 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.840178 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62899a94-028e-4eb3-810e-33e04fb66eb1-kube-api-access-kb4wj" (OuterVolumeSpecName: "kube-api-access-kb4wj") pod "62899a94-028e-4eb3-810e-33e04fb66eb1" (UID: "62899a94-028e-4eb3-810e-33e04fb66eb1"). InnerVolumeSpecName "kube-api-access-kb4wj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.878943 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "62899a94-028e-4eb3-810e-33e04fb66eb1" (UID: "62899a94-028e-4eb3-810e-33e04fb66eb1"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.903152 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "62899a94-028e-4eb3-810e-33e04fb66eb1" (UID: "62899a94-028e-4eb3-810e-33e04fb66eb1"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.935508 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kb4wj\" (UniqueName: \"kubernetes.io/projected/62899a94-028e-4eb3-810e-33e04fb66eb1-kube-api-access-kb4wj\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.935822 4971 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.935939 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.936017 4971 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.939559 4971 scope.go:117] "RemoveContainer" containerID="e8285844545d24c4db8c275d5baa24cea88dd206bd052a43ba0038ee9bfe057f" Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.951117 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "62899a94-028e-4eb3-810e-33e04fb66eb1" (UID: "62899a94-028e-4eb3-810e-33e04fb66eb1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.954955 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-config-data" (OuterVolumeSpecName: "config-data") pod "62899a94-028e-4eb3-810e-33e04fb66eb1" (UID: "62899a94-028e-4eb3-810e-33e04fb66eb1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.960635 4971 scope.go:117] "RemoveContainer" containerID="2a8f63f30e2227a559b64cce3a0b558855c1d16151162e61f6563b5071a40882" Nov 27 07:16:51 crc kubenswrapper[4971]: I1127 07:16:51.984930 4971 scope.go:117] "RemoveContainer" containerID="e9300ca155bdecafbe1487e3e83745c1e51f92cda3b78b46b8e9361b7907aff7" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.004756 4971 scope.go:117] "RemoveContainer" containerID="11d27bcdcf37cc2dba6eebda83d3b9b018e269d0014f0acdb7c5f1a75eb88dc9" Nov 27 07:16:52 crc kubenswrapper[4971]: E1127 07:16:52.005190 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11d27bcdcf37cc2dba6eebda83d3b9b018e269d0014f0acdb7c5f1a75eb88dc9\": container with ID starting with 11d27bcdcf37cc2dba6eebda83d3b9b018e269d0014f0acdb7c5f1a75eb88dc9 not found: ID does not exist" containerID="11d27bcdcf37cc2dba6eebda83d3b9b018e269d0014f0acdb7c5f1a75eb88dc9" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.005230 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11d27bcdcf37cc2dba6eebda83d3b9b018e269d0014f0acdb7c5f1a75eb88dc9"} err="failed to get container status \"11d27bcdcf37cc2dba6eebda83d3b9b018e269d0014f0acdb7c5f1a75eb88dc9\": rpc error: code = NotFound desc = could not find container \"11d27bcdcf37cc2dba6eebda83d3b9b018e269d0014f0acdb7c5f1a75eb88dc9\": container with ID starting with 11d27bcdcf37cc2dba6eebda83d3b9b018e269d0014f0acdb7c5f1a75eb88dc9 not found: ID does not exist" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.005259 4971 scope.go:117] "RemoveContainer" containerID="e8285844545d24c4db8c275d5baa24cea88dd206bd052a43ba0038ee9bfe057f" Nov 27 07:16:52 crc kubenswrapper[4971]: E1127 07:16:52.005506 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8285844545d24c4db8c275d5baa24cea88dd206bd052a43ba0038ee9bfe057f\": container with ID starting with e8285844545d24c4db8c275d5baa24cea88dd206bd052a43ba0038ee9bfe057f not found: ID does not exist" containerID="e8285844545d24c4db8c275d5baa24cea88dd206bd052a43ba0038ee9bfe057f" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.005526 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8285844545d24c4db8c275d5baa24cea88dd206bd052a43ba0038ee9bfe057f"} err="failed to get container status \"e8285844545d24c4db8c275d5baa24cea88dd206bd052a43ba0038ee9bfe057f\": rpc error: code = NotFound desc = could not find container \"e8285844545d24c4db8c275d5baa24cea88dd206bd052a43ba0038ee9bfe057f\": container with ID starting with e8285844545d24c4db8c275d5baa24cea88dd206bd052a43ba0038ee9bfe057f not found: ID does not exist" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.005626 4971 scope.go:117] "RemoveContainer" containerID="2a8f63f30e2227a559b64cce3a0b558855c1d16151162e61f6563b5071a40882" Nov 27 07:16:52 crc kubenswrapper[4971]: E1127 07:16:52.006068 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a8f63f30e2227a559b64cce3a0b558855c1d16151162e61f6563b5071a40882\": container with ID starting with 2a8f63f30e2227a559b64cce3a0b558855c1d16151162e61f6563b5071a40882 not found: ID does not exist" containerID="2a8f63f30e2227a559b64cce3a0b558855c1d16151162e61f6563b5071a40882" Nov 27 07:16:52 crc 
kubenswrapper[4971]: I1127 07:16:52.006104 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a8f63f30e2227a559b64cce3a0b558855c1d16151162e61f6563b5071a40882"} err="failed to get container status \"2a8f63f30e2227a559b64cce3a0b558855c1d16151162e61f6563b5071a40882\": rpc error: code = NotFound desc = could not find container \"2a8f63f30e2227a559b64cce3a0b558855c1d16151162e61f6563b5071a40882\": container with ID starting with 2a8f63f30e2227a559b64cce3a0b558855c1d16151162e61f6563b5071a40882 not found: ID does not exist" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.006136 4971 scope.go:117] "RemoveContainer" containerID="e9300ca155bdecafbe1487e3e83745c1e51f92cda3b78b46b8e9361b7907aff7" Nov 27 07:16:52 crc kubenswrapper[4971]: E1127 07:16:52.006491 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9300ca155bdecafbe1487e3e83745c1e51f92cda3b78b46b8e9361b7907aff7\": container with ID starting with e9300ca155bdecafbe1487e3e83745c1e51f92cda3b78b46b8e9361b7907aff7 not found: ID does not exist" containerID="e9300ca155bdecafbe1487e3e83745c1e51f92cda3b78b46b8e9361b7907aff7" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.006529 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9300ca155bdecafbe1487e3e83745c1e51f92cda3b78b46b8e9361b7907aff7"} err="failed to get container status \"e9300ca155bdecafbe1487e3e83745c1e51f92cda3b78b46b8e9361b7907aff7\": rpc error: code = NotFound desc = could not find container \"e9300ca155bdecafbe1487e3e83745c1e51f92cda3b78b46b8e9361b7907aff7\": container with ID starting with e9300ca155bdecafbe1487e3e83745c1e51f92cda3b78b46b8e9361b7907aff7 not found: ID does not exist" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.038067 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.038096 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62899a94-028e-4eb3-810e-33e04fb66eb1-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.182049 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.197914 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.209551 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:52 crc kubenswrapper[4971]: E1127 07:16:52.210097 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerName="ceilometer-central-agent" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.210117 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerName="ceilometer-central-agent" Nov 27 07:16:52 crc kubenswrapper[4971]: E1127 07:16:52.210133 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerName="proxy-httpd" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.210139 4971 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerName="proxy-httpd" Nov 27 07:16:52 crc kubenswrapper[4971]: E1127 07:16:52.210147 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerName="sg-core" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.210153 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerName="sg-core" Nov 27 07:16:52 crc kubenswrapper[4971]: E1127 07:16:52.210166 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerName="ceilometer-notification-agent" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.210172 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerName="ceilometer-notification-agent" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.210378 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerName="sg-core" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.210392 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerName="ceilometer-notification-agent" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.210401 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerName="proxy-httpd" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.210417 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="62899a94-028e-4eb3-810e-33e04fb66eb1" containerName="ceilometer-central-agent" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.212176 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.214895 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.214974 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.215182 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.221255 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.353523 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a9929765-53ba-4cdc-85f7-1e7c9568714b-log-httpd\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.353688 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.353714 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.353738 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a9929765-53ba-4cdc-85f7-1e7c9568714b-run-httpd\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.353756 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsgsm\" (UniqueName: \"kubernetes.io/projected/a9929765-53ba-4cdc-85f7-1e7c9568714b-kube-api-access-xsgsm\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.353775 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.353842 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-scripts\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.353872 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-config-data\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.416112 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:52 crc kubenswrapper[4971]: E1127 07:16:52.417025 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[ceilometer-tls-certs combined-ca-bundle config-data kube-api-access-xsgsm log-httpd run-httpd scripts sg-core-conf-yaml], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/ceilometer-0" podUID="a9929765-53ba-4cdc-85f7-1e7c9568714b" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.455829 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-scripts\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.455880 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-config-data\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.455935 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a9929765-53ba-4cdc-85f7-1e7c9568714b-log-httpd\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.455967 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.455987 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.456009 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a9929765-53ba-4cdc-85f7-1e7c9568714b-run-httpd\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.456026 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsgsm\" (UniqueName: \"kubernetes.io/projected/a9929765-53ba-4cdc-85f7-1e7c9568714b-kube-api-access-xsgsm\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.456047 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " 
pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.457253 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a9929765-53ba-4cdc-85f7-1e7c9568714b-log-httpd\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.457323 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a9929765-53ba-4cdc-85f7-1e7c9568714b-run-httpd\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.460227 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.460795 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.461223 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.462062 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-config-data\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.468954 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-scripts\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.475496 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsgsm\" (UniqueName: \"kubernetes.io/projected/a9929765-53ba-4cdc-85f7-1e7c9568714b-kube-api-access-xsgsm\") pod \"ceilometer-0\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.567562 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62899a94-028e-4eb3-810e-33e04fb66eb1" path="/var/lib/kubelet/pods/62899a94-028e-4eb3-810e-33e04fb66eb1/volumes" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.849777 4971 generic.go:334] "Generic (PLEG): container finished" podID="146ec507-b566-4056-92f6-2f12b2d6f11c" containerID="311e0d520fd8e54093bc7d5a472d0e50638192c04dc79858d7a39b858f1c705d" exitCode=0 Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.849893 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.850659 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-dj8cq" event={"ID":"146ec507-b566-4056-92f6-2f12b2d6f11c","Type":"ContainerDied","Data":"311e0d520fd8e54093bc7d5a472d0e50638192c04dc79858d7a39b858f1c705d"} Nov 27 07:16:52 crc kubenswrapper[4971]: I1127 07:16:52.894612 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.066599 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-combined-ca-bundle\") pod \"a9929765-53ba-4cdc-85f7-1e7c9568714b\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.066760 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xsgsm\" (UniqueName: \"kubernetes.io/projected/a9929765-53ba-4cdc-85f7-1e7c9568714b-kube-api-access-xsgsm\") pod \"a9929765-53ba-4cdc-85f7-1e7c9568714b\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.066805 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-ceilometer-tls-certs\") pod \"a9929765-53ba-4cdc-85f7-1e7c9568714b\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.066912 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-config-data\") pod \"a9929765-53ba-4cdc-85f7-1e7c9568714b\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.066985 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-sg-core-conf-yaml\") pod \"a9929765-53ba-4cdc-85f7-1e7c9568714b\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.067047 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a9929765-53ba-4cdc-85f7-1e7c9568714b-log-httpd\") pod \"a9929765-53ba-4cdc-85f7-1e7c9568714b\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.067108 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a9929765-53ba-4cdc-85f7-1e7c9568714b-run-httpd\") pod \"a9929765-53ba-4cdc-85f7-1e7c9568714b\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.067165 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-scripts\") pod \"a9929765-53ba-4cdc-85f7-1e7c9568714b\" (UID: \"a9929765-53ba-4cdc-85f7-1e7c9568714b\") " Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.067860 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/a9929765-53ba-4cdc-85f7-1e7c9568714b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a9929765-53ba-4cdc-85f7-1e7c9568714b" (UID: "a9929765-53ba-4cdc-85f7-1e7c9568714b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.068061 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9929765-53ba-4cdc-85f7-1e7c9568714b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a9929765-53ba-4cdc-85f7-1e7c9568714b" (UID: "a9929765-53ba-4cdc-85f7-1e7c9568714b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.071246 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-config-data" (OuterVolumeSpecName: "config-data") pod "a9929765-53ba-4cdc-85f7-1e7c9568714b" (UID: "a9929765-53ba-4cdc-85f7-1e7c9568714b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.071430 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9929765-53ba-4cdc-85f7-1e7c9568714b-kube-api-access-xsgsm" (OuterVolumeSpecName: "kube-api-access-xsgsm") pod "a9929765-53ba-4cdc-85f7-1e7c9568714b" (UID: "a9929765-53ba-4cdc-85f7-1e7c9568714b"). InnerVolumeSpecName "kube-api-access-xsgsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.073112 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a9929765-53ba-4cdc-85f7-1e7c9568714b" (UID: "a9929765-53ba-4cdc-85f7-1e7c9568714b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.075564 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a9929765-53ba-4cdc-85f7-1e7c9568714b" (UID: "a9929765-53ba-4cdc-85f7-1e7c9568714b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.079259 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-scripts" (OuterVolumeSpecName: "scripts") pod "a9929765-53ba-4cdc-85f7-1e7c9568714b" (UID: "a9929765-53ba-4cdc-85f7-1e7c9568714b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.082924 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "a9929765-53ba-4cdc-85f7-1e7c9568714b" (UID: "a9929765-53ba-4cdc-85f7-1e7c9568714b"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.169452 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.169495 4971 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.169510 4971 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a9929765-53ba-4cdc-85f7-1e7c9568714b-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.169521 4971 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a9929765-53ba-4cdc-85f7-1e7c9568714b-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.169544 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.169556 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.169569 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xsgsm\" (UniqueName: \"kubernetes.io/projected/a9929765-53ba-4cdc-85f7-1e7c9568714b-kube-api-access-xsgsm\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.169581 4971 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9929765-53ba-4cdc-85f7-1e7c9568714b-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.857589 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.931237 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.949379 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.960074 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.962605 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.965915 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.966004 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.966170 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.968831 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.986713 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.987102 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.987153 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76095246-397f-49df-9758-e41ef115f04e-log-httpd\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.987195 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76095246-397f-49df-9758-e41ef115f04e-run-httpd\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.987292 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gl8g\" (UniqueName: \"kubernetes.io/projected/76095246-397f-49df-9758-e41ef115f04e-kube-api-access-4gl8g\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.987452 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.987613 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-scripts\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:53 crc kubenswrapper[4971]: I1127 07:16:53.987731 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-config-data\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.088691 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.088747 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.088792 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76095246-397f-49df-9758-e41ef115f04e-log-httpd\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.088818 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76095246-397f-49df-9758-e41ef115f04e-run-httpd\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.088876 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gl8g\" (UniqueName: \"kubernetes.io/projected/76095246-397f-49df-9758-e41ef115f04e-kube-api-access-4gl8g\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.088956 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.089025 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-scripts\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.089080 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-config-data\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.090751 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76095246-397f-49df-9758-e41ef115f04e-run-httpd\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.091413 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/76095246-397f-49df-9758-e41ef115f04e-log-httpd\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.095372 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.098746 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-scripts\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.098809 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-config-data\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.106296 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.106777 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gl8g\" (UniqueName: \"kubernetes.io/projected/76095246-397f-49df-9758-e41ef115f04e-kube-api-access-4gl8g\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.107712 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " pod="openstack/ceilometer-0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.228309 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-dj8cq" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.287167 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.396276 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/146ec507-b566-4056-92f6-2f12b2d6f11c-config-data\") pod \"146ec507-b566-4056-92f6-2f12b2d6f11c\" (UID: \"146ec507-b566-4056-92f6-2f12b2d6f11c\") " Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.396345 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/146ec507-b566-4056-92f6-2f12b2d6f11c-scripts\") pod \"146ec507-b566-4056-92f6-2f12b2d6f11c\" (UID: \"146ec507-b566-4056-92f6-2f12b2d6f11c\") " Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.396501 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96sc9\" (UniqueName: \"kubernetes.io/projected/146ec507-b566-4056-92f6-2f12b2d6f11c-kube-api-access-96sc9\") pod \"146ec507-b566-4056-92f6-2f12b2d6f11c\" (UID: \"146ec507-b566-4056-92f6-2f12b2d6f11c\") " Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.396568 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/146ec507-b566-4056-92f6-2f12b2d6f11c-combined-ca-bundle\") pod \"146ec507-b566-4056-92f6-2f12b2d6f11c\" (UID: \"146ec507-b566-4056-92f6-2f12b2d6f11c\") " Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.401797 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/146ec507-b566-4056-92f6-2f12b2d6f11c-kube-api-access-96sc9" (OuterVolumeSpecName: "kube-api-access-96sc9") pod "146ec507-b566-4056-92f6-2f12b2d6f11c" (UID: "146ec507-b566-4056-92f6-2f12b2d6f11c"). InnerVolumeSpecName "kube-api-access-96sc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.402857 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/146ec507-b566-4056-92f6-2f12b2d6f11c-scripts" (OuterVolumeSpecName: "scripts") pod "146ec507-b566-4056-92f6-2f12b2d6f11c" (UID: "146ec507-b566-4056-92f6-2f12b2d6f11c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.425872 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/146ec507-b566-4056-92f6-2f12b2d6f11c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "146ec507-b566-4056-92f6-2f12b2d6f11c" (UID: "146ec507-b566-4056-92f6-2f12b2d6f11c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.427978 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/146ec507-b566-4056-92f6-2f12b2d6f11c-config-data" (OuterVolumeSpecName: "config-data") pod "146ec507-b566-4056-92f6-2f12b2d6f11c" (UID: "146ec507-b566-4056-92f6-2f12b2d6f11c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.498978 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96sc9\" (UniqueName: \"kubernetes.io/projected/146ec507-b566-4056-92f6-2f12b2d6f11c-kube-api-access-96sc9\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.499014 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/146ec507-b566-4056-92f6-2f12b2d6f11c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.499027 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/146ec507-b566-4056-92f6-2f12b2d6f11c-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.499041 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/146ec507-b566-4056-92f6-2f12b2d6f11c-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.560210 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9929765-53ba-4cdc-85f7-1e7c9568714b" path="/var/lib/kubelet/pods/a9929765-53ba-4cdc-85f7-1e7c9568714b/volumes" Nov 27 07:16:54 crc kubenswrapper[4971]: W1127 07:16:54.775373 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76095246_397f_49df_9758_e41ef115f04e.slice/crio-5ef49813369074fb3b7cc4c248b5d6fdb4e8668cbaaa888aba7e7e00a0d24fe9 WatchSource:0}: Error finding container 5ef49813369074fb3b7cc4c248b5d6fdb4e8668cbaaa888aba7e7e00a0d24fe9: Status 404 returned error can't find the container with id 5ef49813369074fb3b7cc4c248b5d6fdb4e8668cbaaa888aba7e7e00a0d24fe9 Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.777186 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.866968 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76095246-397f-49df-9758-e41ef115f04e","Type":"ContainerStarted","Data":"5ef49813369074fb3b7cc4c248b5d6fdb4e8668cbaaa888aba7e7e00a0d24fe9"} Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.868836 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-dj8cq" event={"ID":"146ec507-b566-4056-92f6-2f12b2d6f11c","Type":"ContainerDied","Data":"8d60b9b824532010a0ea2ccc5c08f04d0a42fa9ef20a90f1bb325338b7d522b0"} Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.868882 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d60b9b824532010a0ea2ccc5c08f04d0a42fa9ef20a90f1bb325338b7d522b0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.868897 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-dj8cq" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.974776 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 27 07:16:54 crc kubenswrapper[4971]: E1127 07:16:54.975284 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="146ec507-b566-4056-92f6-2f12b2d6f11c" containerName="nova-cell0-conductor-db-sync" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.975307 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="146ec507-b566-4056-92f6-2f12b2d6f11c" containerName="nova-cell0-conductor-db-sync" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.975574 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="146ec507-b566-4056-92f6-2f12b2d6f11c" containerName="nova-cell0-conductor-db-sync" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.976428 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.986776 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-cgmvk" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.986796 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 27 07:16:54 crc kubenswrapper[4971]: I1127 07:16:54.987400 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 27 07:16:55 crc kubenswrapper[4971]: I1127 07:16:55.040060 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6b9n7\" (UniqueName: \"kubernetes.io/projected/bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad-kube-api-access-6b9n7\") pod \"nova-cell0-conductor-0\" (UID: \"bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad\") " pod="openstack/nova-cell0-conductor-0" Nov 27 07:16:55 crc kubenswrapper[4971]: I1127 07:16:55.040133 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad\") " pod="openstack/nova-cell0-conductor-0" Nov 27 07:16:55 crc kubenswrapper[4971]: I1127 07:16:55.040348 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad\") " pod="openstack/nova-cell0-conductor-0" Nov 27 07:16:55 crc kubenswrapper[4971]: I1127 07:16:55.146357 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6b9n7\" (UniqueName: \"kubernetes.io/projected/bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad-kube-api-access-6b9n7\") pod \"nova-cell0-conductor-0\" (UID: \"bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad\") " pod="openstack/nova-cell0-conductor-0" Nov 27 07:16:55 crc kubenswrapper[4971]: I1127 07:16:55.146448 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad\") " pod="openstack/nova-cell0-conductor-0" Nov 27 07:16:55 crc 
kubenswrapper[4971]: I1127 07:16:55.146562 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad\") " pod="openstack/nova-cell0-conductor-0" Nov 27 07:16:55 crc kubenswrapper[4971]: I1127 07:16:55.152374 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad\") " pod="openstack/nova-cell0-conductor-0" Nov 27 07:16:55 crc kubenswrapper[4971]: I1127 07:16:55.152952 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad\") " pod="openstack/nova-cell0-conductor-0" Nov 27 07:16:55 crc kubenswrapper[4971]: I1127 07:16:55.166647 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6b9n7\" (UniqueName: \"kubernetes.io/projected/bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad-kube-api-access-6b9n7\") pod \"nova-cell0-conductor-0\" (UID: \"bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad\") " pod="openstack/nova-cell0-conductor-0" Nov 27 07:16:55 crc kubenswrapper[4971]: I1127 07:16:55.301582 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 27 07:16:55 crc kubenswrapper[4971]: I1127 07:16:55.748107 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 27 07:16:55 crc kubenswrapper[4971]: I1127 07:16:55.877154 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad","Type":"ContainerStarted","Data":"44f0d0b2d16a2e70a7f8a13193f0c24dce614ec56f01b2d28fa2d63b2985c3dc"} Nov 27 07:16:55 crc kubenswrapper[4971]: I1127 07:16:55.878622 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76095246-397f-49df-9758-e41ef115f04e","Type":"ContainerStarted","Data":"0aa5575b639548fda0cd5e9bc080b30b926779f25a3673e3a45f9a5dfba89e65"} Nov 27 07:16:56 crc kubenswrapper[4971]: I1127 07:16:56.889310 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad","Type":"ContainerStarted","Data":"960448ee56e126a49d6f57a232fb215e9851f384a304f93ed3bcb04c83b92864"} Nov 27 07:16:56 crc kubenswrapper[4971]: I1127 07:16:56.889980 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Nov 27 07:16:56 crc kubenswrapper[4971]: I1127 07:16:56.891083 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76095246-397f-49df-9758-e41ef115f04e","Type":"ContainerStarted","Data":"b476ee801887a991ec352e6d262b810d23270fe912044830298d84052a838b2d"} Nov 27 07:16:56 crc kubenswrapper[4971]: I1127 07:16:56.915854 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.915834869 podStartE2EDuration="2.915834869s" podCreationTimestamp="2025-11-27 07:16:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:16:56.909682583 +0000 UTC m=+1455.101726511" watchObservedRunningTime="2025-11-27 07:16:56.915834869 +0000 UTC m=+1455.107878787" Nov 27 07:16:57 crc kubenswrapper[4971]: I1127 07:16:57.914458 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76095246-397f-49df-9758-e41ef115f04e","Type":"ContainerStarted","Data":"1480a56592f432ac9c7f7cdd1c22db97b04b9ca7ca9c7343b9a017325fd12817"} Nov 27 07:16:58 crc kubenswrapper[4971]: I1127 07:16:58.925770 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76095246-397f-49df-9758-e41ef115f04e","Type":"ContainerStarted","Data":"9056a9ec05a7c23aa18ad63623b7cf69c34e7d323c0ae3f328595b2c37f437c4"} Nov 27 07:16:58 crc kubenswrapper[4971]: I1127 07:16:58.927124 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 27 07:16:58 crc kubenswrapper[4971]: I1127 07:16:58.951504 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.417365912 podStartE2EDuration="5.951481417s" podCreationTimestamp="2025-11-27 07:16:53 +0000 UTC" firstStartedPulling="2025-11-27 07:16:54.778151209 +0000 UTC m=+1452.970195127" lastFinishedPulling="2025-11-27 07:16:58.312266714 +0000 UTC m=+1456.504310632" observedRunningTime="2025-11-27 07:16:58.945847765 +0000 UTC m=+1457.137891683" watchObservedRunningTime="2025-11-27 07:16:58.951481417 +0000 UTC m=+1457.143525335" Nov 27 07:17:00 crc kubenswrapper[4971]: I1127 07:17:00.330086 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Nov 27 07:17:00 crc kubenswrapper[4971]: I1127 07:17:00.792131 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-k2bcl"] Nov 27 07:17:00 crc kubenswrapper[4971]: I1127 07:17:00.793719 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-k2bcl" Nov 27 07:17:00 crc kubenswrapper[4971]: I1127 07:17:00.797078 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Nov 27 07:17:00 crc kubenswrapper[4971]: I1127 07:17:00.797747 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Nov 27 07:17:00 crc kubenswrapper[4971]: I1127 07:17:00.801951 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-k2bcl"] Nov 27 07:17:00 crc kubenswrapper[4971]: I1127 07:17:00.868060 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e9e64a6-205f-4e28-87a7-d9061d5931d1-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-k2bcl\" (UID: \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\") " pod="openstack/nova-cell0-cell-mapping-k2bcl" Nov 27 07:17:00 crc kubenswrapper[4971]: I1127 07:17:00.868543 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e9e64a6-205f-4e28-87a7-d9061d5931d1-scripts\") pod \"nova-cell0-cell-mapping-k2bcl\" (UID: \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\") " pod="openstack/nova-cell0-cell-mapping-k2bcl" Nov 27 07:17:00 crc kubenswrapper[4971]: I1127 07:17:00.868712 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e9e64a6-205f-4e28-87a7-d9061d5931d1-config-data\") pod \"nova-cell0-cell-mapping-k2bcl\" (UID: \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\") " pod="openstack/nova-cell0-cell-mapping-k2bcl" Nov 27 07:17:00 crc kubenswrapper[4971]: I1127 07:17:00.868763 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dz2zv\" (UniqueName: \"kubernetes.io/projected/4e9e64a6-205f-4e28-87a7-d9061d5931d1-kube-api-access-dz2zv\") pod \"nova-cell0-cell-mapping-k2bcl\" (UID: \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\") " pod="openstack/nova-cell0-cell-mapping-k2bcl" Nov 27 07:17:00 crc kubenswrapper[4971]: I1127 07:17:00.971065 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dz2zv\" (UniqueName: \"kubernetes.io/projected/4e9e64a6-205f-4e28-87a7-d9061d5931d1-kube-api-access-dz2zv\") pod \"nova-cell0-cell-mapping-k2bcl\" (UID: \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\") " pod="openstack/nova-cell0-cell-mapping-k2bcl" Nov 27 07:17:00 crc kubenswrapper[4971]: I1127 07:17:00.971267 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e9e64a6-205f-4e28-87a7-d9061d5931d1-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-k2bcl\" (UID: \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\") " pod="openstack/nova-cell0-cell-mapping-k2bcl" Nov 27 07:17:00 crc kubenswrapper[4971]: I1127 07:17:00.971318 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e9e64a6-205f-4e28-87a7-d9061d5931d1-scripts\") pod \"nova-cell0-cell-mapping-k2bcl\" (UID: \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\") " pod="openstack/nova-cell0-cell-mapping-k2bcl" Nov 27 07:17:00 crc kubenswrapper[4971]: I1127 07:17:00.971491 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/4e9e64a6-205f-4e28-87a7-d9061d5931d1-config-data\") pod \"nova-cell0-cell-mapping-k2bcl\" (UID: \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\") " pod="openstack/nova-cell0-cell-mapping-k2bcl" Nov 27 07:17:00 crc kubenswrapper[4971]: I1127 07:17:00.996620 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e9e64a6-205f-4e28-87a7-d9061d5931d1-scripts\") pod \"nova-cell0-cell-mapping-k2bcl\" (UID: \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\") " pod="openstack/nova-cell0-cell-mapping-k2bcl" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.000679 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e9e64a6-205f-4e28-87a7-d9061d5931d1-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-k2bcl\" (UID: \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\") " pod="openstack/nova-cell0-cell-mapping-k2bcl" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.003310 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e9e64a6-205f-4e28-87a7-d9061d5931d1-config-data\") pod \"nova-cell0-cell-mapping-k2bcl\" (UID: \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\") " pod="openstack/nova-cell0-cell-mapping-k2bcl" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.024297 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dz2zv\" (UniqueName: \"kubernetes.io/projected/4e9e64a6-205f-4e28-87a7-d9061d5931d1-kube-api-access-dz2zv\") pod \"nova-cell0-cell-mapping-k2bcl\" (UID: \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\") " pod="openstack/nova-cell0-cell-mapping-k2bcl" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.061975 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.063862 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.071925 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.094552 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.113610 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-k2bcl" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.134770 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.136911 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.144823 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.177414 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7m9h\" (UniqueName: \"kubernetes.io/projected/f65ed59e-f6a3-41c1-a460-f95220581cc8-kube-api-access-l7m9h\") pod \"nova-metadata-0\" (UID: \"f65ed59e-f6a3-41c1-a460-f95220581cc8\") " pod="openstack/nova-metadata-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.179250 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f65ed59e-f6a3-41c1-a460-f95220581cc8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f65ed59e-f6a3-41c1-a460-f95220581cc8\") " pod="openstack/nova-metadata-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.179360 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f65ed59e-f6a3-41c1-a460-f95220581cc8-logs\") pod \"nova-metadata-0\" (UID: \"f65ed59e-f6a3-41c1-a460-f95220581cc8\") " pod="openstack/nova-metadata-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.179525 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f65ed59e-f6a3-41c1-a460-f95220581cc8-config-data\") pod \"nova-metadata-0\" (UID: \"f65ed59e-f6a3-41c1-a460-f95220581cc8\") " pod="openstack/nova-metadata-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.207693 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.275604 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-648d5566bc-jrbwx"] Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.277399 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.283005 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-ovsdbserver-sb\") pod \"dnsmasq-dns-648d5566bc-jrbwx\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.283050 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b59348c-fd2b-41f7-8826-54b28f71e3a0-config-data\") pod \"nova-api-0\" (UID: \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\") " pod="openstack/nova-api-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.283084 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f65ed59e-f6a3-41c1-a460-f95220581cc8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f65ed59e-f6a3-41c1-a460-f95220581cc8\") " pod="openstack/nova-metadata-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.283103 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f65ed59e-f6a3-41c1-a460-f95220581cc8-logs\") pod \"nova-metadata-0\" (UID: \"f65ed59e-f6a3-41c1-a460-f95220581cc8\") " pod="openstack/nova-metadata-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.283123 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f65ed59e-f6a3-41c1-a460-f95220581cc8-config-data\") pod \"nova-metadata-0\" (UID: \"f65ed59e-f6a3-41c1-a460-f95220581cc8\") " pod="openstack/nova-metadata-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.283151 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b59348c-fd2b-41f7-8826-54b28f71e3a0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\") " pod="openstack/nova-api-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.283177 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b59348c-fd2b-41f7-8826-54b28f71e3a0-logs\") pod \"nova-api-0\" (UID: \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\") " pod="openstack/nova-api-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.283196 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6g552\" (UniqueName: \"kubernetes.io/projected/7b59348c-fd2b-41f7-8826-54b28f71e3a0-kube-api-access-6g552\") pod \"nova-api-0\" (UID: \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\") " pod="openstack/nova-api-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.283229 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-config\") pod \"dnsmasq-dns-648d5566bc-jrbwx\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.283249 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qb7m\" (UniqueName: \"kubernetes.io/projected/b03fc497-c5af-4176-9127-f0f1103aecca-kube-api-access-7qb7m\") pod \"dnsmasq-dns-648d5566bc-jrbwx\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.283272 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-dns-svc\") pod \"dnsmasq-dns-648d5566bc-jrbwx\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.283314 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-dns-swift-storage-0\") pod \"dnsmasq-dns-648d5566bc-jrbwx\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.283335 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7m9h\" (UniqueName: \"kubernetes.io/projected/f65ed59e-f6a3-41c1-a460-f95220581cc8-kube-api-access-l7m9h\") pod \"nova-metadata-0\" (UID: \"f65ed59e-f6a3-41c1-a460-f95220581cc8\") " pod="openstack/nova-metadata-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.283369 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-ovsdbserver-nb\") pod \"dnsmasq-dns-648d5566bc-jrbwx\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.289747 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f65ed59e-f6a3-41c1-a460-f95220581cc8-logs\") pod \"nova-metadata-0\" (UID: \"f65ed59e-f6a3-41c1-a460-f95220581cc8\") " pod="openstack/nova-metadata-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.294724 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-648d5566bc-jrbwx"] Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.295145 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f65ed59e-f6a3-41c1-a460-f95220581cc8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f65ed59e-f6a3-41c1-a460-f95220581cc8\") " pod="openstack/nova-metadata-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.306789 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.308327 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.320637 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.337294 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f65ed59e-f6a3-41c1-a460-f95220581cc8-config-data\") pod \"nova-metadata-0\" (UID: \"f65ed59e-f6a3-41c1-a460-f95220581cc8\") " pod="openstack/nova-metadata-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.337582 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.386828 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b59348c-fd2b-41f7-8826-54b28f71e3a0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\") " pod="openstack/nova-api-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.386878 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b59348c-fd2b-41f7-8826-54b28f71e3a0-logs\") pod \"nova-api-0\" (UID: \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\") " pod="openstack/nova-api-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.386907 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6g552\" (UniqueName: \"kubernetes.io/projected/7b59348c-fd2b-41f7-8826-54b28f71e3a0-kube-api-access-6g552\") pod \"nova-api-0\" (UID: \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\") " pod="openstack/nova-api-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.386942 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-config\") pod \"dnsmasq-dns-648d5566bc-jrbwx\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.386963 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qb7m\" (UniqueName: \"kubernetes.io/projected/b03fc497-c5af-4176-9127-f0f1103aecca-kube-api-access-7qb7m\") pod \"dnsmasq-dns-648d5566bc-jrbwx\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.386985 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-dns-svc\") pod \"dnsmasq-dns-648d5566bc-jrbwx\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.387026 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-dns-swift-storage-0\") pod \"dnsmasq-dns-648d5566bc-jrbwx\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.387068 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-ovsdbserver-nb\") pod \"dnsmasq-dns-648d5566bc-jrbwx\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.387131 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-ovsdbserver-sb\") pod \"dnsmasq-dns-648d5566bc-jrbwx\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.387176 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b59348c-fd2b-41f7-8826-54b28f71e3a0-config-data\") pod \"nova-api-0\" (UID: \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\") " pod="openstack/nova-api-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.388484 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7m9h\" (UniqueName: \"kubernetes.io/projected/f65ed59e-f6a3-41c1-a460-f95220581cc8-kube-api-access-l7m9h\") pod \"nova-metadata-0\" (UID: \"f65ed59e-f6a3-41c1-a460-f95220581cc8\") " pod="openstack/nova-metadata-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.389475 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-dns-swift-storage-0\") pod \"dnsmasq-dns-648d5566bc-jrbwx\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.389680 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-dns-svc\") pod \"dnsmasq-dns-648d5566bc-jrbwx\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.390302 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-config\") pod \"dnsmasq-dns-648d5566bc-jrbwx\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.390825 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-ovsdbserver-nb\") pod \"dnsmasq-dns-648d5566bc-jrbwx\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.391393 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-ovsdbserver-sb\") pod \"dnsmasq-dns-648d5566bc-jrbwx\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.391444 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b59348c-fd2b-41f7-8826-54b28f71e3a0-logs\") pod \"nova-api-0\" (UID: \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\") " pod="openstack/nova-api-0" Nov 27 07:17:01 crc 
kubenswrapper[4971]: I1127 07:17:01.425366 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b59348c-fd2b-41f7-8826-54b28f71e3a0-config-data\") pod \"nova-api-0\" (UID: \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\") " pod="openstack/nova-api-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.426689 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.442010 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6g552\" (UniqueName: \"kubernetes.io/projected/7b59348c-fd2b-41f7-8826-54b28f71e3a0-kube-api-access-6g552\") pod \"nova-api-0\" (UID: \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\") " pod="openstack/nova-api-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.446333 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qb7m\" (UniqueName: \"kubernetes.io/projected/b03fc497-c5af-4176-9127-f0f1103aecca-kube-api-access-7qb7m\") pod \"dnsmasq-dns-648d5566bc-jrbwx\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.450567 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.458249 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.473237 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.474955 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b59348c-fd2b-41f7-8826-54b28f71e3a0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\") " pod="openstack/nova-api-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.482704 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.508261 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c37f29c-a94c-4c2b-87e1-62bffc246889-config-data\") pod \"nova-scheduler-0\" (UID: \"8c37f29c-a94c-4c2b-87e1-62bffc246889\") " pod="openstack/nova-scheduler-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.508356 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npvlp\" (UniqueName: \"kubernetes.io/projected/8c37f29c-a94c-4c2b-87e1-62bffc246889-kube-api-access-npvlp\") pod \"nova-scheduler-0\" (UID: \"8c37f29c-a94c-4c2b-87e1-62bffc246889\") " pod="openstack/nova-scheduler-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.508566 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c37f29c-a94c-4c2b-87e1-62bffc246889-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8c37f29c-a94c-4c2b-87e1-62bffc246889\") " pod="openstack/nova-scheduler-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.611793 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6abff3e7-26fe-4194-b53c-5f346c554777-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6abff3e7-26fe-4194-b53c-5f346c554777\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.611919 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c37f29c-a94c-4c2b-87e1-62bffc246889-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8c37f29c-a94c-4c2b-87e1-62bffc246889\") " pod="openstack/nova-scheduler-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.611986 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6abff3e7-26fe-4194-b53c-5f346c554777-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6abff3e7-26fe-4194-b53c-5f346c554777\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.612070 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c37f29c-a94c-4c2b-87e1-62bffc246889-config-data\") pod \"nova-scheduler-0\" (UID: \"8c37f29c-a94c-4c2b-87e1-62bffc246889\") " pod="openstack/nova-scheduler-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.612138 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npvlp\" (UniqueName: \"kubernetes.io/projected/8c37f29c-a94c-4c2b-87e1-62bffc246889-kube-api-access-npvlp\") pod \"nova-scheduler-0\" (UID: \"8c37f29c-a94c-4c2b-87e1-62bffc246889\") " pod="openstack/nova-scheduler-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.612172 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2625\" (UniqueName: \"kubernetes.io/projected/6abff3e7-26fe-4194-b53c-5f346c554777-kube-api-access-h2625\") pod \"nova-cell1-novncproxy-0\" (UID: \"6abff3e7-26fe-4194-b53c-5f346c554777\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.626129 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c37f29c-a94c-4c2b-87e1-62bffc246889-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8c37f29c-a94c-4c2b-87e1-62bffc246889\") " pod="openstack/nova-scheduler-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.628215 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.656319 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c37f29c-a94c-4c2b-87e1-62bffc246889-config-data\") pod \"nova-scheduler-0\" (UID: \"8c37f29c-a94c-4c2b-87e1-62bffc246889\") " pod="openstack/nova-scheduler-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.673277 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npvlp\" (UniqueName: \"kubernetes.io/projected/8c37f29c-a94c-4c2b-87e1-62bffc246889-kube-api-access-npvlp\") pod \"nova-scheduler-0\" (UID: \"8c37f29c-a94c-4c2b-87e1-62bffc246889\") " pod="openstack/nova-scheduler-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.684949 4971 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.717201 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2625\" (UniqueName: \"kubernetes.io/projected/6abff3e7-26fe-4194-b53c-5f346c554777-kube-api-access-h2625\") pod \"nova-cell1-novncproxy-0\" (UID: \"6abff3e7-26fe-4194-b53c-5f346c554777\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.717341 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6abff3e7-26fe-4194-b53c-5f346c554777-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6abff3e7-26fe-4194-b53c-5f346c554777\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.717480 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6abff3e7-26fe-4194-b53c-5f346c554777-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6abff3e7-26fe-4194-b53c-5f346c554777\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.729286 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6abff3e7-26fe-4194-b53c-5f346c554777-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6abff3e7-26fe-4194-b53c-5f346c554777\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.732098 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6abff3e7-26fe-4194-b53c-5f346c554777-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6abff3e7-26fe-4194-b53c-5f346c554777\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.769225 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2625\" (UniqueName: \"kubernetes.io/projected/6abff3e7-26fe-4194-b53c-5f346c554777-kube-api-access-h2625\") pod \"nova-cell1-novncproxy-0\" (UID: \"6abff3e7-26fe-4194-b53c-5f346c554777\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.853672 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 07:17:01 crc kubenswrapper[4971]: I1127 07:17:01.909437 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.123574 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-k2bcl"] Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.245278 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-648d5566bc-jrbwx"] Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.389234 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.412879 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.520595 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 27 07:17:02 crc kubenswrapper[4971]: W1127 07:17:02.522662 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b59348c_fd2b_41f7_8826_54b28f71e3a0.slice/crio-7fb58b198288231b69b1b0a215fc66f4755019a98b5efb8aab2afd1f14cdfdb0 WatchSource:0}: Error finding container 7fb58b198288231b69b1b0a215fc66f4755019a98b5efb8aab2afd1f14cdfdb0: Status 404 returned error can't find the container with id 7fb58b198288231b69b1b0a215fc66f4755019a98b5efb8aab2afd1f14cdfdb0 Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.614421 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6npvv"] Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.619765 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6npvv" Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.623215 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.623430 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.655573 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.671594 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6npvv"] Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.688732 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.753719 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/158bee38-b35a-43f7-a8e2-547220d8f339-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6npvv\" (UID: \"158bee38-b35a-43f7-a8e2-547220d8f339\") " pod="openstack/nova-cell1-conductor-db-sync-6npvv" Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.753811 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/158bee38-b35a-43f7-a8e2-547220d8f339-config-data\") pod \"nova-cell1-conductor-db-sync-6npvv\" (UID: \"158bee38-b35a-43f7-a8e2-547220d8f339\") " pod="openstack/nova-cell1-conductor-db-sync-6npvv" Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.753832 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/158bee38-b35a-43f7-a8e2-547220d8f339-scripts\") pod \"nova-cell1-conductor-db-sync-6npvv\" (UID: \"158bee38-b35a-43f7-a8e2-547220d8f339\") " pod="openstack/nova-cell1-conductor-db-sync-6npvv" Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.754327 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twlhr\" (UniqueName: \"kubernetes.io/projected/158bee38-b35a-43f7-a8e2-547220d8f339-kube-api-access-twlhr\") pod \"nova-cell1-conductor-db-sync-6npvv\" (UID: \"158bee38-b35a-43f7-a8e2-547220d8f339\") " pod="openstack/nova-cell1-conductor-db-sync-6npvv" Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.856622 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/158bee38-b35a-43f7-a8e2-547220d8f339-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6npvv\" (UID: \"158bee38-b35a-43f7-a8e2-547220d8f339\") " pod="openstack/nova-cell1-conductor-db-sync-6npvv" Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.856696 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/158bee38-b35a-43f7-a8e2-547220d8f339-config-data\") pod \"nova-cell1-conductor-db-sync-6npvv\" (UID: \"158bee38-b35a-43f7-a8e2-547220d8f339\") " pod="openstack/nova-cell1-conductor-db-sync-6npvv" Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.856725 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/158bee38-b35a-43f7-a8e2-547220d8f339-scripts\") pod \"nova-cell1-conductor-db-sync-6npvv\" (UID: \"158bee38-b35a-43f7-a8e2-547220d8f339\") " pod="openstack/nova-cell1-conductor-db-sync-6npvv" Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.856853 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twlhr\" (UniqueName: \"kubernetes.io/projected/158bee38-b35a-43f7-a8e2-547220d8f339-kube-api-access-twlhr\") pod \"nova-cell1-conductor-db-sync-6npvv\" (UID: \"158bee38-b35a-43f7-a8e2-547220d8f339\") " pod="openstack/nova-cell1-conductor-db-sync-6npvv" Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.861645 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/158bee38-b35a-43f7-a8e2-547220d8f339-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6npvv\" (UID: \"158bee38-b35a-43f7-a8e2-547220d8f339\") " pod="openstack/nova-cell1-conductor-db-sync-6npvv" Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.861647 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/158bee38-b35a-43f7-a8e2-547220d8f339-scripts\") pod \"nova-cell1-conductor-db-sync-6npvv\" (UID: \"158bee38-b35a-43f7-a8e2-547220d8f339\") " pod="openstack/nova-cell1-conductor-db-sync-6npvv" Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.862726 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/158bee38-b35a-43f7-a8e2-547220d8f339-config-data\") pod \"nova-cell1-conductor-db-sync-6npvv\" (UID: \"158bee38-b35a-43f7-a8e2-547220d8f339\") " pod="openstack/nova-cell1-conductor-db-sync-6npvv" Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 
07:17:02.877396 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twlhr\" (UniqueName: \"kubernetes.io/projected/158bee38-b35a-43f7-a8e2-547220d8f339-kube-api-access-twlhr\") pod \"nova-cell1-conductor-db-sync-6npvv\" (UID: \"158bee38-b35a-43f7-a8e2-547220d8f339\") " pod="openstack/nova-cell1-conductor-db-sync-6npvv" Nov 27 07:17:02 crc kubenswrapper[4971]: I1127 07:17:02.962934 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6npvv" Nov 27 07:17:03 crc kubenswrapper[4971]: I1127 07:17:02.995387 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f65ed59e-f6a3-41c1-a460-f95220581cc8","Type":"ContainerStarted","Data":"0f316c5c5d2f4c39bf084cab83f3b3587e67da834ce936df12d4e201a6f1b6a2"} Nov 27 07:17:03 crc kubenswrapper[4971]: I1127 07:17:02.996678 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7b59348c-fd2b-41f7-8826-54b28f71e3a0","Type":"ContainerStarted","Data":"7fb58b198288231b69b1b0a215fc66f4755019a98b5efb8aab2afd1f14cdfdb0"} Nov 27 07:17:03 crc kubenswrapper[4971]: I1127 07:17:02.998596 4971 generic.go:334] "Generic (PLEG): container finished" podID="b03fc497-c5af-4176-9127-f0f1103aecca" containerID="006632e0a07d3a04353081708e90f331675590e5810529c3f7d8c0efbfdc6095" exitCode=0 Nov 27 07:17:03 crc kubenswrapper[4971]: I1127 07:17:02.998644 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" event={"ID":"b03fc497-c5af-4176-9127-f0f1103aecca","Type":"ContainerDied","Data":"006632e0a07d3a04353081708e90f331675590e5810529c3f7d8c0efbfdc6095"} Nov 27 07:17:03 crc kubenswrapper[4971]: I1127 07:17:02.998661 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" event={"ID":"b03fc497-c5af-4176-9127-f0f1103aecca","Type":"ContainerStarted","Data":"1b3dcd94ddef37e1bbc0019504e489e2b4e58774e4e63284ed552566d75c690a"} Nov 27 07:17:03 crc kubenswrapper[4971]: I1127 07:17:03.006983 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6abff3e7-26fe-4194-b53c-5f346c554777","Type":"ContainerStarted","Data":"35108c570bfe437130037dce0277e52477ab4922395b53cffe3a5dd97c984e3a"} Nov 27 07:17:03 crc kubenswrapper[4971]: I1127 07:17:03.012298 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8c37f29c-a94c-4c2b-87e1-62bffc246889","Type":"ContainerStarted","Data":"4f215a9a6bea35ae5a2cc48039aae20ed532531dfe5c39d4eeec2f3811b3a2a0"} Nov 27 07:17:03 crc kubenswrapper[4971]: I1127 07:17:03.022212 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-k2bcl" event={"ID":"4e9e64a6-205f-4e28-87a7-d9061d5931d1","Type":"ContainerStarted","Data":"1b90d79708e13d8b636a1a0c7a9d004c9d25e1cdfbdf4629947028c179260d11"} Nov 27 07:17:03 crc kubenswrapper[4971]: I1127 07:17:03.022264 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-k2bcl" event={"ID":"4e9e64a6-205f-4e28-87a7-d9061d5931d1","Type":"ContainerStarted","Data":"0a272e37ac9a7cd9b4e62098dcfe1fc3c58b3368b1c0db6fc386b530bf14a775"} Nov 27 07:17:03 crc kubenswrapper[4971]: I1127 07:17:03.072643 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-k2bcl" podStartSLOduration=3.072619408 podStartE2EDuration="3.072619408s" 
podCreationTimestamp="2025-11-27 07:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:17:03.057978173 +0000 UTC m=+1461.250022121" watchObservedRunningTime="2025-11-27 07:17:03.072619408 +0000 UTC m=+1461.264663336" Nov 27 07:17:03 crc kubenswrapper[4971]: I1127 07:17:03.513632 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6npvv"] Nov 27 07:17:04 crc kubenswrapper[4971]: I1127 07:17:04.045100 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" event={"ID":"b03fc497-c5af-4176-9127-f0f1103aecca","Type":"ContainerStarted","Data":"4ce78a43f7b9babd3c1c69d0e1c4e4f2fe1fede1d641aa46c89646e8677c7552"} Nov 27 07:17:04 crc kubenswrapper[4971]: I1127 07:17:04.045831 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:04 crc kubenswrapper[4971]: I1127 07:17:04.048870 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6npvv" event={"ID":"158bee38-b35a-43f7-a8e2-547220d8f339","Type":"ContainerStarted","Data":"7844a05a6c63d726288200a10d028c8c4ca0f2610d820f5ec82c8c30a346ef9d"} Nov 27 07:17:04 crc kubenswrapper[4971]: I1127 07:17:04.050289 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6npvv" event={"ID":"158bee38-b35a-43f7-a8e2-547220d8f339","Type":"ContainerStarted","Data":"e79bda654e3e5e68fdc4007bedd7d5e8119bc2424fc52aa528856a83c2cab3ad"} Nov 27 07:17:04 crc kubenswrapper[4971]: I1127 07:17:04.078501 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" podStartSLOduration=3.078449284 podStartE2EDuration="3.078449284s" podCreationTimestamp="2025-11-27 07:17:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:17:04.068485515 +0000 UTC m=+1462.260529453" watchObservedRunningTime="2025-11-27 07:17:04.078449284 +0000 UTC m=+1462.270493202" Nov 27 07:17:04 crc kubenswrapper[4971]: I1127 07:17:04.123359 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-6npvv" podStartSLOduration=2.123336094 podStartE2EDuration="2.123336094s" podCreationTimestamp="2025-11-27 07:17:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:17:04.087925529 +0000 UTC m=+1462.279969457" watchObservedRunningTime="2025-11-27 07:17:04.123336094 +0000 UTC m=+1462.315380012" Nov 27 07:17:05 crc kubenswrapper[4971]: I1127 07:17:05.072413 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 27 07:17:05 crc kubenswrapper[4971]: I1127 07:17:05.092315 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.092329 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8c37f29c-a94c-4c2b-87e1-62bffc246889","Type":"ContainerStarted","Data":"fe828838b18c9b976f9afd340d33b50ae6a1e1ad189c3f7cef303c102d9f0311"} Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.101413 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"f65ed59e-f6a3-41c1-a460-f95220581cc8","Type":"ContainerStarted","Data":"a78d6a09cd65ac3c2f1c9565d2ac972879cffab04e5c91bc02ce918bf404b759"} Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.101458 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f65ed59e-f6a3-41c1-a460-f95220581cc8","Type":"ContainerStarted","Data":"bfaa4c60dd339a87fa016e82ba96c723deea1acee8f45ee26b961f6e11d3a171"} Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.101596 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f65ed59e-f6a3-41c1-a460-f95220581cc8" containerName="nova-metadata-log" containerID="cri-o://bfaa4c60dd339a87fa016e82ba96c723deea1acee8f45ee26b961f6e11d3a171" gracePeriod=30 Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.102759 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f65ed59e-f6a3-41c1-a460-f95220581cc8" containerName="nova-metadata-metadata" containerID="cri-o://a78d6a09cd65ac3c2f1c9565d2ac972879cffab04e5c91bc02ce918bf404b759" gracePeriod=30 Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.108685 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7b59348c-fd2b-41f7-8826-54b28f71e3a0","Type":"ContainerStarted","Data":"3817e83567971603f678466761ddbb7dc6a837bc262aeb0c2e6364436a020855"} Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.109068 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7b59348c-fd2b-41f7-8826-54b28f71e3a0","Type":"ContainerStarted","Data":"197022aa48aedf07992a9135db0f21fa56cfab40635a7b4cefc0d0f0dbcf54f5"} Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.116311 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6abff3e7-26fe-4194-b53c-5f346c554777","Type":"ContainerStarted","Data":"62c5d6cd46420fb65a7f206aa0adabc71b71475222eed36141bfe65ea7e170a8"} Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.116595 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="6abff3e7-26fe-4194-b53c-5f346c554777" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://62c5d6cd46420fb65a7f206aa0adabc71b71475222eed36141bfe65ea7e170a8" gracePeriod=30 Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.129997 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.050558212 podStartE2EDuration="6.129977379s" podCreationTimestamp="2025-11-27 07:17:01 +0000 UTC" firstStartedPulling="2025-11-27 07:17:02.639147782 +0000 UTC m=+1460.831191700" lastFinishedPulling="2025-11-27 07:17:05.718566949 +0000 UTC m=+1463.910610867" observedRunningTime="2025-11-27 07:17:07.127078361 +0000 UTC m=+1465.319122289" watchObservedRunningTime="2025-11-27 07:17:07.129977379 +0000 UTC m=+1465.322021297" Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.190386 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.154876894 podStartE2EDuration="6.190351577s" podCreationTimestamp="2025-11-27 07:17:01 +0000 UTC" firstStartedPulling="2025-11-27 07:17:02.681795691 +0000 UTC m=+1460.873839609" lastFinishedPulling="2025-11-27 07:17:05.717270384 +0000 UTC m=+1463.909314292" observedRunningTime="2025-11-27 
07:17:07.145346013 +0000 UTC m=+1465.337389951" watchObservedRunningTime="2025-11-27 07:17:07.190351577 +0000 UTC m=+1465.382395505" Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.213741 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.910378062 podStartE2EDuration="6.213711496s" podCreationTimestamp="2025-11-27 07:17:01 +0000 UTC" firstStartedPulling="2025-11-27 07:17:02.412667256 +0000 UTC m=+1460.604711174" lastFinishedPulling="2025-11-27 07:17:05.71600069 +0000 UTC m=+1463.908044608" observedRunningTime="2025-11-27 07:17:07.177373597 +0000 UTC m=+1465.369417535" watchObservedRunningTime="2025-11-27 07:17:07.213711496 +0000 UTC m=+1465.405755414" Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.224129 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=4.040237654 podStartE2EDuration="7.224107037s" podCreationTimestamp="2025-11-27 07:17:00 +0000 UTC" firstStartedPulling="2025-11-27 07:17:02.538757775 +0000 UTC m=+1460.730801693" lastFinishedPulling="2025-11-27 07:17:05.722627158 +0000 UTC m=+1463.914671076" observedRunningTime="2025-11-27 07:17:07.204409116 +0000 UTC m=+1465.396453034" watchObservedRunningTime="2025-11-27 07:17:07.224107037 +0000 UTC m=+1465.416150945" Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.757197 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.883628 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f65ed59e-f6a3-41c1-a460-f95220581cc8-logs\") pod \"f65ed59e-f6a3-41c1-a460-f95220581cc8\" (UID: \"f65ed59e-f6a3-41c1-a460-f95220581cc8\") " Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.883706 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f65ed59e-f6a3-41c1-a460-f95220581cc8-config-data\") pod \"f65ed59e-f6a3-41c1-a460-f95220581cc8\" (UID: \"f65ed59e-f6a3-41c1-a460-f95220581cc8\") " Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.883773 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7m9h\" (UniqueName: \"kubernetes.io/projected/f65ed59e-f6a3-41c1-a460-f95220581cc8-kube-api-access-l7m9h\") pod \"f65ed59e-f6a3-41c1-a460-f95220581cc8\" (UID: \"f65ed59e-f6a3-41c1-a460-f95220581cc8\") " Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.883857 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f65ed59e-f6a3-41c1-a460-f95220581cc8-combined-ca-bundle\") pod \"f65ed59e-f6a3-41c1-a460-f95220581cc8\" (UID: \"f65ed59e-f6a3-41c1-a460-f95220581cc8\") " Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.884335 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f65ed59e-f6a3-41c1-a460-f95220581cc8-logs" (OuterVolumeSpecName: "logs") pod "f65ed59e-f6a3-41c1-a460-f95220581cc8" (UID: "f65ed59e-f6a3-41c1-a460-f95220581cc8"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.894744 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f65ed59e-f6a3-41c1-a460-f95220581cc8-kube-api-access-l7m9h" (OuterVolumeSpecName: "kube-api-access-l7m9h") pod "f65ed59e-f6a3-41c1-a460-f95220581cc8" (UID: "f65ed59e-f6a3-41c1-a460-f95220581cc8"). InnerVolumeSpecName "kube-api-access-l7m9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.914590 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f65ed59e-f6a3-41c1-a460-f95220581cc8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f65ed59e-f6a3-41c1-a460-f95220581cc8" (UID: "f65ed59e-f6a3-41c1-a460-f95220581cc8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.920149 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f65ed59e-f6a3-41c1-a460-f95220581cc8-config-data" (OuterVolumeSpecName: "config-data") pod "f65ed59e-f6a3-41c1-a460-f95220581cc8" (UID: "f65ed59e-f6a3-41c1-a460-f95220581cc8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.986673 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f65ed59e-f6a3-41c1-a460-f95220581cc8-logs\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.986718 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f65ed59e-f6a3-41c1-a460-f95220581cc8-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.986734 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7m9h\" (UniqueName: \"kubernetes.io/projected/f65ed59e-f6a3-41c1-a460-f95220581cc8-kube-api-access-l7m9h\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:07 crc kubenswrapper[4971]: I1127 07:17:07.986750 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f65ed59e-f6a3-41c1-a460-f95220581cc8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.128309 4971 generic.go:334] "Generic (PLEG): container finished" podID="f65ed59e-f6a3-41c1-a460-f95220581cc8" containerID="a78d6a09cd65ac3c2f1c9565d2ac972879cffab04e5c91bc02ce918bf404b759" exitCode=0 Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.128346 4971 generic.go:334] "Generic (PLEG): container finished" podID="f65ed59e-f6a3-41c1-a460-f95220581cc8" containerID="bfaa4c60dd339a87fa016e82ba96c723deea1acee8f45ee26b961f6e11d3a171" exitCode=143 Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.128374 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.128404 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f65ed59e-f6a3-41c1-a460-f95220581cc8","Type":"ContainerDied","Data":"a78d6a09cd65ac3c2f1c9565d2ac972879cffab04e5c91bc02ce918bf404b759"} Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.128448 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f65ed59e-f6a3-41c1-a460-f95220581cc8","Type":"ContainerDied","Data":"bfaa4c60dd339a87fa016e82ba96c723deea1acee8f45ee26b961f6e11d3a171"} Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.128459 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f65ed59e-f6a3-41c1-a460-f95220581cc8","Type":"ContainerDied","Data":"0f316c5c5d2f4c39bf084cab83f3b3587e67da834ce936df12d4e201a6f1b6a2"} Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.128474 4971 scope.go:117] "RemoveContainer" containerID="a78d6a09cd65ac3c2f1c9565d2ac972879cffab04e5c91bc02ce918bf404b759" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.167020 4971 scope.go:117] "RemoveContainer" containerID="bfaa4c60dd339a87fa016e82ba96c723deea1acee8f45ee26b961f6e11d3a171" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.171751 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.189224 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.195716 4971 scope.go:117] "RemoveContainer" containerID="a78d6a09cd65ac3c2f1c9565d2ac972879cffab04e5c91bc02ce918bf404b759" Nov 27 07:17:08 crc kubenswrapper[4971]: E1127 07:17:08.200964 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a78d6a09cd65ac3c2f1c9565d2ac972879cffab04e5c91bc02ce918bf404b759\": container with ID starting with a78d6a09cd65ac3c2f1c9565d2ac972879cffab04e5c91bc02ce918bf404b759 not found: ID does not exist" containerID="a78d6a09cd65ac3c2f1c9565d2ac972879cffab04e5c91bc02ce918bf404b759" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.201018 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a78d6a09cd65ac3c2f1c9565d2ac972879cffab04e5c91bc02ce918bf404b759"} err="failed to get container status \"a78d6a09cd65ac3c2f1c9565d2ac972879cffab04e5c91bc02ce918bf404b759\": rpc error: code = NotFound desc = could not find container \"a78d6a09cd65ac3c2f1c9565d2ac972879cffab04e5c91bc02ce918bf404b759\": container with ID starting with a78d6a09cd65ac3c2f1c9565d2ac972879cffab04e5c91bc02ce918bf404b759 not found: ID does not exist" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.201054 4971 scope.go:117] "RemoveContainer" containerID="bfaa4c60dd339a87fa016e82ba96c723deea1acee8f45ee26b961f6e11d3a171" Nov 27 07:17:08 crc kubenswrapper[4971]: E1127 07:17:08.201466 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfaa4c60dd339a87fa016e82ba96c723deea1acee8f45ee26b961f6e11d3a171\": container with ID starting with bfaa4c60dd339a87fa016e82ba96c723deea1acee8f45ee26b961f6e11d3a171 not found: ID does not exist" containerID="bfaa4c60dd339a87fa016e82ba96c723deea1acee8f45ee26b961f6e11d3a171" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 
07:17:08.201523 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfaa4c60dd339a87fa016e82ba96c723deea1acee8f45ee26b961f6e11d3a171"} err="failed to get container status \"bfaa4c60dd339a87fa016e82ba96c723deea1acee8f45ee26b961f6e11d3a171\": rpc error: code = NotFound desc = could not find container \"bfaa4c60dd339a87fa016e82ba96c723deea1acee8f45ee26b961f6e11d3a171\": container with ID starting with bfaa4c60dd339a87fa016e82ba96c723deea1acee8f45ee26b961f6e11d3a171 not found: ID does not exist" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.201574 4971 scope.go:117] "RemoveContainer" containerID="a78d6a09cd65ac3c2f1c9565d2ac972879cffab04e5c91bc02ce918bf404b759" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.201884 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a78d6a09cd65ac3c2f1c9565d2ac972879cffab04e5c91bc02ce918bf404b759"} err="failed to get container status \"a78d6a09cd65ac3c2f1c9565d2ac972879cffab04e5c91bc02ce918bf404b759\": rpc error: code = NotFound desc = could not find container \"a78d6a09cd65ac3c2f1c9565d2ac972879cffab04e5c91bc02ce918bf404b759\": container with ID starting with a78d6a09cd65ac3c2f1c9565d2ac972879cffab04e5c91bc02ce918bf404b759 not found: ID does not exist" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.201913 4971 scope.go:117] "RemoveContainer" containerID="bfaa4c60dd339a87fa016e82ba96c723deea1acee8f45ee26b961f6e11d3a171" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.202188 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfaa4c60dd339a87fa016e82ba96c723deea1acee8f45ee26b961f6e11d3a171"} err="failed to get container status \"bfaa4c60dd339a87fa016e82ba96c723deea1acee8f45ee26b961f6e11d3a171\": rpc error: code = NotFound desc = could not find container \"bfaa4c60dd339a87fa016e82ba96c723deea1acee8f45ee26b961f6e11d3a171\": container with ID starting with bfaa4c60dd339a87fa016e82ba96c723deea1acee8f45ee26b961f6e11d3a171 not found: ID does not exist" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.206625 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:17:08 crc kubenswrapper[4971]: E1127 07:17:08.207313 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f65ed59e-f6a3-41c1-a460-f95220581cc8" containerName="nova-metadata-log" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.207340 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f65ed59e-f6a3-41c1-a460-f95220581cc8" containerName="nova-metadata-log" Nov 27 07:17:08 crc kubenswrapper[4971]: E1127 07:17:08.207358 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f65ed59e-f6a3-41c1-a460-f95220581cc8" containerName="nova-metadata-metadata" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.207367 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f65ed59e-f6a3-41c1-a460-f95220581cc8" containerName="nova-metadata-metadata" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.207678 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f65ed59e-f6a3-41c1-a460-f95220581cc8" containerName="nova-metadata-log" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.207717 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f65ed59e-f6a3-41c1-a460-f95220581cc8" containerName="nova-metadata-metadata" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.209212 4971 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.212411 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.212661 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.230877 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.292977 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/22792041-7917-428c-bdd2-ccaab3a09693-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " pod="openstack/nova-metadata-0" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.293397 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22792041-7917-428c-bdd2-ccaab3a09693-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " pod="openstack/nova-metadata-0" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.293509 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4f28b\" (UniqueName: \"kubernetes.io/projected/22792041-7917-428c-bdd2-ccaab3a09693-kube-api-access-4f28b\") pod \"nova-metadata-0\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " pod="openstack/nova-metadata-0" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.293590 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22792041-7917-428c-bdd2-ccaab3a09693-logs\") pod \"nova-metadata-0\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " pod="openstack/nova-metadata-0" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.293676 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22792041-7917-428c-bdd2-ccaab3a09693-config-data\") pod \"nova-metadata-0\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " pod="openstack/nova-metadata-0" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.395397 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22792041-7917-428c-bdd2-ccaab3a09693-logs\") pod \"nova-metadata-0\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " pod="openstack/nova-metadata-0" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.395478 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22792041-7917-428c-bdd2-ccaab3a09693-config-data\") pod \"nova-metadata-0\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " pod="openstack/nova-metadata-0" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.395586 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/22792041-7917-428c-bdd2-ccaab3a09693-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " 
pod="openstack/nova-metadata-0" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.395663 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22792041-7917-428c-bdd2-ccaab3a09693-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " pod="openstack/nova-metadata-0" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.395728 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4f28b\" (UniqueName: \"kubernetes.io/projected/22792041-7917-428c-bdd2-ccaab3a09693-kube-api-access-4f28b\") pod \"nova-metadata-0\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " pod="openstack/nova-metadata-0" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.395991 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22792041-7917-428c-bdd2-ccaab3a09693-logs\") pod \"nova-metadata-0\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " pod="openstack/nova-metadata-0" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.400739 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/22792041-7917-428c-bdd2-ccaab3a09693-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " pod="openstack/nova-metadata-0" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.400991 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22792041-7917-428c-bdd2-ccaab3a09693-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " pod="openstack/nova-metadata-0" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.404374 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22792041-7917-428c-bdd2-ccaab3a09693-config-data\") pod \"nova-metadata-0\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " pod="openstack/nova-metadata-0" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.413096 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4f28b\" (UniqueName: \"kubernetes.io/projected/22792041-7917-428c-bdd2-ccaab3a09693-kube-api-access-4f28b\") pod \"nova-metadata-0\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " pod="openstack/nova-metadata-0" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.531432 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 07:17:08 crc kubenswrapper[4971]: I1127 07:17:08.566509 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f65ed59e-f6a3-41c1-a460-f95220581cc8" path="/var/lib/kubelet/pods/f65ed59e-f6a3-41c1-a460-f95220581cc8/volumes" Nov 27 07:17:09 crc kubenswrapper[4971]: I1127 07:17:09.017323 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:17:09 crc kubenswrapper[4971]: I1127 07:17:09.139815 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"22792041-7917-428c-bdd2-ccaab3a09693","Type":"ContainerStarted","Data":"c78ebe693ba70f40d68cfa78c72412e2fb1cbc678837bdc46e9eb25900bc69cc"} Nov 27 07:17:10 crc kubenswrapper[4971]: I1127 07:17:10.154697 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"22792041-7917-428c-bdd2-ccaab3a09693","Type":"ContainerStarted","Data":"d5133dfdad426e6738699db512a348f8d837cd871809a765334c58e2fec65b80"} Nov 27 07:17:10 crc kubenswrapper[4971]: I1127 07:17:10.155303 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"22792041-7917-428c-bdd2-ccaab3a09693","Type":"ContainerStarted","Data":"b0a8551a5229618823aabf1e798dfc1a18476d809bf1d614f5be22c29d8acad7"} Nov 27 07:17:10 crc kubenswrapper[4971]: I1127 07:17:10.185402 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.185382369 podStartE2EDuration="2.185382369s" podCreationTimestamp="2025-11-27 07:17:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:17:10.176438408 +0000 UTC m=+1468.368482326" watchObservedRunningTime="2025-11-27 07:17:10.185382369 +0000 UTC m=+1468.377426287" Nov 27 07:17:11 crc kubenswrapper[4971]: I1127 07:17:11.165959 4971 generic.go:334] "Generic (PLEG): container finished" podID="158bee38-b35a-43f7-a8e2-547220d8f339" containerID="7844a05a6c63d726288200a10d028c8c4ca0f2610d820f5ec82c8c30a346ef9d" exitCode=0 Nov 27 07:17:11 crc kubenswrapper[4971]: I1127 07:17:11.166189 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6npvv" event={"ID":"158bee38-b35a-43f7-a8e2-547220d8f339","Type":"ContainerDied","Data":"7844a05a6c63d726288200a10d028c8c4ca0f2610d820f5ec82c8c30a346ef9d"} Nov 27 07:17:11 crc kubenswrapper[4971]: I1127 07:17:11.167521 4971 generic.go:334] "Generic (PLEG): container finished" podID="4e9e64a6-205f-4e28-87a7-d9061d5931d1" containerID="1b90d79708e13d8b636a1a0c7a9d004c9d25e1cdfbdf4629947028c179260d11" exitCode=0 Nov 27 07:17:11 crc kubenswrapper[4971]: I1127 07:17:11.167558 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-k2bcl" event={"ID":"4e9e64a6-205f-4e28-87a7-d9061d5931d1","Type":"ContainerDied","Data":"1b90d79708e13d8b636a1a0c7a9d004c9d25e1cdfbdf4629947028c179260d11"} Nov 27 07:17:11 crc kubenswrapper[4971]: I1127 07:17:11.460049 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:11 crc kubenswrapper[4971]: I1127 07:17:11.543059 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9895c4665-wr5mf"] Nov 27 07:17:11 crc kubenswrapper[4971]: I1127 07:17:11.543314 4971 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/dnsmasq-dns-9895c4665-wr5mf" podUID="52ac96a5-4ef4-48ce-80d5-ca16e20de960" containerName="dnsmasq-dns" containerID="cri-o://0ee65125860516ca5db7cb878fef64dec49ccb71ab10d8dca20aaf709802f06b" gracePeriod=10 Nov 27 07:17:11 crc kubenswrapper[4971]: I1127 07:17:11.687925 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 27 07:17:11 crc kubenswrapper[4971]: I1127 07:17:11.687972 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 27 07:17:11 crc kubenswrapper[4971]: I1127 07:17:11.855997 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 27 07:17:11 crc kubenswrapper[4971]: I1127 07:17:11.856047 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 27 07:17:11 crc kubenswrapper[4971]: I1127 07:17:11.902853 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 27 07:17:11 crc kubenswrapper[4971]: I1127 07:17:11.910851 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.074007 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9895c4665-wr5mf" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.178583 4971 generic.go:334] "Generic (PLEG): container finished" podID="52ac96a5-4ef4-48ce-80d5-ca16e20de960" containerID="0ee65125860516ca5db7cb878fef64dec49ccb71ab10d8dca20aaf709802f06b" exitCode=0 Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.178926 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-9895c4665-wr5mf" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.180356 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9895c4665-wr5mf" event={"ID":"52ac96a5-4ef4-48ce-80d5-ca16e20de960","Type":"ContainerDied","Data":"0ee65125860516ca5db7cb878fef64dec49ccb71ab10d8dca20aaf709802f06b"} Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.180431 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9895c4665-wr5mf" event={"ID":"52ac96a5-4ef4-48ce-80d5-ca16e20de960","Type":"ContainerDied","Data":"ce9bb0536c22314b74059002820957bb2ccdd23833d9d8995cb1d13dbb3e9dbf"} Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.180463 4971 scope.go:117] "RemoveContainer" containerID="0ee65125860516ca5db7cb878fef64dec49ccb71ab10d8dca20aaf709802f06b" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.182886 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-dns-swift-storage-0\") pod \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.183148 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-dns-svc\") pod \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.183823 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-ovsdbserver-nb\") pod \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.183895 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-config\") pod \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.183943 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z4bgd\" (UniqueName: \"kubernetes.io/projected/52ac96a5-4ef4-48ce-80d5-ca16e20de960-kube-api-access-z4bgd\") pod \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.184069 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-ovsdbserver-sb\") pod \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\" (UID: \"52ac96a5-4ef4-48ce-80d5-ca16e20de960\") " Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.216203 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52ac96a5-4ef4-48ce-80d5-ca16e20de960-kube-api-access-z4bgd" (OuterVolumeSpecName: "kube-api-access-z4bgd") pod "52ac96a5-4ef4-48ce-80d5-ca16e20de960" (UID: "52ac96a5-4ef4-48ce-80d5-ca16e20de960"). InnerVolumeSpecName "kube-api-access-z4bgd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.234201 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.304758 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-config" (OuterVolumeSpecName: "config") pod "52ac96a5-4ef4-48ce-80d5-ca16e20de960" (UID: "52ac96a5-4ef4-48ce-80d5-ca16e20de960"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.307544 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.307595 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z4bgd\" (UniqueName: \"kubernetes.io/projected/52ac96a5-4ef4-48ce-80d5-ca16e20de960-kube-api-access-z4bgd\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.313254 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "52ac96a5-4ef4-48ce-80d5-ca16e20de960" (UID: "52ac96a5-4ef4-48ce-80d5-ca16e20de960"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.319507 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "52ac96a5-4ef4-48ce-80d5-ca16e20de960" (UID: "52ac96a5-4ef4-48ce-80d5-ca16e20de960"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.339825 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "52ac96a5-4ef4-48ce-80d5-ca16e20de960" (UID: "52ac96a5-4ef4-48ce-80d5-ca16e20de960"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.378220 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "52ac96a5-4ef4-48ce-80d5-ca16e20de960" (UID: "52ac96a5-4ef4-48ce-80d5-ca16e20de960"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.429155 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.429204 4971 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.429217 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.429228 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/52ac96a5-4ef4-48ce-80d5-ca16e20de960-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.453208 4971 scope.go:117] "RemoveContainer" containerID="d3651eeb82ea44564d71dff0627fcdcd88d912d928e661b235d834584973935e" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.544658 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9895c4665-wr5mf"] Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.545734 4971 scope.go:117] "RemoveContainer" containerID="0ee65125860516ca5db7cb878fef64dec49ccb71ab10d8dca20aaf709802f06b" Nov 27 07:17:12 crc kubenswrapper[4971]: E1127 07:17:12.550644 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ee65125860516ca5db7cb878fef64dec49ccb71ab10d8dca20aaf709802f06b\": container with ID starting with 0ee65125860516ca5db7cb878fef64dec49ccb71ab10d8dca20aaf709802f06b not found: ID does not exist" containerID="0ee65125860516ca5db7cb878fef64dec49ccb71ab10d8dca20aaf709802f06b" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.550688 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ee65125860516ca5db7cb878fef64dec49ccb71ab10d8dca20aaf709802f06b"} err="failed to get container status \"0ee65125860516ca5db7cb878fef64dec49ccb71ab10d8dca20aaf709802f06b\": rpc error: code = NotFound desc = could not find container \"0ee65125860516ca5db7cb878fef64dec49ccb71ab10d8dca20aaf709802f06b\": container with ID starting with 0ee65125860516ca5db7cb878fef64dec49ccb71ab10d8dca20aaf709802f06b not found: ID does not exist" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.550715 4971 scope.go:117] "RemoveContainer" containerID="d3651eeb82ea44564d71dff0627fcdcd88d912d928e661b235d834584973935e" Nov 27 07:17:12 crc kubenswrapper[4971]: E1127 07:17:12.551131 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3651eeb82ea44564d71dff0627fcdcd88d912d928e661b235d834584973935e\": container with ID starting with d3651eeb82ea44564d71dff0627fcdcd88d912d928e661b235d834584973935e not found: ID does not exist" containerID="d3651eeb82ea44564d71dff0627fcdcd88d912d928e661b235d834584973935e" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.551149 4971 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d3651eeb82ea44564d71dff0627fcdcd88d912d928e661b235d834584973935e"} err="failed to get container status \"d3651eeb82ea44564d71dff0627fcdcd88d912d928e661b235d834584973935e\": rpc error: code = NotFound desc = could not find container \"d3651eeb82ea44564d71dff0627fcdcd88d912d928e661b235d834584973935e\": container with ID starting with d3651eeb82ea44564d71dff0627fcdcd88d912d928e661b235d834584973935e not found: ID does not exist" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.560679 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-9895c4665-wr5mf"] Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.780624 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7b59348c-fd2b-41f7-8826-54b28f71e3a0" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.186:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.780924 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7b59348c-fd2b-41f7-8826-54b28f71e3a0" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.186:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.860316 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6npvv" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.865668 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-k2bcl" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.939221 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/158bee38-b35a-43f7-a8e2-547220d8f339-combined-ca-bundle\") pod \"158bee38-b35a-43f7-a8e2-547220d8f339\" (UID: \"158bee38-b35a-43f7-a8e2-547220d8f339\") " Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.939279 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twlhr\" (UniqueName: \"kubernetes.io/projected/158bee38-b35a-43f7-a8e2-547220d8f339-kube-api-access-twlhr\") pod \"158bee38-b35a-43f7-a8e2-547220d8f339\" (UID: \"158bee38-b35a-43f7-a8e2-547220d8f339\") " Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.939361 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e9e64a6-205f-4e28-87a7-d9061d5931d1-combined-ca-bundle\") pod \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\" (UID: \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\") " Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.939415 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dz2zv\" (UniqueName: \"kubernetes.io/projected/4e9e64a6-205f-4e28-87a7-d9061d5931d1-kube-api-access-dz2zv\") pod \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\" (UID: \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\") " Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.939552 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/158bee38-b35a-43f7-a8e2-547220d8f339-scripts\") pod \"158bee38-b35a-43f7-a8e2-547220d8f339\" (UID: \"158bee38-b35a-43f7-a8e2-547220d8f339\") " Nov 27 07:17:12 crc 
kubenswrapper[4971]: I1127 07:17:12.939575 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e9e64a6-205f-4e28-87a7-d9061d5931d1-scripts\") pod \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\" (UID: \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\") " Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.940256 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/158bee38-b35a-43f7-a8e2-547220d8f339-config-data\") pod \"158bee38-b35a-43f7-a8e2-547220d8f339\" (UID: \"158bee38-b35a-43f7-a8e2-547220d8f339\") " Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.940303 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e9e64a6-205f-4e28-87a7-d9061d5931d1-config-data\") pod \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\" (UID: \"4e9e64a6-205f-4e28-87a7-d9061d5931d1\") " Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.945471 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e9e64a6-205f-4e28-87a7-d9061d5931d1-kube-api-access-dz2zv" (OuterVolumeSpecName: "kube-api-access-dz2zv") pod "4e9e64a6-205f-4e28-87a7-d9061d5931d1" (UID: "4e9e64a6-205f-4e28-87a7-d9061d5931d1"). InnerVolumeSpecName "kube-api-access-dz2zv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.945498 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/158bee38-b35a-43f7-a8e2-547220d8f339-scripts" (OuterVolumeSpecName: "scripts") pod "158bee38-b35a-43f7-a8e2-547220d8f339" (UID: "158bee38-b35a-43f7-a8e2-547220d8f339"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.949686 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/158bee38-b35a-43f7-a8e2-547220d8f339-kube-api-access-twlhr" (OuterVolumeSpecName: "kube-api-access-twlhr") pod "158bee38-b35a-43f7-a8e2-547220d8f339" (UID: "158bee38-b35a-43f7-a8e2-547220d8f339"). InnerVolumeSpecName "kube-api-access-twlhr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.963659 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e9e64a6-205f-4e28-87a7-d9061d5931d1-scripts" (OuterVolumeSpecName: "scripts") pod "4e9e64a6-205f-4e28-87a7-d9061d5931d1" (UID: "4e9e64a6-205f-4e28-87a7-d9061d5931d1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.971834 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/158bee38-b35a-43f7-a8e2-547220d8f339-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "158bee38-b35a-43f7-a8e2-547220d8f339" (UID: "158bee38-b35a-43f7-a8e2-547220d8f339"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.977734 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/158bee38-b35a-43f7-a8e2-547220d8f339-config-data" (OuterVolumeSpecName: "config-data") pod "158bee38-b35a-43f7-a8e2-547220d8f339" (UID: "158bee38-b35a-43f7-a8e2-547220d8f339"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.983043 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e9e64a6-205f-4e28-87a7-d9061d5931d1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e9e64a6-205f-4e28-87a7-d9061d5931d1" (UID: "4e9e64a6-205f-4e28-87a7-d9061d5931d1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:12 crc kubenswrapper[4971]: I1127 07:17:12.985710 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e9e64a6-205f-4e28-87a7-d9061d5931d1-config-data" (OuterVolumeSpecName: "config-data") pod "4e9e64a6-205f-4e28-87a7-d9061d5931d1" (UID: "4e9e64a6-205f-4e28-87a7-d9061d5931d1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.043070 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/158bee38-b35a-43f7-a8e2-547220d8f339-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.043109 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twlhr\" (UniqueName: \"kubernetes.io/projected/158bee38-b35a-43f7-a8e2-547220d8f339-kube-api-access-twlhr\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.043122 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e9e64a6-205f-4e28-87a7-d9061d5931d1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.043131 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dz2zv\" (UniqueName: \"kubernetes.io/projected/4e9e64a6-205f-4e28-87a7-d9061d5931d1-kube-api-access-dz2zv\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.043140 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/158bee38-b35a-43f7-a8e2-547220d8f339-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.043149 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e9e64a6-205f-4e28-87a7-d9061d5931d1-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.043158 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/158bee38-b35a-43f7-a8e2-547220d8f339-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.043169 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e9e64a6-205f-4e28-87a7-d9061d5931d1-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.196678 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6npvv" event={"ID":"158bee38-b35a-43f7-a8e2-547220d8f339","Type":"ContainerDied","Data":"e79bda654e3e5e68fdc4007bedd7d5e8119bc2424fc52aa528856a83c2cab3ad"} Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.197064 4971 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="e79bda654e3e5e68fdc4007bedd7d5e8119bc2424fc52aa528856a83c2cab3ad" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.197160 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6npvv" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.202390 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-k2bcl" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.210851 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-k2bcl" event={"ID":"4e9e64a6-205f-4e28-87a7-d9061d5931d1","Type":"ContainerDied","Data":"0a272e37ac9a7cd9b4e62098dcfe1fc3c58b3368b1c0db6fc386b530bf14a775"} Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.210918 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a272e37ac9a7cd9b4e62098dcfe1fc3c58b3368b1c0db6fc386b530bf14a775" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.283594 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 27 07:17:13 crc kubenswrapper[4971]: E1127 07:17:13.284091 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e9e64a6-205f-4e28-87a7-d9061d5931d1" containerName="nova-manage" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.284107 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e9e64a6-205f-4e28-87a7-d9061d5931d1" containerName="nova-manage" Nov 27 07:17:13 crc kubenswrapper[4971]: E1127 07:17:13.284125 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52ac96a5-4ef4-48ce-80d5-ca16e20de960" containerName="init" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.284131 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="52ac96a5-4ef4-48ce-80d5-ca16e20de960" containerName="init" Nov 27 07:17:13 crc kubenswrapper[4971]: E1127 07:17:13.284166 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="158bee38-b35a-43f7-a8e2-547220d8f339" containerName="nova-cell1-conductor-db-sync" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.284173 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="158bee38-b35a-43f7-a8e2-547220d8f339" containerName="nova-cell1-conductor-db-sync" Nov 27 07:17:13 crc kubenswrapper[4971]: E1127 07:17:13.284183 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52ac96a5-4ef4-48ce-80d5-ca16e20de960" containerName="dnsmasq-dns" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.284189 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="52ac96a5-4ef4-48ce-80d5-ca16e20de960" containerName="dnsmasq-dns" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.284364 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="52ac96a5-4ef4-48ce-80d5-ca16e20de960" containerName="dnsmasq-dns" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.284381 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e9e64a6-205f-4e28-87a7-d9061d5931d1" containerName="nova-manage" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.284396 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="158bee38-b35a-43f7-a8e2-547220d8f339" containerName="nova-cell1-conductor-db-sync" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.285105 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.289212 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.305225 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.353061 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f461fed-9df2-44a5-b99c-17f30adf0d9c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7f461fed-9df2-44a5-b99c-17f30adf0d9c\") " pod="openstack/nova-cell1-conductor-0" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.353206 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4nfw\" (UniqueName: \"kubernetes.io/projected/7f461fed-9df2-44a5-b99c-17f30adf0d9c-kube-api-access-b4nfw\") pod \"nova-cell1-conductor-0\" (UID: \"7f461fed-9df2-44a5-b99c-17f30adf0d9c\") " pod="openstack/nova-cell1-conductor-0" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.353253 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f461fed-9df2-44a5-b99c-17f30adf0d9c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7f461fed-9df2-44a5-b99c-17f30adf0d9c\") " pod="openstack/nova-cell1-conductor-0" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.402236 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.402574 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7b59348c-fd2b-41f7-8826-54b28f71e3a0" containerName="nova-api-log" containerID="cri-o://197022aa48aedf07992a9135db0f21fa56cfab40635a7b4cefc0d0f0dbcf54f5" gracePeriod=30 Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.403127 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7b59348c-fd2b-41f7-8826-54b28f71e3a0" containerName="nova-api-api" containerID="cri-o://3817e83567971603f678466761ddbb7dc6a837bc262aeb0c2e6364436a020855" gracePeriod=30 Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.436863 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.437144 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="22792041-7917-428c-bdd2-ccaab3a09693" containerName="nova-metadata-log" containerID="cri-o://b0a8551a5229618823aabf1e798dfc1a18476d809bf1d614f5be22c29d8acad7" gracePeriod=30 Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.437658 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="22792041-7917-428c-bdd2-ccaab3a09693" containerName="nova-metadata-metadata" containerID="cri-o://d5133dfdad426e6738699db512a348f8d837cd871809a765334c58e2fec65b80" gracePeriod=30 Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.456317 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4nfw\" (UniqueName: 
\"kubernetes.io/projected/7f461fed-9df2-44a5-b99c-17f30adf0d9c-kube-api-access-b4nfw\") pod \"nova-cell1-conductor-0\" (UID: \"7f461fed-9df2-44a5-b99c-17f30adf0d9c\") " pod="openstack/nova-cell1-conductor-0" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.456727 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f461fed-9df2-44a5-b99c-17f30adf0d9c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7f461fed-9df2-44a5-b99c-17f30adf0d9c\") " pod="openstack/nova-cell1-conductor-0" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.456909 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f461fed-9df2-44a5-b99c-17f30adf0d9c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7f461fed-9df2-44a5-b99c-17f30adf0d9c\") " pod="openstack/nova-cell1-conductor-0" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.466478 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f461fed-9df2-44a5-b99c-17f30adf0d9c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7f461fed-9df2-44a5-b99c-17f30adf0d9c\") " pod="openstack/nova-cell1-conductor-0" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.468299 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f461fed-9df2-44a5-b99c-17f30adf0d9c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7f461fed-9df2-44a5-b99c-17f30adf0d9c\") " pod="openstack/nova-cell1-conductor-0" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.475829 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4nfw\" (UniqueName: \"kubernetes.io/projected/7f461fed-9df2-44a5-b99c-17f30adf0d9c-kube-api-access-b4nfw\") pod \"nova-cell1-conductor-0\" (UID: \"7f461fed-9df2-44a5-b99c-17f30adf0d9c\") " pod="openstack/nova-cell1-conductor-0" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.509111 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.531828 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.532714 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 27 07:17:13 crc kubenswrapper[4971]: I1127 07:17:13.622612 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.127373 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.144686 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.231852 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7f461fed-9df2-44a5-b99c-17f30adf0d9c","Type":"ContainerStarted","Data":"088e9ad3206eafb668d6b41c19fb219140daba1f69cf0186fd2f271c5c6aa17f"} Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.234608 4971 generic.go:334] "Generic (PLEG): container finished" podID="22792041-7917-428c-bdd2-ccaab3a09693" containerID="d5133dfdad426e6738699db512a348f8d837cd871809a765334c58e2fec65b80" exitCode=0 Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.234638 4971 generic.go:334] "Generic (PLEG): container finished" podID="22792041-7917-428c-bdd2-ccaab3a09693" containerID="b0a8551a5229618823aabf1e798dfc1a18476d809bf1d614f5be22c29d8acad7" exitCode=143 Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.234689 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"22792041-7917-428c-bdd2-ccaab3a09693","Type":"ContainerDied","Data":"d5133dfdad426e6738699db512a348f8d837cd871809a765334c58e2fec65b80"} Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.234717 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"22792041-7917-428c-bdd2-ccaab3a09693","Type":"ContainerDied","Data":"b0a8551a5229618823aabf1e798dfc1a18476d809bf1d614f5be22c29d8acad7"} Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.234731 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"22792041-7917-428c-bdd2-ccaab3a09693","Type":"ContainerDied","Data":"c78ebe693ba70f40d68cfa78c72412e2fb1cbc678837bdc46e9eb25900bc69cc"} Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.234751 4971 scope.go:117] "RemoveContainer" containerID="d5133dfdad426e6738699db512a348f8d837cd871809a765334c58e2fec65b80" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.234754 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.236577 4971 generic.go:334] "Generic (PLEG): container finished" podID="7b59348c-fd2b-41f7-8826-54b28f71e3a0" containerID="197022aa48aedf07992a9135db0f21fa56cfab40635a7b4cefc0d0f0dbcf54f5" exitCode=143 Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.236771 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="8c37f29c-a94c-4c2b-87e1-62bffc246889" containerName="nova-scheduler-scheduler" containerID="cri-o://fe828838b18c9b976f9afd340d33b50ae6a1e1ad189c3f7cef303c102d9f0311" gracePeriod=30 Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.236807 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7b59348c-fd2b-41f7-8826-54b28f71e3a0","Type":"ContainerDied","Data":"197022aa48aedf07992a9135db0f21fa56cfab40635a7b4cefc0d0f0dbcf54f5"} Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.279465 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22792041-7917-428c-bdd2-ccaab3a09693-logs\") pod \"22792041-7917-428c-bdd2-ccaab3a09693\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.279666 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4f28b\" (UniqueName: \"kubernetes.io/projected/22792041-7917-428c-bdd2-ccaab3a09693-kube-api-access-4f28b\") pod \"22792041-7917-428c-bdd2-ccaab3a09693\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.279734 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22792041-7917-428c-bdd2-ccaab3a09693-combined-ca-bundle\") pod \"22792041-7917-428c-bdd2-ccaab3a09693\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.279848 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22792041-7917-428c-bdd2-ccaab3a09693-config-data\") pod \"22792041-7917-428c-bdd2-ccaab3a09693\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.280072 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22792041-7917-428c-bdd2-ccaab3a09693-logs" (OuterVolumeSpecName: "logs") pod "22792041-7917-428c-bdd2-ccaab3a09693" (UID: "22792041-7917-428c-bdd2-ccaab3a09693"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.280084 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/22792041-7917-428c-bdd2-ccaab3a09693-nova-metadata-tls-certs\") pod \"22792041-7917-428c-bdd2-ccaab3a09693\" (UID: \"22792041-7917-428c-bdd2-ccaab3a09693\") " Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.281703 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22792041-7917-428c-bdd2-ccaab3a09693-logs\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.285789 4971 scope.go:117] "RemoveContainer" containerID="b0a8551a5229618823aabf1e798dfc1a18476d809bf1d614f5be22c29d8acad7" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.286201 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22792041-7917-428c-bdd2-ccaab3a09693-kube-api-access-4f28b" (OuterVolumeSpecName: "kube-api-access-4f28b") pod "22792041-7917-428c-bdd2-ccaab3a09693" (UID: "22792041-7917-428c-bdd2-ccaab3a09693"). InnerVolumeSpecName "kube-api-access-4f28b". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.309950 4971 scope.go:117] "RemoveContainer" containerID="d5133dfdad426e6738699db512a348f8d837cd871809a765334c58e2fec65b80" Nov 27 07:17:14 crc kubenswrapper[4971]: E1127 07:17:14.310334 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5133dfdad426e6738699db512a348f8d837cd871809a765334c58e2fec65b80\": container with ID starting with d5133dfdad426e6738699db512a348f8d837cd871809a765334c58e2fec65b80 not found: ID does not exist" containerID="d5133dfdad426e6738699db512a348f8d837cd871809a765334c58e2fec65b80" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.310365 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5133dfdad426e6738699db512a348f8d837cd871809a765334c58e2fec65b80"} err="failed to get container status \"d5133dfdad426e6738699db512a348f8d837cd871809a765334c58e2fec65b80\": rpc error: code = NotFound desc = could not find container \"d5133dfdad426e6738699db512a348f8d837cd871809a765334c58e2fec65b80\": container with ID starting with d5133dfdad426e6738699db512a348f8d837cd871809a765334c58e2fec65b80 not found: ID does not exist" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.310410 4971 scope.go:117] "RemoveContainer" containerID="b0a8551a5229618823aabf1e798dfc1a18476d809bf1d614f5be22c29d8acad7" Nov 27 07:17:14 crc kubenswrapper[4971]: E1127 07:17:14.310699 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0a8551a5229618823aabf1e798dfc1a18476d809bf1d614f5be22c29d8acad7\": container with ID starting with b0a8551a5229618823aabf1e798dfc1a18476d809bf1d614f5be22c29d8acad7 not found: ID does not exist" containerID="b0a8551a5229618823aabf1e798dfc1a18476d809bf1d614f5be22c29d8acad7" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.310718 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0a8551a5229618823aabf1e798dfc1a18476d809bf1d614f5be22c29d8acad7"} err="failed to get container status \"b0a8551a5229618823aabf1e798dfc1a18476d809bf1d614f5be22c29d8acad7\": rpc error: code = 
NotFound desc = could not find container \"b0a8551a5229618823aabf1e798dfc1a18476d809bf1d614f5be22c29d8acad7\": container with ID starting with b0a8551a5229618823aabf1e798dfc1a18476d809bf1d614f5be22c29d8acad7 not found: ID does not exist" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.310729 4971 scope.go:117] "RemoveContainer" containerID="d5133dfdad426e6738699db512a348f8d837cd871809a765334c58e2fec65b80" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.310987 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5133dfdad426e6738699db512a348f8d837cd871809a765334c58e2fec65b80"} err="failed to get container status \"d5133dfdad426e6738699db512a348f8d837cd871809a765334c58e2fec65b80\": rpc error: code = NotFound desc = could not find container \"d5133dfdad426e6738699db512a348f8d837cd871809a765334c58e2fec65b80\": container with ID starting with d5133dfdad426e6738699db512a348f8d837cd871809a765334c58e2fec65b80 not found: ID does not exist" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.311005 4971 scope.go:117] "RemoveContainer" containerID="b0a8551a5229618823aabf1e798dfc1a18476d809bf1d614f5be22c29d8acad7" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.311454 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0a8551a5229618823aabf1e798dfc1a18476d809bf1d614f5be22c29d8acad7"} err="failed to get container status \"b0a8551a5229618823aabf1e798dfc1a18476d809bf1d614f5be22c29d8acad7\": rpc error: code = NotFound desc = could not find container \"b0a8551a5229618823aabf1e798dfc1a18476d809bf1d614f5be22c29d8acad7\": container with ID starting with b0a8551a5229618823aabf1e798dfc1a18476d809bf1d614f5be22c29d8acad7 not found: ID does not exist" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.318826 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22792041-7917-428c-bdd2-ccaab3a09693-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "22792041-7917-428c-bdd2-ccaab3a09693" (UID: "22792041-7917-428c-bdd2-ccaab3a09693"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.320884 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22792041-7917-428c-bdd2-ccaab3a09693-config-data" (OuterVolumeSpecName: "config-data") pod "22792041-7917-428c-bdd2-ccaab3a09693" (UID: "22792041-7917-428c-bdd2-ccaab3a09693"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.349094 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22792041-7917-428c-bdd2-ccaab3a09693-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "22792041-7917-428c-bdd2-ccaab3a09693" (UID: "22792041-7917-428c-bdd2-ccaab3a09693"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.383357 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4f28b\" (UniqueName: \"kubernetes.io/projected/22792041-7917-428c-bdd2-ccaab3a09693-kube-api-access-4f28b\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.383399 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22792041-7917-428c-bdd2-ccaab3a09693-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.383412 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22792041-7917-428c-bdd2-ccaab3a09693-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.383427 4971 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/22792041-7917-428c-bdd2-ccaab3a09693-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.560062 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52ac96a5-4ef4-48ce-80d5-ca16e20de960" path="/var/lib/kubelet/pods/52ac96a5-4ef4-48ce-80d5-ca16e20de960/volumes" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.573659 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.583275 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.594785 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:17:14 crc kubenswrapper[4971]: E1127 07:17:14.595294 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22792041-7917-428c-bdd2-ccaab3a09693" containerName="nova-metadata-metadata" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.595319 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="22792041-7917-428c-bdd2-ccaab3a09693" containerName="nova-metadata-metadata" Nov 27 07:17:14 crc kubenswrapper[4971]: E1127 07:17:14.595346 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22792041-7917-428c-bdd2-ccaab3a09693" containerName="nova-metadata-log" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.595354 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="22792041-7917-428c-bdd2-ccaab3a09693" containerName="nova-metadata-log" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.595586 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="22792041-7917-428c-bdd2-ccaab3a09693" containerName="nova-metadata-metadata" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.595607 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="22792041-7917-428c-bdd2-ccaab3a09693" containerName="nova-metadata-log" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.596646 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.599807 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.600361 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.609598 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.791164 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0195ada6-2a69-427f-8041-166dfdacf583-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"0195ada6-2a69-427f-8041-166dfdacf583\") " pod="openstack/nova-metadata-0" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.791436 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0195ada6-2a69-427f-8041-166dfdacf583-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0195ada6-2a69-427f-8041-166dfdacf583\") " pod="openstack/nova-metadata-0" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.791498 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0195ada6-2a69-427f-8041-166dfdacf583-logs\") pod \"nova-metadata-0\" (UID: \"0195ada6-2a69-427f-8041-166dfdacf583\") " pod="openstack/nova-metadata-0" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.791525 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0195ada6-2a69-427f-8041-166dfdacf583-config-data\") pod \"nova-metadata-0\" (UID: \"0195ada6-2a69-427f-8041-166dfdacf583\") " pod="openstack/nova-metadata-0" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.791589 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-st7td\" (UniqueName: \"kubernetes.io/projected/0195ada6-2a69-427f-8041-166dfdacf583-kube-api-access-st7td\") pod \"nova-metadata-0\" (UID: \"0195ada6-2a69-427f-8041-166dfdacf583\") " pod="openstack/nova-metadata-0" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.895349 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-st7td\" (UniqueName: \"kubernetes.io/projected/0195ada6-2a69-427f-8041-166dfdacf583-kube-api-access-st7td\") pod \"nova-metadata-0\" (UID: \"0195ada6-2a69-427f-8041-166dfdacf583\") " pod="openstack/nova-metadata-0" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.895479 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0195ada6-2a69-427f-8041-166dfdacf583-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"0195ada6-2a69-427f-8041-166dfdacf583\") " pod="openstack/nova-metadata-0" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.895592 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0195ada6-2a69-427f-8041-166dfdacf583-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: 
\"0195ada6-2a69-427f-8041-166dfdacf583\") " pod="openstack/nova-metadata-0" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.895638 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0195ada6-2a69-427f-8041-166dfdacf583-logs\") pod \"nova-metadata-0\" (UID: \"0195ada6-2a69-427f-8041-166dfdacf583\") " pod="openstack/nova-metadata-0" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.895660 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0195ada6-2a69-427f-8041-166dfdacf583-config-data\") pod \"nova-metadata-0\" (UID: \"0195ada6-2a69-427f-8041-166dfdacf583\") " pod="openstack/nova-metadata-0" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.897816 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0195ada6-2a69-427f-8041-166dfdacf583-logs\") pod \"nova-metadata-0\" (UID: \"0195ada6-2a69-427f-8041-166dfdacf583\") " pod="openstack/nova-metadata-0" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.901414 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0195ada6-2a69-427f-8041-166dfdacf583-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"0195ada6-2a69-427f-8041-166dfdacf583\") " pod="openstack/nova-metadata-0" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.901859 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0195ada6-2a69-427f-8041-166dfdacf583-config-data\") pod \"nova-metadata-0\" (UID: \"0195ada6-2a69-427f-8041-166dfdacf583\") " pod="openstack/nova-metadata-0" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.902862 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0195ada6-2a69-427f-8041-166dfdacf583-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0195ada6-2a69-427f-8041-166dfdacf583\") " pod="openstack/nova-metadata-0" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.917168 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-st7td\" (UniqueName: \"kubernetes.io/projected/0195ada6-2a69-427f-8041-166dfdacf583-kube-api-access-st7td\") pod \"nova-metadata-0\" (UID: \"0195ada6-2a69-427f-8041-166dfdacf583\") " pod="openstack/nova-metadata-0" Nov 27 07:17:14 crc kubenswrapper[4971]: I1127 07:17:14.947021 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 07:17:15 crc kubenswrapper[4971]: I1127 07:17:15.248451 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7f461fed-9df2-44a5-b99c-17f30adf0d9c","Type":"ContainerStarted","Data":"bacf1459d637ebaf1473ffbba9c90d2886c77004f0c01d13c0668368ade96df2"} Nov 27 07:17:15 crc kubenswrapper[4971]: I1127 07:17:15.248972 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Nov 27 07:17:15 crc kubenswrapper[4971]: I1127 07:17:15.266013 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.265993517 podStartE2EDuration="2.265993517s" podCreationTimestamp="2025-11-27 07:17:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:17:15.263244352 +0000 UTC m=+1473.455288270" watchObservedRunningTime="2025-11-27 07:17:15.265993517 +0000 UTC m=+1473.458037425" Nov 27 07:17:15 crc kubenswrapper[4971]: I1127 07:17:15.395881 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:17:16 crc kubenswrapper[4971]: I1127 07:17:16.259433 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0195ada6-2a69-427f-8041-166dfdacf583","Type":"ContainerStarted","Data":"991ebc52378df336553930ac80c5badc6b2daafef116a6928889734ba2a45fa9"} Nov 27 07:17:16 crc kubenswrapper[4971]: I1127 07:17:16.259769 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0195ada6-2a69-427f-8041-166dfdacf583","Type":"ContainerStarted","Data":"dfae08e08ac1dc8cbd0db6a30ba44f2a2d19a35f970e1bc67d0f92ecbeac6049"} Nov 27 07:17:16 crc kubenswrapper[4971]: I1127 07:17:16.259780 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0195ada6-2a69-427f-8041-166dfdacf583","Type":"ContainerStarted","Data":"7f144bbf6aba2a3f733129e9672651af873a48760fe1d26ea94bbdf8b2fe53b7"} Nov 27 07:17:16 crc kubenswrapper[4971]: I1127 07:17:16.282358 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.282336855 podStartE2EDuration="2.282336855s" podCreationTimestamp="2025-11-27 07:17:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:17:16.278745258 +0000 UTC m=+1474.470789176" watchObservedRunningTime="2025-11-27 07:17:16.282336855 +0000 UTC m=+1474.474380783" Nov 27 07:17:16 crc kubenswrapper[4971]: I1127 07:17:16.561042 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22792041-7917-428c-bdd2-ccaab3a09693" path="/var/lib/kubelet/pods/22792041-7917-428c-bdd2-ccaab3a09693/volumes" Nov 27 07:17:16 crc kubenswrapper[4971]: E1127 07:17:16.857723 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="fe828838b18c9b976f9afd340d33b50ae6a1e1ad189c3f7cef303c102d9f0311" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 27 07:17:16 crc kubenswrapper[4971]: E1127 07:17:16.859414 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an 
exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="fe828838b18c9b976f9afd340d33b50ae6a1e1ad189c3f7cef303c102d9f0311" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 27 07:17:16 crc kubenswrapper[4971]: E1127 07:17:16.860659 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="fe828838b18c9b976f9afd340d33b50ae6a1e1ad189c3f7cef303c102d9f0311" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 27 07:17:16 crc kubenswrapper[4971]: E1127 07:17:16.860692 4971 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="8c37f29c-a94c-4c2b-87e1-62bffc246889" containerName="nova-scheduler-scheduler" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.023461 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.098215 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c37f29c-a94c-4c2b-87e1-62bffc246889-combined-ca-bundle\") pod \"8c37f29c-a94c-4c2b-87e1-62bffc246889\" (UID: \"8c37f29c-a94c-4c2b-87e1-62bffc246889\") " Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.098430 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c37f29c-a94c-4c2b-87e1-62bffc246889-config-data\") pod \"8c37f29c-a94c-4c2b-87e1-62bffc246889\" (UID: \"8c37f29c-a94c-4c2b-87e1-62bffc246889\") " Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.098782 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-npvlp\" (UniqueName: \"kubernetes.io/projected/8c37f29c-a94c-4c2b-87e1-62bffc246889-kube-api-access-npvlp\") pod \"8c37f29c-a94c-4c2b-87e1-62bffc246889\" (UID: \"8c37f29c-a94c-4c2b-87e1-62bffc246889\") " Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.133641 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c37f29c-a94c-4c2b-87e1-62bffc246889-kube-api-access-npvlp" (OuterVolumeSpecName: "kube-api-access-npvlp") pod "8c37f29c-a94c-4c2b-87e1-62bffc246889" (UID: "8c37f29c-a94c-4c2b-87e1-62bffc246889"). InnerVolumeSpecName "kube-api-access-npvlp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.138990 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c37f29c-a94c-4c2b-87e1-62bffc246889-config-data" (OuterVolumeSpecName: "config-data") pod "8c37f29c-a94c-4c2b-87e1-62bffc246889" (UID: "8c37f29c-a94c-4c2b-87e1-62bffc246889"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.164503 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c37f29c-a94c-4c2b-87e1-62bffc246889-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c37f29c-a94c-4c2b-87e1-62bffc246889" (UID: "8c37f29c-a94c-4c2b-87e1-62bffc246889"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.200702 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-npvlp\" (UniqueName: \"kubernetes.io/projected/8c37f29c-a94c-4c2b-87e1-62bffc246889-kube-api-access-npvlp\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.200734 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c37f29c-a94c-4c2b-87e1-62bffc246889-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.200744 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c37f29c-a94c-4c2b-87e1-62bffc246889-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.201346 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.298750 4971 generic.go:334] "Generic (PLEG): container finished" podID="8c37f29c-a94c-4c2b-87e1-62bffc246889" containerID="fe828838b18c9b976f9afd340d33b50ae6a1e1ad189c3f7cef303c102d9f0311" exitCode=0 Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.298831 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8c37f29c-a94c-4c2b-87e1-62bffc246889","Type":"ContainerDied","Data":"fe828838b18c9b976f9afd340d33b50ae6a1e1ad189c3f7cef303c102d9f0311"} Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.299188 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8c37f29c-a94c-4c2b-87e1-62bffc246889","Type":"ContainerDied","Data":"4f215a9a6bea35ae5a2cc48039aae20ed532531dfe5c39d4eeec2f3811b3a2a0"} Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.298864 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.299230 4971 scope.go:117] "RemoveContainer" containerID="fe828838b18c9b976f9afd340d33b50ae6a1e1ad189c3f7cef303c102d9f0311" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.301580 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g552\" (UniqueName: \"kubernetes.io/projected/7b59348c-fd2b-41f7-8826-54b28f71e3a0-kube-api-access-6g552\") pod \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\" (UID: \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\") " Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.301637 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b59348c-fd2b-41f7-8826-54b28f71e3a0-config-data\") pod \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\" (UID: \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\") " Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.301915 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b59348c-fd2b-41f7-8826-54b28f71e3a0-logs\") pod \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\" (UID: \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\") " Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.301953 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b59348c-fd2b-41f7-8826-54b28f71e3a0-combined-ca-bundle\") pod \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\" (UID: \"7b59348c-fd2b-41f7-8826-54b28f71e3a0\") " Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.302048 4971 generic.go:334] "Generic (PLEG): container finished" podID="7b59348c-fd2b-41f7-8826-54b28f71e3a0" containerID="3817e83567971603f678466761ddbb7dc6a837bc262aeb0c2e6364436a020855" exitCode=0 Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.302076 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7b59348c-fd2b-41f7-8826-54b28f71e3a0","Type":"ContainerDied","Data":"3817e83567971603f678466761ddbb7dc6a837bc262aeb0c2e6364436a020855"} Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.302092 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7b59348c-fd2b-41f7-8826-54b28f71e3a0","Type":"ContainerDied","Data":"7fb58b198288231b69b1b0a215fc66f4755019a98b5efb8aab2afd1f14cdfdb0"} Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.302142 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.302727 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b59348c-fd2b-41f7-8826-54b28f71e3a0-logs" (OuterVolumeSpecName: "logs") pod "7b59348c-fd2b-41f7-8826-54b28f71e3a0" (UID: "7b59348c-fd2b-41f7-8826-54b28f71e3a0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.307739 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b59348c-fd2b-41f7-8826-54b28f71e3a0-kube-api-access-6g552" (OuterVolumeSpecName: "kube-api-access-6g552") pod "7b59348c-fd2b-41f7-8826-54b28f71e3a0" (UID: "7b59348c-fd2b-41f7-8826-54b28f71e3a0"). InnerVolumeSpecName "kube-api-access-6g552". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.325444 4971 scope.go:117] "RemoveContainer" containerID="fe828838b18c9b976f9afd340d33b50ae6a1e1ad189c3f7cef303c102d9f0311" Nov 27 07:17:18 crc kubenswrapper[4971]: E1127 07:17:18.327788 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe828838b18c9b976f9afd340d33b50ae6a1e1ad189c3f7cef303c102d9f0311\": container with ID starting with fe828838b18c9b976f9afd340d33b50ae6a1e1ad189c3f7cef303c102d9f0311 not found: ID does not exist" containerID="fe828838b18c9b976f9afd340d33b50ae6a1e1ad189c3f7cef303c102d9f0311" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.327874 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe828838b18c9b976f9afd340d33b50ae6a1e1ad189c3f7cef303c102d9f0311"} err="failed to get container status \"fe828838b18c9b976f9afd340d33b50ae6a1e1ad189c3f7cef303c102d9f0311\": rpc error: code = NotFound desc = could not find container \"fe828838b18c9b976f9afd340d33b50ae6a1e1ad189c3f7cef303c102d9f0311\": container with ID starting with fe828838b18c9b976f9afd340d33b50ae6a1e1ad189c3f7cef303c102d9f0311 not found: ID does not exist" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.327925 4971 scope.go:117] "RemoveContainer" containerID="3817e83567971603f678466761ddbb7dc6a837bc262aeb0c2e6364436a020855" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.346515 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.349544 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b59348c-fd2b-41f7-8826-54b28f71e3a0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b59348c-fd2b-41f7-8826-54b28f71e3a0" (UID: "7b59348c-fd2b-41f7-8826-54b28f71e3a0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.358676 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.359629 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b59348c-fd2b-41f7-8826-54b28f71e3a0-config-data" (OuterVolumeSpecName: "config-data") pod "7b59348c-fd2b-41f7-8826-54b28f71e3a0" (UID: "7b59348c-fd2b-41f7-8826-54b28f71e3a0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.359699 4971 scope.go:117] "RemoveContainer" containerID="197022aa48aedf07992a9135db0f21fa56cfab40635a7b4cefc0d0f0dbcf54f5" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.380420 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 07:17:18 crc kubenswrapper[4971]: E1127 07:17:18.381391 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b59348c-fd2b-41f7-8826-54b28f71e3a0" containerName="nova-api-log" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.381449 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b59348c-fd2b-41f7-8826-54b28f71e3a0" containerName="nova-api-log" Nov 27 07:17:18 crc kubenswrapper[4971]: E1127 07:17:18.381486 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c37f29c-a94c-4c2b-87e1-62bffc246889" containerName="nova-scheduler-scheduler" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.381494 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c37f29c-a94c-4c2b-87e1-62bffc246889" containerName="nova-scheduler-scheduler" Nov 27 07:17:18 crc kubenswrapper[4971]: E1127 07:17:18.381524 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b59348c-fd2b-41f7-8826-54b28f71e3a0" containerName="nova-api-api" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.381564 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b59348c-fd2b-41f7-8826-54b28f71e3a0" containerName="nova-api-api" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.381800 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b59348c-fd2b-41f7-8826-54b28f71e3a0" containerName="nova-api-log" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.381826 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c37f29c-a94c-4c2b-87e1-62bffc246889" containerName="nova-scheduler-scheduler" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.381846 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b59348c-fd2b-41f7-8826-54b28f71e3a0" containerName="nova-api-api" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.383551 4971 scope.go:117] "RemoveContainer" containerID="3817e83567971603f678466761ddbb7dc6a837bc262aeb0c2e6364436a020855" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.383858 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 07:17:18 crc kubenswrapper[4971]: E1127 07:17:18.384577 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3817e83567971603f678466761ddbb7dc6a837bc262aeb0c2e6364436a020855\": container with ID starting with 3817e83567971603f678466761ddbb7dc6a837bc262aeb0c2e6364436a020855 not found: ID does not exist" containerID="3817e83567971603f678466761ddbb7dc6a837bc262aeb0c2e6364436a020855" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.384625 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3817e83567971603f678466761ddbb7dc6a837bc262aeb0c2e6364436a020855"} err="failed to get container status \"3817e83567971603f678466761ddbb7dc6a837bc262aeb0c2e6364436a020855\": rpc error: code = NotFound desc = could not find container \"3817e83567971603f678466761ddbb7dc6a837bc262aeb0c2e6364436a020855\": container with ID starting with 3817e83567971603f678466761ddbb7dc6a837bc262aeb0c2e6364436a020855 not found: ID does not exist" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.384658 4971 scope.go:117] "RemoveContainer" containerID="197022aa48aedf07992a9135db0f21fa56cfab40635a7b4cefc0d0f0dbcf54f5" Nov 27 07:17:18 crc kubenswrapper[4971]: E1127 07:17:18.385920 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"197022aa48aedf07992a9135db0f21fa56cfab40635a7b4cefc0d0f0dbcf54f5\": container with ID starting with 197022aa48aedf07992a9135db0f21fa56cfab40635a7b4cefc0d0f0dbcf54f5 not found: ID does not exist" containerID="197022aa48aedf07992a9135db0f21fa56cfab40635a7b4cefc0d0f0dbcf54f5" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.385945 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"197022aa48aedf07992a9135db0f21fa56cfab40635a7b4cefc0d0f0dbcf54f5"} err="failed to get container status \"197022aa48aedf07992a9135db0f21fa56cfab40635a7b4cefc0d0f0dbcf54f5\": rpc error: code = NotFound desc = could not find container \"197022aa48aedf07992a9135db0f21fa56cfab40635a7b4cefc0d0f0dbcf54f5\": container with ID starting with 197022aa48aedf07992a9135db0f21fa56cfab40635a7b4cefc0d0f0dbcf54f5 not found: ID does not exist" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.386081 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.395873 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.407402 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlrdh\" (UniqueName: \"kubernetes.io/projected/2b7ff1aa-9bdb-4c11-9a97-bac98829a89f-kube-api-access-rlrdh\") pod \"nova-scheduler-0\" (UID: \"2b7ff1aa-9bdb-4c11-9a97-bac98829a89f\") " pod="openstack/nova-scheduler-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.407494 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b7ff1aa-9bdb-4c11-9a97-bac98829a89f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2b7ff1aa-9bdb-4c11-9a97-bac98829a89f\") " pod="openstack/nova-scheduler-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.407687 4971 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b7ff1aa-9bdb-4c11-9a97-bac98829a89f-config-data\") pod \"nova-scheduler-0\" (UID: \"2b7ff1aa-9bdb-4c11-9a97-bac98829a89f\") " pod="openstack/nova-scheduler-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.407851 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g552\" (UniqueName: \"kubernetes.io/projected/7b59348c-fd2b-41f7-8826-54b28f71e3a0-kube-api-access-6g552\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.407877 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b59348c-fd2b-41f7-8826-54b28f71e3a0-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.407889 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7b59348c-fd2b-41f7-8826-54b28f71e3a0-logs\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.407900 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b59348c-fd2b-41f7-8826-54b28f71e3a0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.509928 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlrdh\" (UniqueName: \"kubernetes.io/projected/2b7ff1aa-9bdb-4c11-9a97-bac98829a89f-kube-api-access-rlrdh\") pod \"nova-scheduler-0\" (UID: \"2b7ff1aa-9bdb-4c11-9a97-bac98829a89f\") " pod="openstack/nova-scheduler-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.510009 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b7ff1aa-9bdb-4c11-9a97-bac98829a89f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2b7ff1aa-9bdb-4c11-9a97-bac98829a89f\") " pod="openstack/nova-scheduler-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.510040 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b7ff1aa-9bdb-4c11-9a97-bac98829a89f-config-data\") pod \"nova-scheduler-0\" (UID: \"2b7ff1aa-9bdb-4c11-9a97-bac98829a89f\") " pod="openstack/nova-scheduler-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.514677 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b7ff1aa-9bdb-4c11-9a97-bac98829a89f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2b7ff1aa-9bdb-4c11-9a97-bac98829a89f\") " pod="openstack/nova-scheduler-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.516169 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b7ff1aa-9bdb-4c11-9a97-bac98829a89f-config-data\") pod \"nova-scheduler-0\" (UID: \"2b7ff1aa-9bdb-4c11-9a97-bac98829a89f\") " pod="openstack/nova-scheduler-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.544357 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlrdh\" (UniqueName: \"kubernetes.io/projected/2b7ff1aa-9bdb-4c11-9a97-bac98829a89f-kube-api-access-rlrdh\") pod \"nova-scheduler-0\" (UID: \"2b7ff1aa-9bdb-4c11-9a97-bac98829a89f\") " pod="openstack/nova-scheduler-0" 
Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.564651 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c37f29c-a94c-4c2b-87e1-62bffc246889" path="/var/lib/kubelet/pods/8c37f29c-a94c-4c2b-87e1-62bffc246889/volumes" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.673701 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.693612 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.711421 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.718467 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.722108 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.726412 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.730192 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.827470 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/353c1f0c-de63-416e-b2b6-49155b0bd4a5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\") " pod="openstack/nova-api-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.828093 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/353c1f0c-de63-416e-b2b6-49155b0bd4a5-logs\") pod \"nova-api-0\" (UID: \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\") " pod="openstack/nova-api-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.828360 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfw5p\" (UniqueName: \"kubernetes.io/projected/353c1f0c-de63-416e-b2b6-49155b0bd4a5-kube-api-access-lfw5p\") pod \"nova-api-0\" (UID: \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\") " pod="openstack/nova-api-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.828390 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/353c1f0c-de63-416e-b2b6-49155b0bd4a5-config-data\") pod \"nova-api-0\" (UID: \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\") " pod="openstack/nova-api-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.930040 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfw5p\" (UniqueName: \"kubernetes.io/projected/353c1f0c-de63-416e-b2b6-49155b0bd4a5-kube-api-access-lfw5p\") pod \"nova-api-0\" (UID: \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\") " pod="openstack/nova-api-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.930087 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/353c1f0c-de63-416e-b2b6-49155b0bd4a5-config-data\") pod \"nova-api-0\" (UID: \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\") " pod="openstack/nova-api-0" Nov 27 07:17:18 crc 
kubenswrapper[4971]: I1127 07:17:18.930132 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/353c1f0c-de63-416e-b2b6-49155b0bd4a5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\") " pod="openstack/nova-api-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.930203 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/353c1f0c-de63-416e-b2b6-49155b0bd4a5-logs\") pod \"nova-api-0\" (UID: \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\") " pod="openstack/nova-api-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.930590 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/353c1f0c-de63-416e-b2b6-49155b0bd4a5-logs\") pod \"nova-api-0\" (UID: \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\") " pod="openstack/nova-api-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.945121 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/353c1f0c-de63-416e-b2b6-49155b0bd4a5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\") " pod="openstack/nova-api-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.961470 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfw5p\" (UniqueName: \"kubernetes.io/projected/353c1f0c-de63-416e-b2b6-49155b0bd4a5-kube-api-access-lfw5p\") pod \"nova-api-0\" (UID: \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\") " pod="openstack/nova-api-0" Nov 27 07:17:18 crc kubenswrapper[4971]: I1127 07:17:18.963493 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/353c1f0c-de63-416e-b2b6-49155b0bd4a5-config-data\") pod \"nova-api-0\" (UID: \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\") " pod="openstack/nova-api-0" Nov 27 07:17:19 crc kubenswrapper[4971]: I1127 07:17:19.173213 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 27 07:17:19 crc kubenswrapper[4971]: I1127 07:17:19.203020 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 07:17:19 crc kubenswrapper[4971]: W1127 07:17:19.205929 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b7ff1aa_9bdb_4c11_9a97_bac98829a89f.slice/crio-cd5cf61d7985d15e49c5d0a0b418dff4b067091f8a49789611967c4b9c047329 WatchSource:0}: Error finding container cd5cf61d7985d15e49c5d0a0b418dff4b067091f8a49789611967c4b9c047329: Status 404 returned error can't find the container with id cd5cf61d7985d15e49c5d0a0b418dff4b067091f8a49789611967c4b9c047329 Nov 27 07:17:19 crc kubenswrapper[4971]: I1127 07:17:19.312474 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2b7ff1aa-9bdb-4c11-9a97-bac98829a89f","Type":"ContainerStarted","Data":"cd5cf61d7985d15e49c5d0a0b418dff4b067091f8a49789611967c4b9c047329"} Nov 27 07:17:19 crc kubenswrapper[4971]: I1127 07:17:19.632720 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 27 07:17:19 crc kubenswrapper[4971]: I1127 07:17:19.947313 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 27 07:17:19 crc kubenswrapper[4971]: I1127 07:17:19.947385 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 27 07:17:20 crc kubenswrapper[4971]: I1127 07:17:20.329210 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"353c1f0c-de63-416e-b2b6-49155b0bd4a5","Type":"ContainerStarted","Data":"dcfac204c7989c2b0acb8b5ce4c35db6fe6acec931af78c4c25dd2ac7e8c929f"} Nov 27 07:17:20 crc kubenswrapper[4971]: I1127 07:17:20.329507 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"353c1f0c-de63-416e-b2b6-49155b0bd4a5","Type":"ContainerStarted","Data":"26ca8147f93e2a817c647364a41bd1eaf9334de41e901a3f8adc56c0ee2f5b24"} Nov 27 07:17:20 crc kubenswrapper[4971]: I1127 07:17:20.329518 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"353c1f0c-de63-416e-b2b6-49155b0bd4a5","Type":"ContainerStarted","Data":"d04569fc0bcd41add1fde2b23bd0d7f2e967fb7f8637d0a710af73767ddc6658"} Nov 27 07:17:20 crc kubenswrapper[4971]: I1127 07:17:20.332918 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2b7ff1aa-9bdb-4c11-9a97-bac98829a89f","Type":"ContainerStarted","Data":"9dda0ab26da17d256febc0b7476eb425a2961dbd6725659139cfe57841fa7a79"} Nov 27 07:17:20 crc kubenswrapper[4971]: I1127 07:17:20.356793 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.356769887 podStartE2EDuration="2.356769887s" podCreationTimestamp="2025-11-27 07:17:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:17:20.347585819 +0000 UTC m=+1478.539629777" watchObservedRunningTime="2025-11-27 07:17:20.356769887 +0000 UTC m=+1478.548813795" Nov 27 07:17:20 crc kubenswrapper[4971]: I1127 07:17:20.374920 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.374903306 podStartE2EDuration="2.374903306s" podCreationTimestamp="2025-11-27 07:17:18 +0000 
UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:17:20.366442758 +0000 UTC m=+1478.558486676" watchObservedRunningTime="2025-11-27 07:17:20.374903306 +0000 UTC m=+1478.566947214" Nov 27 07:17:20 crc kubenswrapper[4971]: I1127 07:17:20.561338 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b59348c-fd2b-41f7-8826-54b28f71e3a0" path="/var/lib/kubelet/pods/7b59348c-fd2b-41f7-8826-54b28f71e3a0/volumes" Nov 27 07:17:23 crc kubenswrapper[4971]: I1127 07:17:23.647878 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Nov 27 07:17:23 crc kubenswrapper[4971]: I1127 07:17:23.712823 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 27 07:17:24 crc kubenswrapper[4971]: I1127 07:17:24.295681 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 27 07:17:24 crc kubenswrapper[4971]: I1127 07:17:24.947697 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 27 07:17:24 crc kubenswrapper[4971]: I1127 07:17:24.947758 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 27 07:17:25 crc kubenswrapper[4971]: I1127 07:17:25.961693 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="0195ada6-2a69-427f-8041-166dfdacf583" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Nov 27 07:17:25 crc kubenswrapper[4971]: I1127 07:17:25.961693 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="0195ada6-2a69-427f-8041-166dfdacf583" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Nov 27 07:17:26 crc kubenswrapper[4971]: I1127 07:17:26.413285 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:17:26 crc kubenswrapper[4971]: I1127 07:17:26.413353 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:17:28 crc kubenswrapper[4971]: I1127 07:17:28.712905 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 27 07:17:28 crc kubenswrapper[4971]: I1127 07:17:28.740303 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 27 07:17:29 crc kubenswrapper[4971]: I1127 07:17:29.174243 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 27 07:17:29 crc kubenswrapper[4971]: I1127 07:17:29.174340 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 27 
07:17:29 crc kubenswrapper[4971]: I1127 07:17:29.482605 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 27 07:17:30 crc kubenswrapper[4971]: I1127 07:17:30.257678 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="353c1f0c-de63-416e-b2b6-49155b0bd4a5" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.195:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 07:17:30 crc kubenswrapper[4971]: I1127 07:17:30.257892 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="353c1f0c-de63-416e-b2b6-49155b0bd4a5" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.195:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 07:17:34 crc kubenswrapper[4971]: I1127 07:17:34.951992 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 27 07:17:34 crc kubenswrapper[4971]: I1127 07:17:34.957126 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 27 07:17:34 crc kubenswrapper[4971]: I1127 07:17:34.959168 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 27 07:17:35 crc kubenswrapper[4971]: I1127 07:17:35.511888 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 27 07:17:37 crc kubenswrapper[4971]: I1127 07:17:37.525706 4971 generic.go:334] "Generic (PLEG): container finished" podID="6abff3e7-26fe-4194-b53c-5f346c554777" containerID="62c5d6cd46420fb65a7f206aa0adabc71b71475222eed36141bfe65ea7e170a8" exitCode=137 Nov 27 07:17:37 crc kubenswrapper[4971]: I1127 07:17:37.525741 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6abff3e7-26fe-4194-b53c-5f346c554777","Type":"ContainerDied","Data":"62c5d6cd46420fb65a7f206aa0adabc71b71475222eed36141bfe65ea7e170a8"} Nov 27 07:17:37 crc kubenswrapper[4971]: I1127 07:17:37.526787 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6abff3e7-26fe-4194-b53c-5f346c554777","Type":"ContainerDied","Data":"35108c570bfe437130037dce0277e52477ab4922395b53cffe3a5dd97c984e3a"} Nov 27 07:17:37 crc kubenswrapper[4971]: I1127 07:17:37.526801 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35108c570bfe437130037dce0277e52477ab4922395b53cffe3a5dd97c984e3a" Nov 27 07:17:37 crc kubenswrapper[4971]: I1127 07:17:37.567356 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:37 crc kubenswrapper[4971]: I1127 07:17:37.745337 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6abff3e7-26fe-4194-b53c-5f346c554777-combined-ca-bundle\") pod \"6abff3e7-26fe-4194-b53c-5f346c554777\" (UID: \"6abff3e7-26fe-4194-b53c-5f346c554777\") " Nov 27 07:17:37 crc kubenswrapper[4971]: I1127 07:17:37.745574 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2625\" (UniqueName: \"kubernetes.io/projected/6abff3e7-26fe-4194-b53c-5f346c554777-kube-api-access-h2625\") pod \"6abff3e7-26fe-4194-b53c-5f346c554777\" (UID: \"6abff3e7-26fe-4194-b53c-5f346c554777\") " Nov 27 07:17:37 crc kubenswrapper[4971]: I1127 07:17:37.745754 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6abff3e7-26fe-4194-b53c-5f346c554777-config-data\") pod \"6abff3e7-26fe-4194-b53c-5f346c554777\" (UID: \"6abff3e7-26fe-4194-b53c-5f346c554777\") " Nov 27 07:17:37 crc kubenswrapper[4971]: I1127 07:17:37.752429 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6abff3e7-26fe-4194-b53c-5f346c554777-kube-api-access-h2625" (OuterVolumeSpecName: "kube-api-access-h2625") pod "6abff3e7-26fe-4194-b53c-5f346c554777" (UID: "6abff3e7-26fe-4194-b53c-5f346c554777"). InnerVolumeSpecName "kube-api-access-h2625". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:17:37 crc kubenswrapper[4971]: I1127 07:17:37.781924 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6abff3e7-26fe-4194-b53c-5f346c554777-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6abff3e7-26fe-4194-b53c-5f346c554777" (UID: "6abff3e7-26fe-4194-b53c-5f346c554777"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:37 crc kubenswrapper[4971]: I1127 07:17:37.794390 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6abff3e7-26fe-4194-b53c-5f346c554777-config-data" (OuterVolumeSpecName: "config-data") pod "6abff3e7-26fe-4194-b53c-5f346c554777" (UID: "6abff3e7-26fe-4194-b53c-5f346c554777"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:37 crc kubenswrapper[4971]: I1127 07:17:37.848475 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6abff3e7-26fe-4194-b53c-5f346c554777-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:37 crc kubenswrapper[4971]: I1127 07:17:37.848513 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2625\" (UniqueName: \"kubernetes.io/projected/6abff3e7-26fe-4194-b53c-5f346c554777-kube-api-access-h2625\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:37 crc kubenswrapper[4971]: I1127 07:17:37.848528 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6abff3e7-26fe-4194-b53c-5f346c554777-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.537035 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.605133 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.622475 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.660311 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 27 07:17:38 crc kubenswrapper[4971]: E1127 07:17:38.661126 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6abff3e7-26fe-4194-b53c-5f346c554777" containerName="nova-cell1-novncproxy-novncproxy" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.661162 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6abff3e7-26fe-4194-b53c-5f346c554777" containerName="nova-cell1-novncproxy-novncproxy" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.661634 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="6abff3e7-26fe-4194-b53c-5f346c554777" containerName="nova-cell1-novncproxy-novncproxy" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.662795 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.666366 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.666809 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.667119 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.671037 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.738059 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dglpp"] Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.741065 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dglpp" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.758641 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dglpp"] Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.775298 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4t5z4\" (UniqueName: \"kubernetes.io/projected/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-kube-api-access-4t5z4\") pod \"nova-cell1-novncproxy-0\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.775397 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.775469 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.775517 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.775601 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.877437 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.877522 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqwbs\" (UniqueName: \"kubernetes.io/projected/71d7b8e5-06be-4a2e-8d36-dd6e97616fec-kube-api-access-fqwbs\") pod \"redhat-marketplace-dglpp\" (UID: \"71d7b8e5-06be-4a2e-8d36-dd6e97616fec\") " pod="openshift-marketplace/redhat-marketplace-dglpp" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.877570 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.877615 4971 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4t5z4\" (UniqueName: \"kubernetes.io/projected/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-kube-api-access-4t5z4\") pod \"nova-cell1-novncproxy-0\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.877669 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.877714 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71d7b8e5-06be-4a2e-8d36-dd6e97616fec-utilities\") pod \"redhat-marketplace-dglpp\" (UID: \"71d7b8e5-06be-4a2e-8d36-dd6e97616fec\") " pod="openshift-marketplace/redhat-marketplace-dglpp" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.877758 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.877791 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71d7b8e5-06be-4a2e-8d36-dd6e97616fec-catalog-content\") pod \"redhat-marketplace-dglpp\" (UID: \"71d7b8e5-06be-4a2e-8d36-dd6e97616fec\") " pod="openshift-marketplace/redhat-marketplace-dglpp" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.881545 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.881595 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.881649 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.891786 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.900114 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-4t5z4\" (UniqueName: \"kubernetes.io/projected/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-kube-api-access-4t5z4\") pod \"nova-cell1-novncproxy-0\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.979574 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71d7b8e5-06be-4a2e-8d36-dd6e97616fec-utilities\") pod \"redhat-marketplace-dglpp\" (UID: \"71d7b8e5-06be-4a2e-8d36-dd6e97616fec\") " pod="openshift-marketplace/redhat-marketplace-dglpp" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.979677 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71d7b8e5-06be-4a2e-8d36-dd6e97616fec-catalog-content\") pod \"redhat-marketplace-dglpp\" (UID: \"71d7b8e5-06be-4a2e-8d36-dd6e97616fec\") " pod="openshift-marketplace/redhat-marketplace-dglpp" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.979777 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqwbs\" (UniqueName: \"kubernetes.io/projected/71d7b8e5-06be-4a2e-8d36-dd6e97616fec-kube-api-access-fqwbs\") pod \"redhat-marketplace-dglpp\" (UID: \"71d7b8e5-06be-4a2e-8d36-dd6e97616fec\") " pod="openshift-marketplace/redhat-marketplace-dglpp" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.980156 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71d7b8e5-06be-4a2e-8d36-dd6e97616fec-utilities\") pod \"redhat-marketplace-dglpp\" (UID: \"71d7b8e5-06be-4a2e-8d36-dd6e97616fec\") " pod="openshift-marketplace/redhat-marketplace-dglpp" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.980335 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71d7b8e5-06be-4a2e-8d36-dd6e97616fec-catalog-content\") pod \"redhat-marketplace-dglpp\" (UID: \"71d7b8e5-06be-4a2e-8d36-dd6e97616fec\") " pod="openshift-marketplace/redhat-marketplace-dglpp" Nov 27 07:17:38 crc kubenswrapper[4971]: I1127 07:17:38.995392 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.003053 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqwbs\" (UniqueName: \"kubernetes.io/projected/71d7b8e5-06be-4a2e-8d36-dd6e97616fec-kube-api-access-fqwbs\") pod \"redhat-marketplace-dglpp\" (UID: \"71d7b8e5-06be-4a2e-8d36-dd6e97616fec\") " pod="openshift-marketplace/redhat-marketplace-dglpp" Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.068241 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dglpp" Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.194288 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.196391 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.202558 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.207924 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.509592 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 27 07:17:39 crc kubenswrapper[4971]: W1127 07:17:39.511828 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa693ecf_e3d2_4a1b_9bd0_df1e508e3bea.slice/crio-7101867e5e94352aa26ebb236111effa5248597ea686da5f10e624a1125b4366 WatchSource:0}: Error finding container 7101867e5e94352aa26ebb236111effa5248597ea686da5f10e624a1125b4366: Status 404 returned error can't find the container with id 7101867e5e94352aa26ebb236111effa5248597ea686da5f10e624a1125b4366 Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.549868 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea","Type":"ContainerStarted","Data":"7101867e5e94352aa26ebb236111effa5248597ea686da5f10e624a1125b4366"} Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.551132 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.556991 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 27 07:17:39 crc kubenswrapper[4971]: W1127 07:17:39.601513 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71d7b8e5_06be_4a2e_8d36_dd6e97616fec.slice/crio-8ff80b03df77b3e60d9c32dce7bcf31d634cd74af037e369783569f1de01ae60 WatchSource:0}: Error finding container 8ff80b03df77b3e60d9c32dce7bcf31d634cd74af037e369783569f1de01ae60: Status 404 returned error can't find the container with id 8ff80b03df77b3e60d9c32dce7bcf31d634cd74af037e369783569f1de01ae60 Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.609291 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dglpp"] Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.758898 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-669669cf59-5rwqh"] Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.762765 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.777207 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-669669cf59-5rwqh"] Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.912076 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-ovsdbserver-nb\") pod \"dnsmasq-dns-669669cf59-5rwqh\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.912470 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-config\") pod \"dnsmasq-dns-669669cf59-5rwqh\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.912521 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-ovsdbserver-sb\") pod \"dnsmasq-dns-669669cf59-5rwqh\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.912793 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gb2q\" (UniqueName: \"kubernetes.io/projected/b3fa1872-f7d9-4531-bc33-619419f530a5-kube-api-access-6gb2q\") pod \"dnsmasq-dns-669669cf59-5rwqh\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.912951 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-dns-svc\") pod \"dnsmasq-dns-669669cf59-5rwqh\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:39 crc kubenswrapper[4971]: I1127 07:17:39.912984 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-dns-swift-storage-0\") pod \"dnsmasq-dns-669669cf59-5rwqh\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.015249 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-ovsdbserver-nb\") pod \"dnsmasq-dns-669669cf59-5rwqh\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.015326 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-config\") pod \"dnsmasq-dns-669669cf59-5rwqh\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.015362 4971 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-ovsdbserver-sb\") pod \"dnsmasq-dns-669669cf59-5rwqh\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.015426 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gb2q\" (UniqueName: \"kubernetes.io/projected/b3fa1872-f7d9-4531-bc33-619419f530a5-kube-api-access-6gb2q\") pod \"dnsmasq-dns-669669cf59-5rwqh\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.015472 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-dns-svc\") pod \"dnsmasq-dns-669669cf59-5rwqh\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.015494 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-dns-swift-storage-0\") pod \"dnsmasq-dns-669669cf59-5rwqh\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.017218 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-ovsdbserver-nb\") pod \"dnsmasq-dns-669669cf59-5rwqh\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.017855 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-config\") pod \"dnsmasq-dns-669669cf59-5rwqh\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.018493 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-ovsdbserver-sb\") pod \"dnsmasq-dns-669669cf59-5rwqh\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.019246 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-dns-svc\") pod \"dnsmasq-dns-669669cf59-5rwqh\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.019388 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-dns-swift-storage-0\") pod \"dnsmasq-dns-669669cf59-5rwqh\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.041368 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gb2q\" (UniqueName: 
\"kubernetes.io/projected/b3fa1872-f7d9-4531-bc33-619419f530a5-kube-api-access-6gb2q\") pod \"dnsmasq-dns-669669cf59-5rwqh\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.098514 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.606293 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6abff3e7-26fe-4194-b53c-5f346c554777" path="/var/lib/kubelet/pods/6abff3e7-26fe-4194-b53c-5f346c554777/volumes" Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.607754 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea","Type":"ContainerStarted","Data":"86a75ee86a1e45fea7cc0b2bf039f079e8bc453673a092221bda14b2056c6baf"} Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.614596 4971 generic.go:334] "Generic (PLEG): container finished" podID="71d7b8e5-06be-4a2e-8d36-dd6e97616fec" containerID="3586a41a7891cea521492885be55a9a9bd05a1cb5921d455b0e22ebf6c5e936b" exitCode=0 Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.615983 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dglpp" event={"ID":"71d7b8e5-06be-4a2e-8d36-dd6e97616fec","Type":"ContainerDied","Data":"3586a41a7891cea521492885be55a9a9bd05a1cb5921d455b0e22ebf6c5e936b"} Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.616017 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dglpp" event={"ID":"71d7b8e5-06be-4a2e-8d36-dd6e97616fec","Type":"ContainerStarted","Data":"8ff80b03df77b3e60d9c32dce7bcf31d634cd74af037e369783569f1de01ae60"} Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.637638 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-669669cf59-5rwqh"] Nov 27 07:17:40 crc kubenswrapper[4971]: I1127 07:17:40.660233 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.660212872 podStartE2EDuration="2.660212872s" podCreationTimestamp="2025-11-27 07:17:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:17:40.620981124 +0000 UTC m=+1498.813025052" watchObservedRunningTime="2025-11-27 07:17:40.660212872 +0000 UTC m=+1498.852256800" Nov 27 07:17:41 crc kubenswrapper[4971]: I1127 07:17:41.627169 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dglpp" event={"ID":"71d7b8e5-06be-4a2e-8d36-dd6e97616fec","Type":"ContainerStarted","Data":"46227a133d66ed19c72b28861c349b925b200b4f43e8eb5ee9084b038610548a"} Nov 27 07:17:41 crc kubenswrapper[4971]: I1127 07:17:41.629243 4971 generic.go:334] "Generic (PLEG): container finished" podID="b3fa1872-f7d9-4531-bc33-619419f530a5" containerID="c4384c63c41c30e5aff2a9142833b3197827809310d63cf252a7f987b5fa0052" exitCode=0 Nov 27 07:17:41 crc kubenswrapper[4971]: I1127 07:17:41.629320 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-669669cf59-5rwqh" event={"ID":"b3fa1872-f7d9-4531-bc33-619419f530a5","Type":"ContainerDied","Data":"c4384c63c41c30e5aff2a9142833b3197827809310d63cf252a7f987b5fa0052"} Nov 27 07:17:41 crc kubenswrapper[4971]: I1127 07:17:41.629357 4971 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-669669cf59-5rwqh" event={"ID":"b3fa1872-f7d9-4531-bc33-619419f530a5","Type":"ContainerStarted","Data":"36d81b94fdfead0137bb05bc0e7411297f1b21b70c1bddab11f0ae4747cb68a6"} Nov 27 07:17:42 crc kubenswrapper[4971]: I1127 07:17:42.240449 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:17:42 crc kubenswrapper[4971]: I1127 07:17:42.241823 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="76095246-397f-49df-9758-e41ef115f04e" containerName="ceilometer-central-agent" containerID="cri-o://0aa5575b639548fda0cd5e9bc080b30b926779f25a3673e3a45f9a5dfba89e65" gracePeriod=30 Nov 27 07:17:42 crc kubenswrapper[4971]: I1127 07:17:42.242101 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="76095246-397f-49df-9758-e41ef115f04e" containerName="ceilometer-notification-agent" containerID="cri-o://b476ee801887a991ec352e6d262b810d23270fe912044830298d84052a838b2d" gracePeriod=30 Nov 27 07:17:42 crc kubenswrapper[4971]: I1127 07:17:42.242103 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="76095246-397f-49df-9758-e41ef115f04e" containerName="proxy-httpd" containerID="cri-o://9056a9ec05a7c23aa18ad63623b7cf69c34e7d323c0ae3f328595b2c37f437c4" gracePeriod=30 Nov 27 07:17:42 crc kubenswrapper[4971]: I1127 07:17:42.242105 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="76095246-397f-49df-9758-e41ef115f04e" containerName="sg-core" containerID="cri-o://1480a56592f432ac9c7f7cdd1c22db97b04b9ca7ca9c7343b9a017325fd12817" gracePeriod=30 Nov 27 07:17:42 crc kubenswrapper[4971]: I1127 07:17:42.373972 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 27 07:17:42 crc kubenswrapper[4971]: I1127 07:17:42.663309 4971 generic.go:334] "Generic (PLEG): container finished" podID="71d7b8e5-06be-4a2e-8d36-dd6e97616fec" containerID="46227a133d66ed19c72b28861c349b925b200b4f43e8eb5ee9084b038610548a" exitCode=0 Nov 27 07:17:42 crc kubenswrapper[4971]: I1127 07:17:42.663459 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dglpp" event={"ID":"71d7b8e5-06be-4a2e-8d36-dd6e97616fec","Type":"ContainerDied","Data":"46227a133d66ed19c72b28861c349b925b200b4f43e8eb5ee9084b038610548a"} Nov 27 07:17:42 crc kubenswrapper[4971]: I1127 07:17:42.676650 4971 generic.go:334] "Generic (PLEG): container finished" podID="76095246-397f-49df-9758-e41ef115f04e" containerID="9056a9ec05a7c23aa18ad63623b7cf69c34e7d323c0ae3f328595b2c37f437c4" exitCode=0 Nov 27 07:17:42 crc kubenswrapper[4971]: I1127 07:17:42.676687 4971 generic.go:334] "Generic (PLEG): container finished" podID="76095246-397f-49df-9758-e41ef115f04e" containerID="1480a56592f432ac9c7f7cdd1c22db97b04b9ca7ca9c7343b9a017325fd12817" exitCode=2 Nov 27 07:17:42 crc kubenswrapper[4971]: I1127 07:17:42.676716 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76095246-397f-49df-9758-e41ef115f04e","Type":"ContainerDied","Data":"9056a9ec05a7c23aa18ad63623b7cf69c34e7d323c0ae3f328595b2c37f437c4"} Nov 27 07:17:42 crc kubenswrapper[4971]: I1127 07:17:42.676762 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"76095246-397f-49df-9758-e41ef115f04e","Type":"ContainerDied","Data":"1480a56592f432ac9c7f7cdd1c22db97b04b9ca7ca9c7343b9a017325fd12817"} Nov 27 07:17:42 crc kubenswrapper[4971]: I1127 07:17:42.679484 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="353c1f0c-de63-416e-b2b6-49155b0bd4a5" containerName="nova-api-log" containerID="cri-o://26ca8147f93e2a817c647364a41bd1eaf9334de41e901a3f8adc56c0ee2f5b24" gracePeriod=30 Nov 27 07:17:42 crc kubenswrapper[4971]: I1127 07:17:42.680962 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="353c1f0c-de63-416e-b2b6-49155b0bd4a5" containerName="nova-api-api" containerID="cri-o://dcfac204c7989c2b0acb8b5ce4c35db6fe6acec931af78c4c25dd2ac7e8c929f" gracePeriod=30 Nov 27 07:17:42 crc kubenswrapper[4971]: I1127 07:17:42.681036 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-669669cf59-5rwqh" event={"ID":"b3fa1872-f7d9-4531-bc33-619419f530a5","Type":"ContainerStarted","Data":"8e8e087cfd5dc9b5572bd417f86679e6c4d8fa3a5d6ad123e1e7cf39a19c38ce"} Nov 27 07:17:42 crc kubenswrapper[4971]: I1127 07:17:42.681154 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:42 crc kubenswrapper[4971]: I1127 07:17:42.715569 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-669669cf59-5rwqh" podStartSLOduration=3.715544971 podStartE2EDuration="3.715544971s" podCreationTimestamp="2025-11-27 07:17:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:17:42.703485096 +0000 UTC m=+1500.895529034" watchObservedRunningTime="2025-11-27 07:17:42.715544971 +0000 UTC m=+1500.907588899" Nov 27 07:17:43 crc kubenswrapper[4971]: I1127 07:17:43.692303 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dglpp" event={"ID":"71d7b8e5-06be-4a2e-8d36-dd6e97616fec","Type":"ContainerStarted","Data":"4ddcbc42db2ba1064b1be23b19557092bc9f250cd522d599f52c0edf05df8c9a"} Nov 27 07:17:43 crc kubenswrapper[4971]: I1127 07:17:43.696835 4971 generic.go:334] "Generic (PLEG): container finished" podID="76095246-397f-49df-9758-e41ef115f04e" containerID="0aa5575b639548fda0cd5e9bc080b30b926779f25a3673e3a45f9a5dfba89e65" exitCode=0 Nov 27 07:17:43 crc kubenswrapper[4971]: I1127 07:17:43.696896 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76095246-397f-49df-9758-e41ef115f04e","Type":"ContainerDied","Data":"0aa5575b639548fda0cd5e9bc080b30b926779f25a3673e3a45f9a5dfba89e65"} Nov 27 07:17:43 crc kubenswrapper[4971]: I1127 07:17:43.699792 4971 generic.go:334] "Generic (PLEG): container finished" podID="353c1f0c-de63-416e-b2b6-49155b0bd4a5" containerID="26ca8147f93e2a817c647364a41bd1eaf9334de41e901a3f8adc56c0ee2f5b24" exitCode=143 Nov 27 07:17:43 crc kubenswrapper[4971]: I1127 07:17:43.699873 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"353c1f0c-de63-416e-b2b6-49155b0bd4a5","Type":"ContainerDied","Data":"26ca8147f93e2a817c647364a41bd1eaf9334de41e901a3f8adc56c0ee2f5b24"} Nov 27 07:17:43 crc kubenswrapper[4971]: I1127 07:17:43.725715 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dglpp" podStartSLOduration=3.1568527299999998 
podStartE2EDuration="5.725698064s" podCreationTimestamp="2025-11-27 07:17:38 +0000 UTC" firstStartedPulling="2025-11-27 07:17:40.631081036 +0000 UTC m=+1498.823124954" lastFinishedPulling="2025-11-27 07:17:43.19992637 +0000 UTC m=+1501.391970288" observedRunningTime="2025-11-27 07:17:43.71996573 +0000 UTC m=+1501.912009678" watchObservedRunningTime="2025-11-27 07:17:43.725698064 +0000 UTC m=+1501.917741982" Nov 27 07:17:43 crc kubenswrapper[4971]: I1127 07:17:43.996510 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.355948 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.483424 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/353c1f0c-de63-416e-b2b6-49155b0bd4a5-logs\") pod \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\" (UID: \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\") " Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.483787 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/353c1f0c-de63-416e-b2b6-49155b0bd4a5-config-data\") pod \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\" (UID: \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\") " Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.484059 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/353c1f0c-de63-416e-b2b6-49155b0bd4a5-logs" (OuterVolumeSpecName: "logs") pod "353c1f0c-de63-416e-b2b6-49155b0bd4a5" (UID: "353c1f0c-de63-416e-b2b6-49155b0bd4a5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.484219 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/353c1f0c-de63-416e-b2b6-49155b0bd4a5-combined-ca-bundle\") pod \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\" (UID: \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\") " Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.484463 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfw5p\" (UniqueName: \"kubernetes.io/projected/353c1f0c-de63-416e-b2b6-49155b0bd4a5-kube-api-access-lfw5p\") pod \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\" (UID: \"353c1f0c-de63-416e-b2b6-49155b0bd4a5\") " Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.485107 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/353c1f0c-de63-416e-b2b6-49155b0bd4a5-logs\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.491221 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/353c1f0c-de63-416e-b2b6-49155b0bd4a5-kube-api-access-lfw5p" (OuterVolumeSpecName: "kube-api-access-lfw5p") pod "353c1f0c-de63-416e-b2b6-49155b0bd4a5" (UID: "353c1f0c-de63-416e-b2b6-49155b0bd4a5"). InnerVolumeSpecName "kube-api-access-lfw5p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.517482 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/353c1f0c-de63-416e-b2b6-49155b0bd4a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "353c1f0c-de63-416e-b2b6-49155b0bd4a5" (UID: "353c1f0c-de63-416e-b2b6-49155b0bd4a5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.525067 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/353c1f0c-de63-416e-b2b6-49155b0bd4a5-config-data" (OuterVolumeSpecName: "config-data") pod "353c1f0c-de63-416e-b2b6-49155b0bd4a5" (UID: "353c1f0c-de63-416e-b2b6-49155b0bd4a5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.538338 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.593816 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfw5p\" (UniqueName: \"kubernetes.io/projected/353c1f0c-de63-416e-b2b6-49155b0bd4a5-kube-api-access-lfw5p\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.593918 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/353c1f0c-de63-416e-b2b6-49155b0bd4a5-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.594005 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/353c1f0c-de63-416e-b2b6-49155b0bd4a5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.694940 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-scripts\") pod \"76095246-397f-49df-9758-e41ef115f04e\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.695313 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-config-data\") pod \"76095246-397f-49df-9758-e41ef115f04e\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.695408 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-combined-ca-bundle\") pod \"76095246-397f-49df-9758-e41ef115f04e\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.695566 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76095246-397f-49df-9758-e41ef115f04e-log-httpd\") pod \"76095246-397f-49df-9758-e41ef115f04e\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.695700 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4gl8g\" (UniqueName: 
\"kubernetes.io/projected/76095246-397f-49df-9758-e41ef115f04e-kube-api-access-4gl8g\") pod \"76095246-397f-49df-9758-e41ef115f04e\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.695853 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76095246-397f-49df-9758-e41ef115f04e-run-httpd\") pod \"76095246-397f-49df-9758-e41ef115f04e\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.695978 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-ceilometer-tls-certs\") pod \"76095246-397f-49df-9758-e41ef115f04e\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.696186 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-sg-core-conf-yaml\") pod \"76095246-397f-49df-9758-e41ef115f04e\" (UID: \"76095246-397f-49df-9758-e41ef115f04e\") " Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.696574 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76095246-397f-49df-9758-e41ef115f04e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "76095246-397f-49df-9758-e41ef115f04e" (UID: "76095246-397f-49df-9758-e41ef115f04e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.696928 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76095246-397f-49df-9758-e41ef115f04e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "76095246-397f-49df-9758-e41ef115f04e" (UID: "76095246-397f-49df-9758-e41ef115f04e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.697105 4971 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76095246-397f-49df-9758-e41ef115f04e-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.697186 4971 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76095246-397f-49df-9758-e41ef115f04e-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.698965 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-scripts" (OuterVolumeSpecName: "scripts") pod "76095246-397f-49df-9758-e41ef115f04e" (UID: "76095246-397f-49df-9758-e41ef115f04e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.704305 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76095246-397f-49df-9758-e41ef115f04e-kube-api-access-4gl8g" (OuterVolumeSpecName: "kube-api-access-4gl8g") pod "76095246-397f-49df-9758-e41ef115f04e" (UID: "76095246-397f-49df-9758-e41ef115f04e"). InnerVolumeSpecName "kube-api-access-4gl8g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:17:46 crc kubenswrapper[4971]: E1127 07:17:46.713360 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod353c1f0c_de63_416e_b2b6_49155b0bd4a5.slice\": RecentStats: unable to find data in memory cache]" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.728739 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "76095246-397f-49df-9758-e41ef115f04e" (UID: "76095246-397f-49df-9758-e41ef115f04e"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.733842 4971 generic.go:334] "Generic (PLEG): container finished" podID="76095246-397f-49df-9758-e41ef115f04e" containerID="b476ee801887a991ec352e6d262b810d23270fe912044830298d84052a838b2d" exitCode=0 Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.733922 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76095246-397f-49df-9758-e41ef115f04e","Type":"ContainerDied","Data":"b476ee801887a991ec352e6d262b810d23270fe912044830298d84052a838b2d"} Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.733957 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76095246-397f-49df-9758-e41ef115f04e","Type":"ContainerDied","Data":"5ef49813369074fb3b7cc4c248b5d6fdb4e8668cbaaa888aba7e7e00a0d24fe9"} Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.733978 4971 scope.go:117] "RemoveContainer" containerID="9056a9ec05a7c23aa18ad63623b7cf69c34e7d323c0ae3f328595b2c37f437c4" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.734145 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.740482 4971 generic.go:334] "Generic (PLEG): container finished" podID="353c1f0c-de63-416e-b2b6-49155b0bd4a5" containerID="dcfac204c7989c2b0acb8b5ce4c35db6fe6acec931af78c4c25dd2ac7e8c929f" exitCode=0 Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.740544 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"353c1f0c-de63-416e-b2b6-49155b0bd4a5","Type":"ContainerDied","Data":"dcfac204c7989c2b0acb8b5ce4c35db6fe6acec931af78c4c25dd2ac7e8c929f"} Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.740570 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"353c1f0c-de63-416e-b2b6-49155b0bd4a5","Type":"ContainerDied","Data":"d04569fc0bcd41add1fde2b23bd0d7f2e967fb7f8637d0a710af73767ddc6658"} Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.740674 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.773489 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.773575 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "76095246-397f-49df-9758-e41ef115f04e" (UID: "76095246-397f-49df-9758-e41ef115f04e"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.777672 4971 scope.go:117] "RemoveContainer" containerID="1480a56592f432ac9c7f7cdd1c22db97b04b9ca7ca9c7343b9a017325fd12817" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.800314 4971 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.800350 4971 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.800364 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.800377 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4gl8g\" (UniqueName: \"kubernetes.io/projected/76095246-397f-49df-9758-e41ef115f04e-kube-api-access-4gl8g\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.805617 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "76095246-397f-49df-9758-e41ef115f04e" (UID: "76095246-397f-49df-9758-e41ef115f04e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.819821 4971 scope.go:117] "RemoveContainer" containerID="b476ee801887a991ec352e6d262b810d23270fe912044830298d84052a838b2d" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.819970 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.834670 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 27 07:17:46 crc kubenswrapper[4971]: E1127 07:17:46.835266 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="353c1f0c-de63-416e-b2b6-49155b0bd4a5" containerName="nova-api-log" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.835294 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="353c1f0c-de63-416e-b2b6-49155b0bd4a5" containerName="nova-api-log" Nov 27 07:17:46 crc kubenswrapper[4971]: E1127 07:17:46.835322 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76095246-397f-49df-9758-e41ef115f04e" containerName="ceilometer-central-agent" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.835331 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="76095246-397f-49df-9758-e41ef115f04e" containerName="ceilometer-central-agent" Nov 27 07:17:46 crc kubenswrapper[4971]: E1127 07:17:46.835350 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76095246-397f-49df-9758-e41ef115f04e" containerName="sg-core" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.835359 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="76095246-397f-49df-9758-e41ef115f04e" containerName="sg-core" Nov 27 07:17:46 crc kubenswrapper[4971]: E1127 07:17:46.835374 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76095246-397f-49df-9758-e41ef115f04e" containerName="proxy-httpd" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.835382 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="76095246-397f-49df-9758-e41ef115f04e" containerName="proxy-httpd" Nov 27 07:17:46 crc kubenswrapper[4971]: E1127 07:17:46.835424 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="353c1f0c-de63-416e-b2b6-49155b0bd4a5" containerName="nova-api-api" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.835434 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="353c1f0c-de63-416e-b2b6-49155b0bd4a5" containerName="nova-api-api" Nov 27 07:17:46 crc kubenswrapper[4971]: E1127 07:17:46.835452 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76095246-397f-49df-9758-e41ef115f04e" containerName="ceilometer-notification-agent" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.835462 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="76095246-397f-49df-9758-e41ef115f04e" containerName="ceilometer-notification-agent" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.835711 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="76095246-397f-49df-9758-e41ef115f04e" containerName="proxy-httpd" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.835736 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="353c1f0c-de63-416e-b2b6-49155b0bd4a5" containerName="nova-api-api" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.835756 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="76095246-397f-49df-9758-e41ef115f04e" containerName="ceilometer-notification-agent" Nov 27 07:17:46 crc 
kubenswrapper[4971]: I1127 07:17:46.835776 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="353c1f0c-de63-416e-b2b6-49155b0bd4a5" containerName="nova-api-log" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.835788 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="76095246-397f-49df-9758-e41ef115f04e" containerName="sg-core" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.835804 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="76095246-397f-49df-9758-e41ef115f04e" containerName="ceilometer-central-agent" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.837264 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.840660 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.841471 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.841745 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.859781 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.863957 4971 scope.go:117] "RemoveContainer" containerID="0aa5575b639548fda0cd5e9bc080b30b926779f25a3673e3a45f9a5dfba89e65" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.864305 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-config-data" (OuterVolumeSpecName: "config-data") pod "76095246-397f-49df-9758-e41ef115f04e" (UID: "76095246-397f-49df-9758-e41ef115f04e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.890455 4971 scope.go:117] "RemoveContainer" containerID="9056a9ec05a7c23aa18ad63623b7cf69c34e7d323c0ae3f328595b2c37f437c4" Nov 27 07:17:46 crc kubenswrapper[4971]: E1127 07:17:46.891042 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9056a9ec05a7c23aa18ad63623b7cf69c34e7d323c0ae3f328595b2c37f437c4\": container with ID starting with 9056a9ec05a7c23aa18ad63623b7cf69c34e7d323c0ae3f328595b2c37f437c4 not found: ID does not exist" containerID="9056a9ec05a7c23aa18ad63623b7cf69c34e7d323c0ae3f328595b2c37f437c4" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.891087 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9056a9ec05a7c23aa18ad63623b7cf69c34e7d323c0ae3f328595b2c37f437c4"} err="failed to get container status \"9056a9ec05a7c23aa18ad63623b7cf69c34e7d323c0ae3f328595b2c37f437c4\": rpc error: code = NotFound desc = could not find container \"9056a9ec05a7c23aa18ad63623b7cf69c34e7d323c0ae3f328595b2c37f437c4\": container with ID starting with 9056a9ec05a7c23aa18ad63623b7cf69c34e7d323c0ae3f328595b2c37f437c4 not found: ID does not exist" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.891116 4971 scope.go:117] "RemoveContainer" containerID="1480a56592f432ac9c7f7cdd1c22db97b04b9ca7ca9c7343b9a017325fd12817" Nov 27 07:17:46 crc kubenswrapper[4971]: E1127 07:17:46.891493 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1480a56592f432ac9c7f7cdd1c22db97b04b9ca7ca9c7343b9a017325fd12817\": container with ID starting with 1480a56592f432ac9c7f7cdd1c22db97b04b9ca7ca9c7343b9a017325fd12817 not found: ID does not exist" containerID="1480a56592f432ac9c7f7cdd1c22db97b04b9ca7ca9c7343b9a017325fd12817" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.891530 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1480a56592f432ac9c7f7cdd1c22db97b04b9ca7ca9c7343b9a017325fd12817"} err="failed to get container status \"1480a56592f432ac9c7f7cdd1c22db97b04b9ca7ca9c7343b9a017325fd12817\": rpc error: code = NotFound desc = could not find container \"1480a56592f432ac9c7f7cdd1c22db97b04b9ca7ca9c7343b9a017325fd12817\": container with ID starting with 1480a56592f432ac9c7f7cdd1c22db97b04b9ca7ca9c7343b9a017325fd12817 not found: ID does not exist" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.891616 4971 scope.go:117] "RemoveContainer" containerID="b476ee801887a991ec352e6d262b810d23270fe912044830298d84052a838b2d" Nov 27 07:17:46 crc kubenswrapper[4971]: E1127 07:17:46.892012 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b476ee801887a991ec352e6d262b810d23270fe912044830298d84052a838b2d\": container with ID starting with b476ee801887a991ec352e6d262b810d23270fe912044830298d84052a838b2d not found: ID does not exist" containerID="b476ee801887a991ec352e6d262b810d23270fe912044830298d84052a838b2d" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.892069 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b476ee801887a991ec352e6d262b810d23270fe912044830298d84052a838b2d"} err="failed to get container status \"b476ee801887a991ec352e6d262b810d23270fe912044830298d84052a838b2d\": rpc error: code = NotFound desc = could not 
find container \"b476ee801887a991ec352e6d262b810d23270fe912044830298d84052a838b2d\": container with ID starting with b476ee801887a991ec352e6d262b810d23270fe912044830298d84052a838b2d not found: ID does not exist" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.892107 4971 scope.go:117] "RemoveContainer" containerID="0aa5575b639548fda0cd5e9bc080b30b926779f25a3673e3a45f9a5dfba89e65" Nov 27 07:17:46 crc kubenswrapper[4971]: E1127 07:17:46.892448 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0aa5575b639548fda0cd5e9bc080b30b926779f25a3673e3a45f9a5dfba89e65\": container with ID starting with 0aa5575b639548fda0cd5e9bc080b30b926779f25a3673e3a45f9a5dfba89e65 not found: ID does not exist" containerID="0aa5575b639548fda0cd5e9bc080b30b926779f25a3673e3a45f9a5dfba89e65" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.892476 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0aa5575b639548fda0cd5e9bc080b30b926779f25a3673e3a45f9a5dfba89e65"} err="failed to get container status \"0aa5575b639548fda0cd5e9bc080b30b926779f25a3673e3a45f9a5dfba89e65\": rpc error: code = NotFound desc = could not find container \"0aa5575b639548fda0cd5e9bc080b30b926779f25a3673e3a45f9a5dfba89e65\": container with ID starting with 0aa5575b639548fda0cd5e9bc080b30b926779f25a3673e3a45f9a5dfba89e65 not found: ID does not exist" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.892490 4971 scope.go:117] "RemoveContainer" containerID="dcfac204c7989c2b0acb8b5ce4c35db6fe6acec931af78c4c25dd2ac7e8c929f" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.901790 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-config-data\") pod \"nova-api-0\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " pod="openstack/nova-api-0" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.901842 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " pod="openstack/nova-api-0" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.901917 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-public-tls-certs\") pod \"nova-api-0\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " pod="openstack/nova-api-0" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.901971 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-logs\") pod \"nova-api-0\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " pod="openstack/nova-api-0" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.902025 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " pod="openstack/nova-api-0" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.902047 4971 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-562kx\" (UniqueName: \"kubernetes.io/projected/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-kube-api-access-562kx\") pod \"nova-api-0\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " pod="openstack/nova-api-0" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.902107 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.902119 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76095246-397f-49df-9758-e41ef115f04e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:46 crc kubenswrapper[4971]: I1127 07:17:46.975948 4971 scope.go:117] "RemoveContainer" containerID="26ca8147f93e2a817c647364a41bd1eaf9334de41e901a3f8adc56c0ee2f5b24" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.002198 4971 scope.go:117] "RemoveContainer" containerID="dcfac204c7989c2b0acb8b5ce4c35db6fe6acec931af78c4c25dd2ac7e8c929f" Nov 27 07:17:47 crc kubenswrapper[4971]: E1127 07:17:47.002705 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dcfac204c7989c2b0acb8b5ce4c35db6fe6acec931af78c4c25dd2ac7e8c929f\": container with ID starting with dcfac204c7989c2b0acb8b5ce4c35db6fe6acec931af78c4c25dd2ac7e8c929f not found: ID does not exist" containerID="dcfac204c7989c2b0acb8b5ce4c35db6fe6acec931af78c4c25dd2ac7e8c929f" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.002737 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcfac204c7989c2b0acb8b5ce4c35db6fe6acec931af78c4c25dd2ac7e8c929f"} err="failed to get container status \"dcfac204c7989c2b0acb8b5ce4c35db6fe6acec931af78c4c25dd2ac7e8c929f\": rpc error: code = NotFound desc = could not find container \"dcfac204c7989c2b0acb8b5ce4c35db6fe6acec931af78c4c25dd2ac7e8c929f\": container with ID starting with dcfac204c7989c2b0acb8b5ce4c35db6fe6acec931af78c4c25dd2ac7e8c929f not found: ID does not exist" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.002769 4971 scope.go:117] "RemoveContainer" containerID="26ca8147f93e2a817c647364a41bd1eaf9334de41e901a3f8adc56c0ee2f5b24" Nov 27 07:17:47 crc kubenswrapper[4971]: E1127 07:17:47.003130 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26ca8147f93e2a817c647364a41bd1eaf9334de41e901a3f8adc56c0ee2f5b24\": container with ID starting with 26ca8147f93e2a817c647364a41bd1eaf9334de41e901a3f8adc56c0ee2f5b24 not found: ID does not exist" containerID="26ca8147f93e2a817c647364a41bd1eaf9334de41e901a3f8adc56c0ee2f5b24" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.003172 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26ca8147f93e2a817c647364a41bd1eaf9334de41e901a3f8adc56c0ee2f5b24"} err="failed to get container status \"26ca8147f93e2a817c647364a41bd1eaf9334de41e901a3f8adc56c0ee2f5b24\": rpc error: code = NotFound desc = could not find container \"26ca8147f93e2a817c647364a41bd1eaf9334de41e901a3f8adc56c0ee2f5b24\": container with ID starting with 26ca8147f93e2a817c647364a41bd1eaf9334de41e901a3f8adc56c0ee2f5b24 not found: ID does not exist" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 
07:17:47.004235 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-config-data\") pod \"nova-api-0\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " pod="openstack/nova-api-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.004457 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " pod="openstack/nova-api-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.004673 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-public-tls-certs\") pod \"nova-api-0\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " pod="openstack/nova-api-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.004841 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-logs\") pod \"nova-api-0\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " pod="openstack/nova-api-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.005038 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " pod="openstack/nova-api-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.005167 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-562kx\" (UniqueName: \"kubernetes.io/projected/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-kube-api-access-562kx\") pod \"nova-api-0\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " pod="openstack/nova-api-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.005219 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-logs\") pod \"nova-api-0\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " pod="openstack/nova-api-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.008637 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " pod="openstack/nova-api-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.009261 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-public-tls-certs\") pod \"nova-api-0\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " pod="openstack/nova-api-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.010619 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " pod="openstack/nova-api-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.010624 4971 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-config-data\") pod \"nova-api-0\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " pod="openstack/nova-api-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.027418 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-562kx\" (UniqueName: \"kubernetes.io/projected/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-kube-api-access-562kx\") pod \"nova-api-0\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " pod="openstack/nova-api-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.133977 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.147115 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.162614 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.165501 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.168640 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.168877 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.169012 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.169103 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.187560 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.309328 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.309699 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-scripts\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.309814 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbft4\" (UniqueName: \"kubernetes.io/projected/41a3ef31-df5e-4cb9-8983-40a16f46823c-kube-api-access-xbft4\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.310287 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.310331 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.310415 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41a3ef31-df5e-4cb9-8983-40a16f46823c-run-httpd\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.310467 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-config-data\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.311368 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41a3ef31-df5e-4cb9-8983-40a16f46823c-log-httpd\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.412652 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-scripts\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc 
kubenswrapper[4971]: I1127 07:17:47.412715 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbft4\" (UniqueName: \"kubernetes.io/projected/41a3ef31-df5e-4cb9-8983-40a16f46823c-kube-api-access-xbft4\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.412741 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.412758 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.412805 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41a3ef31-df5e-4cb9-8983-40a16f46823c-run-httpd\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.412838 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-config-data\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.412896 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41a3ef31-df5e-4cb9-8983-40a16f46823c-log-httpd\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.412934 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.415054 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41a3ef31-df5e-4cb9-8983-40a16f46823c-run-httpd\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.415276 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41a3ef31-df5e-4cb9-8983-40a16f46823c-log-httpd\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.419742 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc 
kubenswrapper[4971]: I1127 07:17:47.420441 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.424827 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-config-data\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.428632 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.430695 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-scripts\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.432320 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbft4\" (UniqueName: \"kubernetes.io/projected/41a3ef31-df5e-4cb9-8983-40a16f46823c-kube-api-access-xbft4\") pod \"ceilometer-0\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.608552 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.632313 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 27 07:17:47 crc kubenswrapper[4971]: W1127 07:17:47.637112 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0a02759_f505_4e1c_ad20_d7c2b7bd4a03.slice/crio-ba811350450f4ee87808db4f9ff8dbaed68e2095a8a7540d0524441241de91b7 WatchSource:0}: Error finding container ba811350450f4ee87808db4f9ff8dbaed68e2095a8a7540d0524441241de91b7: Status 404 returned error can't find the container with id ba811350450f4ee87808db4f9ff8dbaed68e2095a8a7540d0524441241de91b7 Nov 27 07:17:47 crc kubenswrapper[4971]: I1127 07:17:47.751944 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03","Type":"ContainerStarted","Data":"ba811350450f4ee87808db4f9ff8dbaed68e2095a8a7540d0524441241de91b7"} Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.065898 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.504828 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-k7z68"] Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.507570 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-k7z68" Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.612616 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="353c1f0c-de63-416e-b2b6-49155b0bd4a5" path="/var/lib/kubelet/pods/353c1f0c-de63-416e-b2b6-49155b0bd4a5/volumes" Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.614209 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76095246-397f-49df-9758-e41ef115f04e" path="/var/lib/kubelet/pods/76095246-397f-49df-9758-e41ef115f04e/volumes" Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.615589 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k7z68"] Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.645748 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8gqt\" (UniqueName: \"kubernetes.io/projected/ce5e91d8-51b9-4c86-85aa-3d78b68c2e23-kube-api-access-w8gqt\") pod \"redhat-operators-k7z68\" (UID: \"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23\") " pod="openshift-marketplace/redhat-operators-k7z68" Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.645876 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce5e91d8-51b9-4c86-85aa-3d78b68c2e23-catalog-content\") pod \"redhat-operators-k7z68\" (UID: \"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23\") " pod="openshift-marketplace/redhat-operators-k7z68" Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.645905 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce5e91d8-51b9-4c86-85aa-3d78b68c2e23-utilities\") pod \"redhat-operators-k7z68\" (UID: \"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23\") " pod="openshift-marketplace/redhat-operators-k7z68" Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.747551 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce5e91d8-51b9-4c86-85aa-3d78b68c2e23-catalog-content\") pod \"redhat-operators-k7z68\" (UID: \"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23\") " pod="openshift-marketplace/redhat-operators-k7z68" Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.748171 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce5e91d8-51b9-4c86-85aa-3d78b68c2e23-utilities\") pod \"redhat-operators-k7z68\" (UID: \"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23\") " pod="openshift-marketplace/redhat-operators-k7z68" Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.748297 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8gqt\" (UniqueName: \"kubernetes.io/projected/ce5e91d8-51b9-4c86-85aa-3d78b68c2e23-kube-api-access-w8gqt\") pod \"redhat-operators-k7z68\" (UID: \"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23\") " pod="openshift-marketplace/redhat-operators-k7z68" Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.749571 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce5e91d8-51b9-4c86-85aa-3d78b68c2e23-catalog-content\") pod \"redhat-operators-k7z68\" (UID: \"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23\") " pod="openshift-marketplace/redhat-operators-k7z68" Nov 27 07:17:48 crc 
kubenswrapper[4971]: I1127 07:17:48.749947 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce5e91d8-51b9-4c86-85aa-3d78b68c2e23-utilities\") pod \"redhat-operators-k7z68\" (UID: \"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23\") " pod="openshift-marketplace/redhat-operators-k7z68" Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.773348 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41a3ef31-df5e-4cb9-8983-40a16f46823c","Type":"ContainerStarted","Data":"dcad59636dc1a6d93f37718d2ad05b92be2d5dd4fe4fa7c3cd8a3c37b02610ec"} Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.776505 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8gqt\" (UniqueName: \"kubernetes.io/projected/ce5e91d8-51b9-4c86-85aa-3d78b68c2e23-kube-api-access-w8gqt\") pod \"redhat-operators-k7z68\" (UID: \"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23\") " pod="openshift-marketplace/redhat-operators-k7z68" Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.776599 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03","Type":"ContainerStarted","Data":"acd1b3bb060a3329f6548f899ba6d5bbf30135b64dfa6a4291b324203fb94aa7"} Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.776641 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03","Type":"ContainerStarted","Data":"26043e9a97f70a4db001fd498f0f7bf0182a5702409fc73df98877e2bf3b8b5d"} Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.801038 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.801018569 podStartE2EDuration="2.801018569s" podCreationTimestamp="2025-11-27 07:17:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:17:48.796805315 +0000 UTC m=+1506.988849243" watchObservedRunningTime="2025-11-27 07:17:48.801018569 +0000 UTC m=+1506.993062487" Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.837493 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-k7z68" Nov 27 07:17:48 crc kubenswrapper[4971]: I1127 07:17:48.997487 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:49 crc kubenswrapper[4971]: I1127 07:17:49.027023 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:49 crc kubenswrapper[4971]: I1127 07:17:49.072694 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dglpp" Nov 27 07:17:49 crc kubenswrapper[4971]: I1127 07:17:49.074025 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dglpp" Nov 27 07:17:49 crc kubenswrapper[4971]: I1127 07:17:49.186397 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dglpp" Nov 27 07:17:49 crc kubenswrapper[4971]: I1127 07:17:49.237834 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k7z68"] Nov 27 07:17:49 crc kubenswrapper[4971]: I1127 07:17:49.787502 4971 generic.go:334] "Generic (PLEG): container finished" podID="ce5e91d8-51b9-4c86-85aa-3d78b68c2e23" containerID="c204b0c7313f8addf18fb4df4db55222a083f76eddd95340d8ff4a9ee468f9fe" exitCode=0 Nov 27 07:17:49 crc kubenswrapper[4971]: I1127 07:17:49.787592 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k7z68" event={"ID":"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23","Type":"ContainerDied","Data":"c204b0c7313f8addf18fb4df4db55222a083f76eddd95340d8ff4a9ee468f9fe"} Nov 27 07:17:49 crc kubenswrapper[4971]: I1127 07:17:49.788992 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k7z68" event={"ID":"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23","Type":"ContainerStarted","Data":"a27e4dc636c997bc3820f4e772856f9f9241daebde4aba60e9fcd1ecc51f9d8d"} Nov 27 07:17:49 crc kubenswrapper[4971]: I1127 07:17:49.791733 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41a3ef31-df5e-4cb9-8983-40a16f46823c","Type":"ContainerStarted","Data":"268b7b5e3e099e59f58680414492d2b607e8bfc3bf0455ac64792b601ae0a545"} Nov 27 07:17:49 crc kubenswrapper[4971]: I1127 07:17:49.791780 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41a3ef31-df5e-4cb9-8983-40a16f46823c","Type":"ContainerStarted","Data":"e42e5fec5c109e2ea0c69c634a81c7905e5cc3f5aa9cfe275d500939cb600b2b"} Nov 27 07:17:49 crc kubenswrapper[4971]: I1127 07:17:49.812139 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:17:49 crc kubenswrapper[4971]: I1127 07:17:49.880132 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dglpp" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.030283 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-pkkk2"] Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.043868 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-pkkk2"] Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.043986 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-pkkk2" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.049955 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.049972 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.104364 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.179886 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-648d5566bc-jrbwx"] Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.180203 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" podUID="b03fc497-c5af-4176-9127-f0f1103aecca" containerName="dnsmasq-dns" containerID="cri-o://4ce78a43f7b9babd3c1c69d0e1c4e4f2fe1fede1d641aa46c89646e8677c7552" gracePeriod=10 Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.193336 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/457ab172-18f9-4aa4-85b8-eae591089ad7-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-pkkk2\" (UID: \"457ab172-18f9-4aa4-85b8-eae591089ad7\") " pod="openstack/nova-cell1-cell-mapping-pkkk2" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.193447 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/457ab172-18f9-4aa4-85b8-eae591089ad7-scripts\") pod \"nova-cell1-cell-mapping-pkkk2\" (UID: \"457ab172-18f9-4aa4-85b8-eae591089ad7\") " pod="openstack/nova-cell1-cell-mapping-pkkk2" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.193512 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/457ab172-18f9-4aa4-85b8-eae591089ad7-config-data\") pod \"nova-cell1-cell-mapping-pkkk2\" (UID: \"457ab172-18f9-4aa4-85b8-eae591089ad7\") " pod="openstack/nova-cell1-cell-mapping-pkkk2" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.193562 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jnvl\" (UniqueName: \"kubernetes.io/projected/457ab172-18f9-4aa4-85b8-eae591089ad7-kube-api-access-7jnvl\") pod \"nova-cell1-cell-mapping-pkkk2\" (UID: \"457ab172-18f9-4aa4-85b8-eae591089ad7\") " pod="openstack/nova-cell1-cell-mapping-pkkk2" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.295305 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/457ab172-18f9-4aa4-85b8-eae591089ad7-scripts\") pod \"nova-cell1-cell-mapping-pkkk2\" (UID: \"457ab172-18f9-4aa4-85b8-eae591089ad7\") " pod="openstack/nova-cell1-cell-mapping-pkkk2" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.295467 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/457ab172-18f9-4aa4-85b8-eae591089ad7-config-data\") pod \"nova-cell1-cell-mapping-pkkk2\" (UID: \"457ab172-18f9-4aa4-85b8-eae591089ad7\") " pod="openstack/nova-cell1-cell-mapping-pkkk2" Nov 27 07:17:50 crc kubenswrapper[4971]: 
I1127 07:17:50.295507 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jnvl\" (UniqueName: \"kubernetes.io/projected/457ab172-18f9-4aa4-85b8-eae591089ad7-kube-api-access-7jnvl\") pod \"nova-cell1-cell-mapping-pkkk2\" (UID: \"457ab172-18f9-4aa4-85b8-eae591089ad7\") " pod="openstack/nova-cell1-cell-mapping-pkkk2" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.295642 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/457ab172-18f9-4aa4-85b8-eae591089ad7-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-pkkk2\" (UID: \"457ab172-18f9-4aa4-85b8-eae591089ad7\") " pod="openstack/nova-cell1-cell-mapping-pkkk2" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.300490 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/457ab172-18f9-4aa4-85b8-eae591089ad7-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-pkkk2\" (UID: \"457ab172-18f9-4aa4-85b8-eae591089ad7\") " pod="openstack/nova-cell1-cell-mapping-pkkk2" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.304149 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/457ab172-18f9-4aa4-85b8-eae591089ad7-config-data\") pod \"nova-cell1-cell-mapping-pkkk2\" (UID: \"457ab172-18f9-4aa4-85b8-eae591089ad7\") " pod="openstack/nova-cell1-cell-mapping-pkkk2" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.308280 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/457ab172-18f9-4aa4-85b8-eae591089ad7-scripts\") pod \"nova-cell1-cell-mapping-pkkk2\" (UID: \"457ab172-18f9-4aa4-85b8-eae591089ad7\") " pod="openstack/nova-cell1-cell-mapping-pkkk2" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.323934 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jnvl\" (UniqueName: \"kubernetes.io/projected/457ab172-18f9-4aa4-85b8-eae591089ad7-kube-api-access-7jnvl\") pod \"nova-cell1-cell-mapping-pkkk2\" (UID: \"457ab172-18f9-4aa4-85b8-eae591089ad7\") " pod="openstack/nova-cell1-cell-mapping-pkkk2" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.378634 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-pkkk2" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.721542 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.809012 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qb7m\" (UniqueName: \"kubernetes.io/projected/b03fc497-c5af-4176-9127-f0f1103aecca-kube-api-access-7qb7m\") pod \"b03fc497-c5af-4176-9127-f0f1103aecca\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.809114 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-ovsdbserver-sb\") pod \"b03fc497-c5af-4176-9127-f0f1103aecca\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.809218 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-dns-svc\") pod \"b03fc497-c5af-4176-9127-f0f1103aecca\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.809245 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-ovsdbserver-nb\") pod \"b03fc497-c5af-4176-9127-f0f1103aecca\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.809313 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-config\") pod \"b03fc497-c5af-4176-9127-f0f1103aecca\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.809369 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-dns-swift-storage-0\") pod \"b03fc497-c5af-4176-9127-f0f1103aecca\" (UID: \"b03fc497-c5af-4176-9127-f0f1103aecca\") " Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.810622 4971 generic.go:334] "Generic (PLEG): container finished" podID="b03fc497-c5af-4176-9127-f0f1103aecca" containerID="4ce78a43f7b9babd3c1c69d0e1c4e4f2fe1fede1d641aa46c89646e8677c7552" exitCode=0 Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.810677 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" event={"ID":"b03fc497-c5af-4176-9127-f0f1103aecca","Type":"ContainerDied","Data":"4ce78a43f7b9babd3c1c69d0e1c4e4f2fe1fede1d641aa46c89646e8677c7552"} Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.810704 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" event={"ID":"b03fc497-c5af-4176-9127-f0f1103aecca","Type":"ContainerDied","Data":"1b3dcd94ddef37e1bbc0019504e489e2b4e58774e4e63284ed552566d75c690a"} Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.810722 4971 scope.go:117] "RemoveContainer" containerID="4ce78a43f7b9babd3c1c69d0e1c4e4f2fe1fede1d641aa46c89646e8677c7552" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.810825 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-648d5566bc-jrbwx" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.820016 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k7z68" event={"ID":"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23","Type":"ContainerStarted","Data":"a30fd31a8f0a9d180a2532c1c651736becb00ce80e5f048de979d4958545ca3a"} Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.820298 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b03fc497-c5af-4176-9127-f0f1103aecca-kube-api-access-7qb7m" (OuterVolumeSpecName: "kube-api-access-7qb7m") pod "b03fc497-c5af-4176-9127-f0f1103aecca" (UID: "b03fc497-c5af-4176-9127-f0f1103aecca"). InnerVolumeSpecName "kube-api-access-7qb7m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.830615 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41a3ef31-df5e-4cb9-8983-40a16f46823c","Type":"ContainerStarted","Data":"0b2dec937686384d25cd26d898333c4fc81933ae416ed8605ae98f8c6d7ffe0d"} Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.849949 4971 scope.go:117] "RemoveContainer" containerID="006632e0a07d3a04353081708e90f331675590e5810529c3f7d8c0efbfdc6095" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.912427 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qb7m\" (UniqueName: \"kubernetes.io/projected/b03fc497-c5af-4176-9127-f0f1103aecca-kube-api-access-7qb7m\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.917078 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-pkkk2"] Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.957436 4971 scope.go:117] "RemoveContainer" containerID="4ce78a43f7b9babd3c1c69d0e1c4e4f2fe1fede1d641aa46c89646e8677c7552" Nov 27 07:17:50 crc kubenswrapper[4971]: E1127 07:17:50.958048 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ce78a43f7b9babd3c1c69d0e1c4e4f2fe1fede1d641aa46c89646e8677c7552\": container with ID starting with 4ce78a43f7b9babd3c1c69d0e1c4e4f2fe1fede1d641aa46c89646e8677c7552 not found: ID does not exist" containerID="4ce78a43f7b9babd3c1c69d0e1c4e4f2fe1fede1d641aa46c89646e8677c7552" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.958167 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ce78a43f7b9babd3c1c69d0e1c4e4f2fe1fede1d641aa46c89646e8677c7552"} err="failed to get container status \"4ce78a43f7b9babd3c1c69d0e1c4e4f2fe1fede1d641aa46c89646e8677c7552\": rpc error: code = NotFound desc = could not find container \"4ce78a43f7b9babd3c1c69d0e1c4e4f2fe1fede1d641aa46c89646e8677c7552\": container with ID starting with 4ce78a43f7b9babd3c1c69d0e1c4e4f2fe1fede1d641aa46c89646e8677c7552 not found: ID does not exist" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.958280 4971 scope.go:117] "RemoveContainer" containerID="006632e0a07d3a04353081708e90f331675590e5810529c3f7d8c0efbfdc6095" Nov 27 07:17:50 crc kubenswrapper[4971]: E1127 07:17:50.959747 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"006632e0a07d3a04353081708e90f331675590e5810529c3f7d8c0efbfdc6095\": container with ID starting with 006632e0a07d3a04353081708e90f331675590e5810529c3f7d8c0efbfdc6095 not 
found: ID does not exist" containerID="006632e0a07d3a04353081708e90f331675590e5810529c3f7d8c0efbfdc6095" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.959852 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"006632e0a07d3a04353081708e90f331675590e5810529c3f7d8c0efbfdc6095"} err="failed to get container status \"006632e0a07d3a04353081708e90f331675590e5810529c3f7d8c0efbfdc6095\": rpc error: code = NotFound desc = could not find container \"006632e0a07d3a04353081708e90f331675590e5810529c3f7d8c0efbfdc6095\": container with ID starting with 006632e0a07d3a04353081708e90f331675590e5810529c3f7d8c0efbfdc6095 not found: ID does not exist" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.979791 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b03fc497-c5af-4176-9127-f0f1103aecca" (UID: "b03fc497-c5af-4176-9127-f0f1103aecca"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:17:50 crc kubenswrapper[4971]: I1127 07:17:50.985155 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b03fc497-c5af-4176-9127-f0f1103aecca" (UID: "b03fc497-c5af-4176-9127-f0f1103aecca"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:17:51 crc kubenswrapper[4971]: I1127 07:17:51.001568 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-config" (OuterVolumeSpecName: "config") pod "b03fc497-c5af-4176-9127-f0f1103aecca" (UID: "b03fc497-c5af-4176-9127-f0f1103aecca"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:17:51 crc kubenswrapper[4971]: I1127 07:17:51.012347 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b03fc497-c5af-4176-9127-f0f1103aecca" (UID: "b03fc497-c5af-4176-9127-f0f1103aecca"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:17:51 crc kubenswrapper[4971]: I1127 07:17:51.015211 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:51 crc kubenswrapper[4971]: I1127 07:17:51.015236 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:51 crc kubenswrapper[4971]: I1127 07:17:51.015246 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:51 crc kubenswrapper[4971]: I1127 07:17:51.015254 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:51 crc kubenswrapper[4971]: I1127 07:17:51.017162 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b03fc497-c5af-4176-9127-f0f1103aecca" (UID: "b03fc497-c5af-4176-9127-f0f1103aecca"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:17:51 crc kubenswrapper[4971]: I1127 07:17:51.117025 4971 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b03fc497-c5af-4176-9127-f0f1103aecca-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 27 07:17:51 crc kubenswrapper[4971]: I1127 07:17:51.159299 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-648d5566bc-jrbwx"] Nov 27 07:17:51 crc kubenswrapper[4971]: I1127 07:17:51.175321 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-648d5566bc-jrbwx"] Nov 27 07:17:51 crc kubenswrapper[4971]: I1127 07:17:51.479737 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dglpp"] Nov 27 07:17:51 crc kubenswrapper[4971]: I1127 07:17:51.843967 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-pkkk2" event={"ID":"457ab172-18f9-4aa4-85b8-eae591089ad7","Type":"ContainerStarted","Data":"f8a41df47e9d0c49f7996de3bff1779009911f0b42f60ee5708ce91da4034a1e"} Nov 27 07:17:51 crc kubenswrapper[4971]: I1127 07:17:51.845307 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-pkkk2" event={"ID":"457ab172-18f9-4aa4-85b8-eae591089ad7","Type":"ContainerStarted","Data":"385a1778d52479ec21113129e1ad81825c8a608a9d772c254f53afee9d1ed91d"} Nov 27 07:17:51 crc kubenswrapper[4971]: I1127 07:17:51.867875 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-pkkk2" podStartSLOduration=2.867856477 podStartE2EDuration="2.867856477s" podCreationTimestamp="2025-11-27 07:17:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:17:51.861114425 +0000 UTC m=+1510.053158343" watchObservedRunningTime="2025-11-27 07:17:51.867856477 +0000 UTC m=+1510.059900425" Nov 27 07:17:52 
crc kubenswrapper[4971]: I1127 07:17:52.562926 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b03fc497-c5af-4176-9127-f0f1103aecca" path="/var/lib/kubelet/pods/b03fc497-c5af-4176-9127-f0f1103aecca/volumes" Nov 27 07:17:52 crc kubenswrapper[4971]: I1127 07:17:52.852734 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41a3ef31-df5e-4cb9-8983-40a16f46823c","Type":"ContainerStarted","Data":"246b3a277e05318dff5ef1ad5944cae22aa133678c3e078b2b0e80c043b33db8"} Nov 27 07:17:52 crc kubenswrapper[4971]: I1127 07:17:52.853585 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 27 07:17:52 crc kubenswrapper[4971]: I1127 07:17:52.855146 4971 generic.go:334] "Generic (PLEG): container finished" podID="ce5e91d8-51b9-4c86-85aa-3d78b68c2e23" containerID="a30fd31a8f0a9d180a2532c1c651736becb00ce80e5f048de979d4958545ca3a" exitCode=0 Nov 27 07:17:52 crc kubenswrapper[4971]: I1127 07:17:52.856239 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k7z68" event={"ID":"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23","Type":"ContainerDied","Data":"a30fd31a8f0a9d180a2532c1c651736becb00ce80e5f048de979d4958545ca3a"} Nov 27 07:17:52 crc kubenswrapper[4971]: I1127 07:17:52.857123 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dglpp" podUID="71d7b8e5-06be-4a2e-8d36-dd6e97616fec" containerName="registry-server" containerID="cri-o://4ddcbc42db2ba1064b1be23b19557092bc9f250cd522d599f52c0edf05df8c9a" gracePeriod=2 Nov 27 07:17:52 crc kubenswrapper[4971]: I1127 07:17:52.889377 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.358372045 podStartE2EDuration="5.889359866s" podCreationTimestamp="2025-11-27 07:17:47 +0000 UTC" firstStartedPulling="2025-11-27 07:17:48.072622312 +0000 UTC m=+1506.264666230" lastFinishedPulling="2025-11-27 07:17:51.603610143 +0000 UTC m=+1509.795654051" observedRunningTime="2025-11-27 07:17:52.882616494 +0000 UTC m=+1511.074660412" watchObservedRunningTime="2025-11-27 07:17:52.889359866 +0000 UTC m=+1511.081403784" Nov 27 07:17:53 crc kubenswrapper[4971]: I1127 07:17:53.907690 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k7z68" event={"ID":"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23","Type":"ContainerStarted","Data":"794da1a3241b802c1829cd89faaa822c91072e57e9db1b66ade83b481438c498"} Nov 27 07:17:53 crc kubenswrapper[4971]: I1127 07:17:53.913832 4971 generic.go:334] "Generic (PLEG): container finished" podID="71d7b8e5-06be-4a2e-8d36-dd6e97616fec" containerID="4ddcbc42db2ba1064b1be23b19557092bc9f250cd522d599f52c0edf05df8c9a" exitCode=0 Nov 27 07:17:53 crc kubenswrapper[4971]: I1127 07:17:53.914123 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dglpp" event={"ID":"71d7b8e5-06be-4a2e-8d36-dd6e97616fec","Type":"ContainerDied","Data":"4ddcbc42db2ba1064b1be23b19557092bc9f250cd522d599f52c0edf05df8c9a"} Nov 27 07:17:53 crc kubenswrapper[4971]: I1127 07:17:53.914171 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dglpp" event={"ID":"71d7b8e5-06be-4a2e-8d36-dd6e97616fec","Type":"ContainerDied","Data":"8ff80b03df77b3e60d9c32dce7bcf31d634cd74af037e369783569f1de01ae60"} Nov 27 07:17:53 crc kubenswrapper[4971]: I1127 07:17:53.914185 4971 
Nov 27 07:17:53 crc kubenswrapper[4971]: I1127 07:17:53.938348 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-k7z68" podStartSLOduration=2.288431339 podStartE2EDuration="5.938323225s" podCreationTimestamp="2025-11-27 07:17:48 +0000 UTC" firstStartedPulling="2025-11-27 07:17:49.789261941 +0000 UTC m=+1507.981305859" lastFinishedPulling="2025-11-27 07:17:53.439153827 +0000 UTC m=+1511.631197745" observedRunningTime="2025-11-27 07:17:53.93257568 +0000 UTC m=+1512.124619588" watchObservedRunningTime="2025-11-27 07:17:53.938323225 +0000 UTC m=+1512.130367143"
Nov 27 07:17:53 crc kubenswrapper[4971]: I1127 07:17:53.989607 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dglpp"
Nov 27 07:17:54 crc kubenswrapper[4971]: I1127 07:17:54.086514 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71d7b8e5-06be-4a2e-8d36-dd6e97616fec-utilities\") pod \"71d7b8e5-06be-4a2e-8d36-dd6e97616fec\" (UID: \"71d7b8e5-06be-4a2e-8d36-dd6e97616fec\") "
Nov 27 07:17:54 crc kubenswrapper[4971]: I1127 07:17:54.086640 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71d7b8e5-06be-4a2e-8d36-dd6e97616fec-catalog-content\") pod \"71d7b8e5-06be-4a2e-8d36-dd6e97616fec\" (UID: \"71d7b8e5-06be-4a2e-8d36-dd6e97616fec\") "
Nov 27 07:17:54 crc kubenswrapper[4971]: I1127 07:17:54.086680 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqwbs\" (UniqueName: \"kubernetes.io/projected/71d7b8e5-06be-4a2e-8d36-dd6e97616fec-kube-api-access-fqwbs\") pod \"71d7b8e5-06be-4a2e-8d36-dd6e97616fec\" (UID: \"71d7b8e5-06be-4a2e-8d36-dd6e97616fec\") "
Nov 27 07:17:54 crc kubenswrapper[4971]: I1127 07:17:54.087358 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71d7b8e5-06be-4a2e-8d36-dd6e97616fec-utilities" (OuterVolumeSpecName: "utilities") pod "71d7b8e5-06be-4a2e-8d36-dd6e97616fec" (UID: "71d7b8e5-06be-4a2e-8d36-dd6e97616fec"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 07:17:54 crc kubenswrapper[4971]: I1127 07:17:54.088903 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71d7b8e5-06be-4a2e-8d36-dd6e97616fec-utilities\") on node \"crc\" DevicePath \"\""
Nov 27 07:17:54 crc kubenswrapper[4971]: I1127 07:17:54.093714 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71d7b8e5-06be-4a2e-8d36-dd6e97616fec-kube-api-access-fqwbs" (OuterVolumeSpecName: "kube-api-access-fqwbs") pod "71d7b8e5-06be-4a2e-8d36-dd6e97616fec" (UID: "71d7b8e5-06be-4a2e-8d36-dd6e97616fec"). InnerVolumeSpecName "kube-api-access-fqwbs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 07:17:54 crc kubenswrapper[4971]: I1127 07:17:54.107093 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71d7b8e5-06be-4a2e-8d36-dd6e97616fec-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "71d7b8e5-06be-4a2e-8d36-dd6e97616fec" (UID: "71d7b8e5-06be-4a2e-8d36-dd6e97616fec"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 07:17:54 crc kubenswrapper[4971]: I1127 07:17:54.191010 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71d7b8e5-06be-4a2e-8d36-dd6e97616fec-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 27 07:17:54 crc kubenswrapper[4971]: I1127 07:17:54.191043 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqwbs\" (UniqueName: \"kubernetes.io/projected/71d7b8e5-06be-4a2e-8d36-dd6e97616fec-kube-api-access-fqwbs\") on node \"crc\" DevicePath \"\""
Nov 27 07:17:54 crc kubenswrapper[4971]: I1127 07:17:54.923321 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dglpp"
Nov 27 07:17:54 crc kubenswrapper[4971]: I1127 07:17:54.949049 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dglpp"]
Nov 27 07:17:54 crc kubenswrapper[4971]: I1127 07:17:54.958498 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dglpp"]
Nov 27 07:17:56 crc kubenswrapper[4971]: I1127 07:17:56.413821 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 07:17:56 crc kubenswrapper[4971]: I1127 07:17:56.414870 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 07:17:56 crc kubenswrapper[4971]: I1127 07:17:56.587089 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71d7b8e5-06be-4a2e-8d36-dd6e97616fec" path="/var/lib/kubelet/pods/71d7b8e5-06be-4a2e-8d36-dd6e97616fec/volumes"
Nov 27 07:17:56 crc kubenswrapper[4971]: I1127 07:17:56.942468 4971 generic.go:334] "Generic (PLEG): container finished" podID="457ab172-18f9-4aa4-85b8-eae591089ad7" containerID="f8a41df47e9d0c49f7996de3bff1779009911f0b42f60ee5708ce91da4034a1e" exitCode=0
Nov 27 07:17:56 crc kubenswrapper[4971]: I1127 07:17:56.942557 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-pkkk2" event={"ID":"457ab172-18f9-4aa4-85b8-eae591089ad7","Type":"ContainerDied","Data":"f8a41df47e9d0c49f7996de3bff1779009911f0b42f60ee5708ce91da4034a1e"}
Nov 27 07:17:57 crc kubenswrapper[4971]: I1127 07:17:57.169839 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Nov 27 07:17:57 crc kubenswrapper[4971]: I1127 07:17:57.170118 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.187842 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.199:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.188393 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.199:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
probeType="Startup" pod="openstack/nova-api-0" podUID="c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.199:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.333202 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dmvjh"] Nov 27 07:17:58 crc kubenswrapper[4971]: E1127 07:17:58.333792 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b03fc497-c5af-4176-9127-f0f1103aecca" containerName="init" Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.333815 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b03fc497-c5af-4176-9127-f0f1103aecca" containerName="init" Nov 27 07:17:58 crc kubenswrapper[4971]: E1127 07:17:58.333856 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b03fc497-c5af-4176-9127-f0f1103aecca" containerName="dnsmasq-dns" Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.333865 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b03fc497-c5af-4176-9127-f0f1103aecca" containerName="dnsmasq-dns" Nov 27 07:17:58 crc kubenswrapper[4971]: E1127 07:17:58.333875 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71d7b8e5-06be-4a2e-8d36-dd6e97616fec" containerName="extract-content" Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.333883 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="71d7b8e5-06be-4a2e-8d36-dd6e97616fec" containerName="extract-content" Nov 27 07:17:58 crc kubenswrapper[4971]: E1127 07:17:58.333899 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71d7b8e5-06be-4a2e-8d36-dd6e97616fec" containerName="extract-utilities" Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.333905 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="71d7b8e5-06be-4a2e-8d36-dd6e97616fec" containerName="extract-utilities" Nov 27 07:17:58 crc kubenswrapper[4971]: E1127 07:17:58.333934 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71d7b8e5-06be-4a2e-8d36-dd6e97616fec" containerName="registry-server" Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.333942 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="71d7b8e5-06be-4a2e-8d36-dd6e97616fec" containerName="registry-server" Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.334381 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b03fc497-c5af-4176-9127-f0f1103aecca" containerName="dnsmasq-dns" Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.334403 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="71d7b8e5-06be-4a2e-8d36-dd6e97616fec" containerName="registry-server" Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.336309 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dmvjh" Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.349913 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dmvjh"] Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.354845 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-pkkk2" Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.374093 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d86569d-88c3-4ae0-9261-affa354b038d-utilities\") pod \"community-operators-dmvjh\" (UID: \"5d86569d-88c3-4ae0-9261-affa354b038d\") " pod="openshift-marketplace/community-operators-dmvjh" Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.374193 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52cgl\" (UniqueName: \"kubernetes.io/projected/5d86569d-88c3-4ae0-9261-affa354b038d-kube-api-access-52cgl\") pod \"community-operators-dmvjh\" (UID: \"5d86569d-88c3-4ae0-9261-affa354b038d\") " pod="openshift-marketplace/community-operators-dmvjh" Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.374280 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d86569d-88c3-4ae0-9261-affa354b038d-catalog-content\") pod \"community-operators-dmvjh\" (UID: \"5d86569d-88c3-4ae0-9261-affa354b038d\") " pod="openshift-marketplace/community-operators-dmvjh" Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.476589 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/457ab172-18f9-4aa4-85b8-eae591089ad7-scripts\") pod \"457ab172-18f9-4aa4-85b8-eae591089ad7\" (UID: \"457ab172-18f9-4aa4-85b8-eae591089ad7\") " Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.476654 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/457ab172-18f9-4aa4-85b8-eae591089ad7-config-data\") pod \"457ab172-18f9-4aa4-85b8-eae591089ad7\" (UID: \"457ab172-18f9-4aa4-85b8-eae591089ad7\") " Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.476721 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/457ab172-18f9-4aa4-85b8-eae591089ad7-combined-ca-bundle\") pod \"457ab172-18f9-4aa4-85b8-eae591089ad7\" (UID: \"457ab172-18f9-4aa4-85b8-eae591089ad7\") " Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.476807 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jnvl\" (UniqueName: \"kubernetes.io/projected/457ab172-18f9-4aa4-85b8-eae591089ad7-kube-api-access-7jnvl\") pod \"457ab172-18f9-4aa4-85b8-eae591089ad7\" (UID: \"457ab172-18f9-4aa4-85b8-eae591089ad7\") " Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.477366 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d86569d-88c3-4ae0-9261-affa354b038d-utilities\") pod \"community-operators-dmvjh\" (UID: \"5d86569d-88c3-4ae0-9261-affa354b038d\") " pod="openshift-marketplace/community-operators-dmvjh" Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.477467 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52cgl\" (UniqueName: \"kubernetes.io/projected/5d86569d-88c3-4ae0-9261-affa354b038d-kube-api-access-52cgl\") pod \"community-operators-dmvjh\" (UID: \"5d86569d-88c3-4ae0-9261-affa354b038d\") " pod="openshift-marketplace/community-operators-dmvjh" Nov 27 07:17:58 crc 
Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.477672 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d86569d-88c3-4ae0-9261-affa354b038d-catalog-content\") pod \"community-operators-dmvjh\" (UID: \"5d86569d-88c3-4ae0-9261-affa354b038d\") " pod="openshift-marketplace/community-operators-dmvjh"
Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.478221 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d86569d-88c3-4ae0-9261-affa354b038d-catalog-content\") pod \"community-operators-dmvjh\" (UID: \"5d86569d-88c3-4ae0-9261-affa354b038d\") " pod="openshift-marketplace/community-operators-dmvjh"
Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.478240 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d86569d-88c3-4ae0-9261-affa354b038d-utilities\") pod \"community-operators-dmvjh\" (UID: \"5d86569d-88c3-4ae0-9261-affa354b038d\") " pod="openshift-marketplace/community-operators-dmvjh"
Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.484714 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/457ab172-18f9-4aa4-85b8-eae591089ad7-scripts" (OuterVolumeSpecName: "scripts") pod "457ab172-18f9-4aa4-85b8-eae591089ad7" (UID: "457ab172-18f9-4aa4-85b8-eae591089ad7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.508134 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/457ab172-18f9-4aa4-85b8-eae591089ad7-kube-api-access-7jnvl" (OuterVolumeSpecName: "kube-api-access-7jnvl") pod "457ab172-18f9-4aa4-85b8-eae591089ad7" (UID: "457ab172-18f9-4aa4-85b8-eae591089ad7"). InnerVolumeSpecName "kube-api-access-7jnvl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.518195 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52cgl\" (UniqueName: \"kubernetes.io/projected/5d86569d-88c3-4ae0-9261-affa354b038d-kube-api-access-52cgl\") pod \"community-operators-dmvjh\" (UID: \"5d86569d-88c3-4ae0-9261-affa354b038d\") " pod="openshift-marketplace/community-operators-dmvjh"
Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.547577 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/457ab172-18f9-4aa4-85b8-eae591089ad7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "457ab172-18f9-4aa4-85b8-eae591089ad7" (UID: "457ab172-18f9-4aa4-85b8-eae591089ad7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.557619 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/457ab172-18f9-4aa4-85b8-eae591089ad7-config-data" (OuterVolumeSpecName: "config-data") pod "457ab172-18f9-4aa4-85b8-eae591089ad7" (UID: "457ab172-18f9-4aa4-85b8-eae591089ad7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.581045 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/457ab172-18f9-4aa4-85b8-eae591089ad7-scripts\") on node \"crc\" DevicePath \"\""
Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.581077 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/457ab172-18f9-4aa4-85b8-eae591089ad7-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.581091 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/457ab172-18f9-4aa4-85b8-eae591089ad7-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.581103 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jnvl\" (UniqueName: \"kubernetes.io/projected/457ab172-18f9-4aa4-85b8-eae591089ad7-kube-api-access-7jnvl\") on node \"crc\" DevicePath \"\""
Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.685189 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dmvjh"
Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.838467 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-k7z68"
Nov 27 07:17:58 crc kubenswrapper[4971]: I1127 07:17:58.838522 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-k7z68"
Nov 27 07:17:59 crc kubenswrapper[4971]: I1127 07:17:59.034878 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-pkkk2" event={"ID":"457ab172-18f9-4aa4-85b8-eae591089ad7","Type":"ContainerDied","Data":"385a1778d52479ec21113129e1ad81825c8a608a9d772c254f53afee9d1ed91d"}
Nov 27 07:17:59 crc kubenswrapper[4971]: I1127 07:17:59.035208 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="385a1778d52479ec21113129e1ad81825c8a608a9d772c254f53afee9d1ed91d"
Nov 27 07:17:59 crc kubenswrapper[4971]: I1127 07:17:59.035322 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-pkkk2"
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-pkkk2" Nov 27 07:17:59 crc kubenswrapper[4971]: I1127 07:17:59.176367 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 27 07:17:59 crc kubenswrapper[4971]: I1127 07:17:59.176697 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" containerName="nova-api-log" containerID="cri-o://26043e9a97f70a4db001fd498f0f7bf0182a5702409fc73df98877e2bf3b8b5d" gracePeriod=30 Nov 27 07:17:59 crc kubenswrapper[4971]: I1127 07:17:59.177283 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" containerName="nova-api-api" containerID="cri-o://acd1b3bb060a3329f6548f899ba6d5bbf30135b64dfa6a4291b324203fb94aa7" gracePeriod=30 Nov 27 07:17:59 crc kubenswrapper[4971]: I1127 07:17:59.196640 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 07:17:59 crc kubenswrapper[4971]: I1127 07:17:59.196892 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="2b7ff1aa-9bdb-4c11-9a97-bac98829a89f" containerName="nova-scheduler-scheduler" containerID="cri-o://9dda0ab26da17d256febc0b7476eb425a2961dbd6725659139cfe57841fa7a79" gracePeriod=30 Nov 27 07:17:59 crc kubenswrapper[4971]: I1127 07:17:59.205369 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:17:59 crc kubenswrapper[4971]: I1127 07:17:59.205660 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="0195ada6-2a69-427f-8041-166dfdacf583" containerName="nova-metadata-log" containerID="cri-o://dfae08e08ac1dc8cbd0db6a30ba44f2a2d19a35f970e1bc67d0f92ecbeac6049" gracePeriod=30 Nov 27 07:17:59 crc kubenswrapper[4971]: I1127 07:17:59.205927 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="0195ada6-2a69-427f-8041-166dfdacf583" containerName="nova-metadata-metadata" containerID="cri-o://991ebc52378df336553930ac80c5badc6b2daafef116a6928889734ba2a45fa9" gracePeriod=30 Nov 27 07:17:59 crc kubenswrapper[4971]: I1127 07:17:59.325893 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dmvjh"] Nov 27 07:17:59 crc kubenswrapper[4971]: I1127 07:17:59.934333 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-k7z68" podUID="ce5e91d8-51b9-4c86-85aa-3d78b68c2e23" containerName="registry-server" probeResult="failure" output=< Nov 27 07:17:59 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 07:17:59 crc kubenswrapper[4971]: > Nov 27 07:18:00 crc kubenswrapper[4971]: I1127 07:18:00.048213 4971 generic.go:334] "Generic (PLEG): container finished" podID="0195ada6-2a69-427f-8041-166dfdacf583" containerID="dfae08e08ac1dc8cbd0db6a30ba44f2a2d19a35f970e1bc67d0f92ecbeac6049" exitCode=143 Nov 27 07:18:00 crc kubenswrapper[4971]: I1127 07:18:00.048339 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0195ada6-2a69-427f-8041-166dfdacf583","Type":"ContainerDied","Data":"dfae08e08ac1dc8cbd0db6a30ba44f2a2d19a35f970e1bc67d0f92ecbeac6049"} Nov 27 07:18:00 crc kubenswrapper[4971]: I1127 07:18:00.051025 4971 generic.go:334] "Generic (PLEG): container finished" 
podID="c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" containerID="26043e9a97f70a4db001fd498f0f7bf0182a5702409fc73df98877e2bf3b8b5d" exitCode=143 Nov 27 07:18:00 crc kubenswrapper[4971]: I1127 07:18:00.051105 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03","Type":"ContainerDied","Data":"26043e9a97f70a4db001fd498f0f7bf0182a5702409fc73df98877e2bf3b8b5d"} Nov 27 07:18:00 crc kubenswrapper[4971]: I1127 07:18:00.053376 4971 generic.go:334] "Generic (PLEG): container finished" podID="5d86569d-88c3-4ae0-9261-affa354b038d" containerID="78350e242cebc9d5ceece552a607bfa10b3679b3da8c2e88fdbc92b04b1d78c6" exitCode=0 Nov 27 07:18:00 crc kubenswrapper[4971]: I1127 07:18:00.053415 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmvjh" event={"ID":"5d86569d-88c3-4ae0-9261-affa354b038d","Type":"ContainerDied","Data":"78350e242cebc9d5ceece552a607bfa10b3679b3da8c2e88fdbc92b04b1d78c6"} Nov 27 07:18:00 crc kubenswrapper[4971]: I1127 07:18:00.053439 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmvjh" event={"ID":"5d86569d-88c3-4ae0-9261-affa354b038d","Type":"ContainerStarted","Data":"40d748253ee5190170fc8dd898f0251deaeb7bb5e454dca2188274ed2b3ad1f7"} Nov 27 07:18:02 crc kubenswrapper[4971]: I1127 07:18:02.111145 4971 generic.go:334] "Generic (PLEG): container finished" podID="5d86569d-88c3-4ae0-9261-affa354b038d" containerID="157c9a1f7be93fef1701d4f01336c177b13478b95c2a1a1425b285d3d31e589c" exitCode=0 Nov 27 07:18:02 crc kubenswrapper[4971]: I1127 07:18:02.111203 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmvjh" event={"ID":"5d86569d-88c3-4ae0-9261-affa354b038d","Type":"ContainerDied","Data":"157c9a1f7be93fef1701d4f01336c177b13478b95c2a1a1425b285d3d31e589c"} Nov 27 07:18:02 crc kubenswrapper[4971]: I1127 07:18:02.597905 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="0195ada6-2a69-427f-8041-166dfdacf583" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": read tcp 10.217.0.2:40948->10.217.0.193:8775: read: connection reset by peer" Nov 27 07:18:02 crc kubenswrapper[4971]: I1127 07:18:02.597949 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="0195ada6-2a69-427f-8041-166dfdacf583" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": read tcp 10.217.0.2:40950->10.217.0.193:8775: read: connection reset by peer" Nov 27 07:18:02 crc kubenswrapper[4971]: I1127 07:18:02.853577 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.003228 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.038781 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlrdh\" (UniqueName: \"kubernetes.io/projected/2b7ff1aa-9bdb-4c11-9a97-bac98829a89f-kube-api-access-rlrdh\") pod \"2b7ff1aa-9bdb-4c11-9a97-bac98829a89f\" (UID: \"2b7ff1aa-9bdb-4c11-9a97-bac98829a89f\") " Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.038876 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b7ff1aa-9bdb-4c11-9a97-bac98829a89f-combined-ca-bundle\") pod \"2b7ff1aa-9bdb-4c11-9a97-bac98829a89f\" (UID: \"2b7ff1aa-9bdb-4c11-9a97-bac98829a89f\") " Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.039067 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0195ada6-2a69-427f-8041-166dfdacf583-nova-metadata-tls-certs\") pod \"0195ada6-2a69-427f-8041-166dfdacf583\" (UID: \"0195ada6-2a69-427f-8041-166dfdacf583\") " Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.039240 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b7ff1aa-9bdb-4c11-9a97-bac98829a89f-config-data\") pod \"2b7ff1aa-9bdb-4c11-9a97-bac98829a89f\" (UID: \"2b7ff1aa-9bdb-4c11-9a97-bac98829a89f\") " Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.063765 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b7ff1aa-9bdb-4c11-9a97-bac98829a89f-kube-api-access-rlrdh" (OuterVolumeSpecName: "kube-api-access-rlrdh") pod "2b7ff1aa-9bdb-4c11-9a97-bac98829a89f" (UID: "2b7ff1aa-9bdb-4c11-9a97-bac98829a89f"). InnerVolumeSpecName "kube-api-access-rlrdh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.077775 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b7ff1aa-9bdb-4c11-9a97-bac98829a89f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2b7ff1aa-9bdb-4c11-9a97-bac98829a89f" (UID: "2b7ff1aa-9bdb-4c11-9a97-bac98829a89f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.100554 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0195ada6-2a69-427f-8041-166dfdacf583-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "0195ada6-2a69-427f-8041-166dfdacf583" (UID: "0195ada6-2a69-427f-8041-166dfdacf583"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.111153 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b7ff1aa-9bdb-4c11-9a97-bac98829a89f-config-data" (OuterVolumeSpecName: "config-data") pod "2b7ff1aa-9bdb-4c11-9a97-bac98829a89f" (UID: "2b7ff1aa-9bdb-4c11-9a97-bac98829a89f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.122708 4971 generic.go:334] "Generic (PLEG): container finished" podID="2b7ff1aa-9bdb-4c11-9a97-bac98829a89f" containerID="9dda0ab26da17d256febc0b7476eb425a2961dbd6725659139cfe57841fa7a79" exitCode=0 Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.122788 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2b7ff1aa-9bdb-4c11-9a97-bac98829a89f","Type":"ContainerDied","Data":"9dda0ab26da17d256febc0b7476eb425a2961dbd6725659139cfe57841fa7a79"} Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.122796 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.122817 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2b7ff1aa-9bdb-4c11-9a97-bac98829a89f","Type":"ContainerDied","Data":"cd5cf61d7985d15e49c5d0a0b418dff4b067091f8a49789611967c4b9c047329"} Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.122865 4971 scope.go:117] "RemoveContainer" containerID="9dda0ab26da17d256febc0b7476eb425a2961dbd6725659139cfe57841fa7a79" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.125015 4971 generic.go:334] "Generic (PLEG): container finished" podID="0195ada6-2a69-427f-8041-166dfdacf583" containerID="991ebc52378df336553930ac80c5badc6b2daafef116a6928889734ba2a45fa9" exitCode=0 Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.125059 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0195ada6-2a69-427f-8041-166dfdacf583","Type":"ContainerDied","Data":"991ebc52378df336553930ac80c5badc6b2daafef116a6928889734ba2a45fa9"} Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.125074 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0195ada6-2a69-427f-8041-166dfdacf583","Type":"ContainerDied","Data":"7f144bbf6aba2a3f733129e9672651af873a48760fe1d26ea94bbdf8b2fe53b7"} Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.125104 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.127980 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmvjh" event={"ID":"5d86569d-88c3-4ae0-9261-affa354b038d","Type":"ContainerStarted","Data":"9fc2f4619499ea4dba496e222ee218aa3e14e7f44bd1003b6ac22d3f7d010aea"} Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.143240 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0195ada6-2a69-427f-8041-166dfdacf583-config-data\") pod \"0195ada6-2a69-427f-8041-166dfdacf583\" (UID: \"0195ada6-2a69-427f-8041-166dfdacf583\") " Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.144647 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0195ada6-2a69-427f-8041-166dfdacf583-logs" (OuterVolumeSpecName: "logs") pod "0195ada6-2a69-427f-8041-166dfdacf583" (UID: "0195ada6-2a69-427f-8041-166dfdacf583"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.143366 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0195ada6-2a69-427f-8041-166dfdacf583-logs\") pod \"0195ada6-2a69-427f-8041-166dfdacf583\" (UID: \"0195ada6-2a69-427f-8041-166dfdacf583\") " Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.145286 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0195ada6-2a69-427f-8041-166dfdacf583-combined-ca-bundle\") pod \"0195ada6-2a69-427f-8041-166dfdacf583\" (UID: \"0195ada6-2a69-427f-8041-166dfdacf583\") " Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.145431 4971 scope.go:117] "RemoveContainer" containerID="9dda0ab26da17d256febc0b7476eb425a2961dbd6725659139cfe57841fa7a79" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.145522 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-st7td\" (UniqueName: \"kubernetes.io/projected/0195ada6-2a69-427f-8041-166dfdacf583-kube-api-access-st7td\") pod \"0195ada6-2a69-427f-8041-166dfdacf583\" (UID: \"0195ada6-2a69-427f-8041-166dfdacf583\") " Nov 27 07:18:03 crc kubenswrapper[4971]: E1127 07:18:03.146413 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9dda0ab26da17d256febc0b7476eb425a2961dbd6725659139cfe57841fa7a79\": container with ID starting with 9dda0ab26da17d256febc0b7476eb425a2961dbd6725659139cfe57841fa7a79 not found: ID does not exist" containerID="9dda0ab26da17d256febc0b7476eb425a2961dbd6725659139cfe57841fa7a79" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.146461 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dda0ab26da17d256febc0b7476eb425a2961dbd6725659139cfe57841fa7a79"} err="failed to get container status \"9dda0ab26da17d256febc0b7476eb425a2961dbd6725659139cfe57841fa7a79\": rpc error: code = NotFound desc = could not find container \"9dda0ab26da17d256febc0b7476eb425a2961dbd6725659139cfe57841fa7a79\": container with ID starting with 9dda0ab26da17d256febc0b7476eb425a2961dbd6725659139cfe57841fa7a79 not found: ID does not exist" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.146486 4971 scope.go:117] "RemoveContainer" containerID="991ebc52378df336553930ac80c5badc6b2daafef116a6928889734ba2a45fa9" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.147090 4971 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0195ada6-2a69-427f-8041-166dfdacf583-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.147120 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b7ff1aa-9bdb-4c11-9a97-bac98829a89f-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.147132 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0195ada6-2a69-427f-8041-166dfdacf583-logs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.147147 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlrdh\" (UniqueName: 
\"kubernetes.io/projected/2b7ff1aa-9bdb-4c11-9a97-bac98829a89f-kube-api-access-rlrdh\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.147159 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b7ff1aa-9bdb-4c11-9a97-bac98829a89f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.154733 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0195ada6-2a69-427f-8041-166dfdacf583-kube-api-access-st7td" (OuterVolumeSpecName: "kube-api-access-st7td") pod "0195ada6-2a69-427f-8041-166dfdacf583" (UID: "0195ada6-2a69-427f-8041-166dfdacf583"). InnerVolumeSpecName "kube-api-access-st7td". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.189770 4971 scope.go:117] "RemoveContainer" containerID="dfae08e08ac1dc8cbd0db6a30ba44f2a2d19a35f970e1bc67d0f92ecbeac6049" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.193039 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0195ada6-2a69-427f-8041-166dfdacf583-config-data" (OuterVolumeSpecName: "config-data") pod "0195ada6-2a69-427f-8041-166dfdacf583" (UID: "0195ada6-2a69-427f-8041-166dfdacf583"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.208687 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dmvjh" podStartSLOduration=2.627299501 podStartE2EDuration="5.208660591s" podCreationTimestamp="2025-11-27 07:17:58 +0000 UTC" firstStartedPulling="2025-11-27 07:18:00.055584408 +0000 UTC m=+1518.247628326" lastFinishedPulling="2025-11-27 07:18:02.636945498 +0000 UTC m=+1520.828989416" observedRunningTime="2025-11-27 07:18:03.155262001 +0000 UTC m=+1521.347305919" watchObservedRunningTime="2025-11-27 07:18:03.208660591 +0000 UTC m=+1521.400704509" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.220949 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0195ada6-2a69-427f-8041-166dfdacf583-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0195ada6-2a69-427f-8041-166dfdacf583" (UID: "0195ada6-2a69-427f-8041-166dfdacf583"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.225705 4971 scope.go:117] "RemoveContainer" containerID="991ebc52378df336553930ac80c5badc6b2daafef116a6928889734ba2a45fa9" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.231738 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 07:18:03 crc kubenswrapper[4971]: E1127 07:18:03.231769 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"991ebc52378df336553930ac80c5badc6b2daafef116a6928889734ba2a45fa9\": container with ID starting with 991ebc52378df336553930ac80c5badc6b2daafef116a6928889734ba2a45fa9 not found: ID does not exist" containerID="991ebc52378df336553930ac80c5badc6b2daafef116a6928889734ba2a45fa9" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.231815 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"991ebc52378df336553930ac80c5badc6b2daafef116a6928889734ba2a45fa9"} err="failed to get container status \"991ebc52378df336553930ac80c5badc6b2daafef116a6928889734ba2a45fa9\": rpc error: code = NotFound desc = could not find container \"991ebc52378df336553930ac80c5badc6b2daafef116a6928889734ba2a45fa9\": container with ID starting with 991ebc52378df336553930ac80c5badc6b2daafef116a6928889734ba2a45fa9 not found: ID does not exist" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.231843 4971 scope.go:117] "RemoveContainer" containerID="dfae08e08ac1dc8cbd0db6a30ba44f2a2d19a35f970e1bc67d0f92ecbeac6049" Nov 27 07:18:03 crc kubenswrapper[4971]: E1127 07:18:03.232298 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dfae08e08ac1dc8cbd0db6a30ba44f2a2d19a35f970e1bc67d0f92ecbeac6049\": container with ID starting with dfae08e08ac1dc8cbd0db6a30ba44f2a2d19a35f970e1bc67d0f92ecbeac6049 not found: ID does not exist" containerID="dfae08e08ac1dc8cbd0db6a30ba44f2a2d19a35f970e1bc67d0f92ecbeac6049" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.232332 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfae08e08ac1dc8cbd0db6a30ba44f2a2d19a35f970e1bc67d0f92ecbeac6049"} err="failed to get container status \"dfae08e08ac1dc8cbd0db6a30ba44f2a2d19a35f970e1bc67d0f92ecbeac6049\": rpc error: code = NotFound desc = could not find container \"dfae08e08ac1dc8cbd0db6a30ba44f2a2d19a35f970e1bc67d0f92ecbeac6049\": container with ID starting with dfae08e08ac1dc8cbd0db6a30ba44f2a2d19a35f970e1bc67d0f92ecbeac6049 not found: ID does not exist" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.244775 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.248911 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0195ada6-2a69-427f-8041-166dfdacf583-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.248956 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-st7td\" (UniqueName: \"kubernetes.io/projected/0195ada6-2a69-427f-8041-166dfdacf583-kube-api-access-st7td\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.248968 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/0195ada6-2a69-427f-8041-166dfdacf583-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.258382 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 07:18:03 crc kubenswrapper[4971]: E1127 07:18:03.258932 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="457ab172-18f9-4aa4-85b8-eae591089ad7" containerName="nova-manage" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.258955 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="457ab172-18f9-4aa4-85b8-eae591089ad7" containerName="nova-manage" Nov 27 07:18:03 crc kubenswrapper[4971]: E1127 07:18:03.258973 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0195ada6-2a69-427f-8041-166dfdacf583" containerName="nova-metadata-metadata" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.258982 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0195ada6-2a69-427f-8041-166dfdacf583" containerName="nova-metadata-metadata" Nov 27 07:18:03 crc kubenswrapper[4971]: E1127 07:18:03.258996 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0195ada6-2a69-427f-8041-166dfdacf583" containerName="nova-metadata-log" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.259003 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0195ada6-2a69-427f-8041-166dfdacf583" containerName="nova-metadata-log" Nov 27 07:18:03 crc kubenswrapper[4971]: E1127 07:18:03.259012 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b7ff1aa-9bdb-4c11-9a97-bac98829a89f" containerName="nova-scheduler-scheduler" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.259019 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b7ff1aa-9bdb-4c11-9a97-bac98829a89f" containerName="nova-scheduler-scheduler" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.259205 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0195ada6-2a69-427f-8041-166dfdacf583" containerName="nova-metadata-metadata" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.259225 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0195ada6-2a69-427f-8041-166dfdacf583" containerName="nova-metadata-log" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.259235 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b7ff1aa-9bdb-4c11-9a97-bac98829a89f" containerName="nova-scheduler-scheduler" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.259256 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="457ab172-18f9-4aa4-85b8-eae591089ad7" containerName="nova-manage" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.260004 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.264228 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.271284 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.350714 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdtkd\" (UniqueName: \"kubernetes.io/projected/36cda10d-7f8f-403c-82b8-fbbdf89e8ed5-kube-api-access-vdtkd\") pod \"nova-scheduler-0\" (UID: \"36cda10d-7f8f-403c-82b8-fbbdf89e8ed5\") " pod="openstack/nova-scheduler-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.350845 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36cda10d-7f8f-403c-82b8-fbbdf89e8ed5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"36cda10d-7f8f-403c-82b8-fbbdf89e8ed5\") " pod="openstack/nova-scheduler-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.350888 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36cda10d-7f8f-403c-82b8-fbbdf89e8ed5-config-data\") pod \"nova-scheduler-0\" (UID: \"36cda10d-7f8f-403c-82b8-fbbdf89e8ed5\") " pod="openstack/nova-scheduler-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.452743 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36cda10d-7f8f-403c-82b8-fbbdf89e8ed5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"36cda10d-7f8f-403c-82b8-fbbdf89e8ed5\") " pod="openstack/nova-scheduler-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.452825 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36cda10d-7f8f-403c-82b8-fbbdf89e8ed5-config-data\") pod \"nova-scheduler-0\" (UID: \"36cda10d-7f8f-403c-82b8-fbbdf89e8ed5\") " pod="openstack/nova-scheduler-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.452968 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdtkd\" (UniqueName: \"kubernetes.io/projected/36cda10d-7f8f-403c-82b8-fbbdf89e8ed5-kube-api-access-vdtkd\") pod \"nova-scheduler-0\" (UID: \"36cda10d-7f8f-403c-82b8-fbbdf89e8ed5\") " pod="openstack/nova-scheduler-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.460333 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36cda10d-7f8f-403c-82b8-fbbdf89e8ed5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"36cda10d-7f8f-403c-82b8-fbbdf89e8ed5\") " pod="openstack/nova-scheduler-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.467035 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36cda10d-7f8f-403c-82b8-fbbdf89e8ed5-config-data\") pod \"nova-scheduler-0\" (UID: \"36cda10d-7f8f-403c-82b8-fbbdf89e8ed5\") " pod="openstack/nova-scheduler-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.471963 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:18:03 crc kubenswrapper[4971]: 
I1127 07:18:03.474151 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdtkd\" (UniqueName: \"kubernetes.io/projected/36cda10d-7f8f-403c-82b8-fbbdf89e8ed5-kube-api-access-vdtkd\") pod \"nova-scheduler-0\" (UID: \"36cda10d-7f8f-403c-82b8-fbbdf89e8ed5\") " pod="openstack/nova-scheduler-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.489890 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.509459 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.512130 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.516983 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.517438 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.544909 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.554038 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35b90587-df5b-4f15-8c34-f1b0a8506d85-config-data\") pod \"nova-metadata-0\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") " pod="openstack/nova-metadata-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.554355 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrt9w\" (UniqueName: \"kubernetes.io/projected/35b90587-df5b-4f15-8c34-f1b0a8506d85-kube-api-access-wrt9w\") pod \"nova-metadata-0\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") " pod="openstack/nova-metadata-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.554387 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35b90587-df5b-4f15-8c34-f1b0a8506d85-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") " pod="openstack/nova-metadata-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.554427 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/35b90587-df5b-4f15-8c34-f1b0a8506d85-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") " pod="openstack/nova-metadata-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.554498 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35b90587-df5b-4f15-8c34-f1b0a8506d85-logs\") pod \"nova-metadata-0\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") " pod="openstack/nova-metadata-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.588036 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.656663 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35b90587-df5b-4f15-8c34-f1b0a8506d85-logs\") pod \"nova-metadata-0\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") " pod="openstack/nova-metadata-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.656865 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35b90587-df5b-4f15-8c34-f1b0a8506d85-config-data\") pod \"nova-metadata-0\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") " pod="openstack/nova-metadata-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.657760 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35b90587-df5b-4f15-8c34-f1b0a8506d85-logs\") pod \"nova-metadata-0\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") " pod="openstack/nova-metadata-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.658071 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrt9w\" (UniqueName: \"kubernetes.io/projected/35b90587-df5b-4f15-8c34-f1b0a8506d85-kube-api-access-wrt9w\") pod \"nova-metadata-0\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") " pod="openstack/nova-metadata-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.658160 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35b90587-df5b-4f15-8c34-f1b0a8506d85-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") " pod="openstack/nova-metadata-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.658280 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/35b90587-df5b-4f15-8c34-f1b0a8506d85-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") " pod="openstack/nova-metadata-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.663290 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/35b90587-df5b-4f15-8c34-f1b0a8506d85-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") " pod="openstack/nova-metadata-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.664862 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35b90587-df5b-4f15-8c34-f1b0a8506d85-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") " pod="openstack/nova-metadata-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.674069 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrt9w\" (UniqueName: \"kubernetes.io/projected/35b90587-df5b-4f15-8c34-f1b0a8506d85-kube-api-access-wrt9w\") pod \"nova-metadata-0\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") " pod="openstack/nova-metadata-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.682295 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35b90587-df5b-4f15-8c34-f1b0a8506d85-config-data\") 
pod \"nova-metadata-0\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") " pod="openstack/nova-metadata-0" Nov 27 07:18:03 crc kubenswrapper[4971]: I1127 07:18:03.885299 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.006202 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.143845 4971 generic.go:334] "Generic (PLEG): container finished" podID="c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" containerID="acd1b3bb060a3329f6548f899ba6d5bbf30135b64dfa6a4291b324203fb94aa7" exitCode=0 Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.143927 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03","Type":"ContainerDied","Data":"acd1b3bb060a3329f6548f899ba6d5bbf30135b64dfa6a4291b324203fb94aa7"} Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.143962 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03","Type":"ContainerDied","Data":"ba811350450f4ee87808db4f9ff8dbaed68e2095a8a7540d0524441241de91b7"} Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.143985 4971 scope.go:117] "RemoveContainer" containerID="acd1b3bb060a3329f6548f899ba6d5bbf30135b64dfa6a4291b324203fb94aa7" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.144125 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.169481 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-562kx\" (UniqueName: \"kubernetes.io/projected/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-kube-api-access-562kx\") pod \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.169626 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-logs\") pod \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.169731 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-internal-tls-certs\") pod \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.169775 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-combined-ca-bundle\") pod \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.169821 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-public-tls-certs\") pod \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.169902 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-config-data\") pod \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\" (UID: \"c0a02759-f505-4e1c-ad20-d7c2b7bd4a03\") " Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.170727 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-logs" (OuterVolumeSpecName: "logs") pod "c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" (UID: "c0a02759-f505-4e1c-ad20-d7c2b7bd4a03"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.189481 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-kube-api-access-562kx" (OuterVolumeSpecName: "kube-api-access-562kx") pod "c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" (UID: "c0a02759-f505-4e1c-ad20-d7c2b7bd4a03"). InnerVolumeSpecName "kube-api-access-562kx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.202578 4971 scope.go:117] "RemoveContainer" containerID="26043e9a97f70a4db001fd498f0f7bf0182a5702409fc73df98877e2bf3b8b5d" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.207420 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" (UID: "c0a02759-f505-4e1c-ad20-d7c2b7bd4a03"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.208815 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.219628 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-config-data" (OuterVolumeSpecName: "config-data") pod "c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" (UID: "c0a02759-f505-4e1c-ad20-d7c2b7bd4a03"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.232781 4971 scope.go:117] "RemoveContainer" containerID="acd1b3bb060a3329f6548f899ba6d5bbf30135b64dfa6a4291b324203fb94aa7" Nov 27 07:18:04 crc kubenswrapper[4971]: E1127 07:18:04.233180 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acd1b3bb060a3329f6548f899ba6d5bbf30135b64dfa6a4291b324203fb94aa7\": container with ID starting with acd1b3bb060a3329f6548f899ba6d5bbf30135b64dfa6a4291b324203fb94aa7 not found: ID does not exist" containerID="acd1b3bb060a3329f6548f899ba6d5bbf30135b64dfa6a4291b324203fb94aa7" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.233217 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acd1b3bb060a3329f6548f899ba6d5bbf30135b64dfa6a4291b324203fb94aa7"} err="failed to get container status \"acd1b3bb060a3329f6548f899ba6d5bbf30135b64dfa6a4291b324203fb94aa7\": rpc error: code = NotFound desc = could not find container \"acd1b3bb060a3329f6548f899ba6d5bbf30135b64dfa6a4291b324203fb94aa7\": container with ID starting with acd1b3bb060a3329f6548f899ba6d5bbf30135b64dfa6a4291b324203fb94aa7 not found: ID does not exist" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.233243 4971 scope.go:117] "RemoveContainer" containerID="26043e9a97f70a4db001fd498f0f7bf0182a5702409fc73df98877e2bf3b8b5d" Nov 27 07:18:04 crc kubenswrapper[4971]: E1127 07:18:04.233473 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26043e9a97f70a4db001fd498f0f7bf0182a5702409fc73df98877e2bf3b8b5d\": container with ID starting with 26043e9a97f70a4db001fd498f0f7bf0182a5702409fc73df98877e2bf3b8b5d not found: ID does not exist" containerID="26043e9a97f70a4db001fd498f0f7bf0182a5702409fc73df98877e2bf3b8b5d" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.233497 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26043e9a97f70a4db001fd498f0f7bf0182a5702409fc73df98877e2bf3b8b5d"} err="failed to get container status \"26043e9a97f70a4db001fd498f0f7bf0182a5702409fc73df98877e2bf3b8b5d\": rpc error: code = NotFound desc = could not find container \"26043e9a97f70a4db001fd498f0f7bf0182a5702409fc73df98877e2bf3b8b5d\": container with ID starting with 26043e9a97f70a4db001fd498f0f7bf0182a5702409fc73df98877e2bf3b8b5d not found: ID does not exist" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.250517 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" (UID: "c0a02759-f505-4e1c-ad20-d7c2b7bd4a03"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.268072 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" (UID: "c0a02759-f505-4e1c-ad20-d7c2b7bd4a03"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.272378 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.272417 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-562kx\" (UniqueName: \"kubernetes.io/projected/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-kube-api-access-562kx\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.272429 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-logs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.272443 4971 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.272457 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.272470 4971 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.385930 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.545815 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.565194 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0195ada6-2a69-427f-8041-166dfdacf583" path="/var/lib/kubelet/pods/0195ada6-2a69-427f-8041-166dfdacf583/volumes" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.565924 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b7ff1aa-9bdb-4c11-9a97-bac98829a89f" path="/var/lib/kubelet/pods/2b7ff1aa-9bdb-4c11-9a97-bac98829a89f/volumes" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.567005 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.581957 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 27 07:18:04 crc kubenswrapper[4971]: E1127 07:18:04.582417 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" containerName="nova-api-log" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.582435 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" containerName="nova-api-log" Nov 27 07:18:04 crc kubenswrapper[4971]: E1127 07:18:04.582467 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" containerName="nova-api-api" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.582478 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" containerName="nova-api-api" Nov 27 07:18:04 crc 
kubenswrapper[4971]: I1127 07:18:04.582722 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" containerName="nova-api-api" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.582758 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" containerName="nova-api-log" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.583787 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.587100 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.587314 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.588148 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.613472 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.680830 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") " pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.680896 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-public-tls-certs\") pod \"nova-api-0\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") " pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.680923 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69d47892-79da-4e4e-8de2-a84801d4d6b9-logs\") pod \"nova-api-0\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") " pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.680978 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blnzp\" (UniqueName: \"kubernetes.io/projected/69d47892-79da-4e4e-8de2-a84801d4d6b9-kube-api-access-blnzp\") pod \"nova-api-0\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") " pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.681009 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-config-data\") pod \"nova-api-0\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") " pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.681091 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-internal-tls-certs\") pod \"nova-api-0\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") " pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.785944 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") " pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.786822 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-public-tls-certs\") pod \"nova-api-0\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") " pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.786859 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69d47892-79da-4e4e-8de2-a84801d4d6b9-logs\") pod \"nova-api-0\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") " pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.786941 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blnzp\" (UniqueName: \"kubernetes.io/projected/69d47892-79da-4e4e-8de2-a84801d4d6b9-kube-api-access-blnzp\") pod \"nova-api-0\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") " pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.786983 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-config-data\") pod \"nova-api-0\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") " pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.787097 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-internal-tls-certs\") pod \"nova-api-0\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") " pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.788809 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69d47892-79da-4e4e-8de2-a84801d4d6b9-logs\") pod \"nova-api-0\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") " pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.793243 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-public-tls-certs\") pod \"nova-api-0\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") " pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.797134 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-config-data\") pod \"nova-api-0\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") " pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.800041 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") " pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.801614 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-internal-tls-certs\") pod \"nova-api-0\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") " pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.826619 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blnzp\" (UniqueName: \"kubernetes.io/projected/69d47892-79da-4e4e-8de2-a84801d4d6b9-kube-api-access-blnzp\") pod \"nova-api-0\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") " pod="openstack/nova-api-0" Nov 27 07:18:04 crc kubenswrapper[4971]: I1127 07:18:04.906359 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 07:18:05 crc kubenswrapper[4971]: I1127 07:18:05.175990 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"36cda10d-7f8f-403c-82b8-fbbdf89e8ed5","Type":"ContainerStarted","Data":"b97556ba2fd93384c3dcc5c7577bad7f95a4205ce2fc105bf6f734f22b25ff2a"} Nov 27 07:18:05 crc kubenswrapper[4971]: I1127 07:18:05.176314 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"36cda10d-7f8f-403c-82b8-fbbdf89e8ed5","Type":"ContainerStarted","Data":"0ee8e4cedf669da3fc4b843f49bd5320748a375994ea141b5f0843e3240ccf4b"} Nov 27 07:18:05 crc kubenswrapper[4971]: I1127 07:18:05.179018 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"35b90587-df5b-4f15-8c34-f1b0a8506d85","Type":"ContainerStarted","Data":"c581d4d83b4f2a55778d440cd3c9dd25c65d57ded13db9b0fc5dd5a6570dbd13"} Nov 27 07:18:05 crc kubenswrapper[4971]: I1127 07:18:05.179077 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"35b90587-df5b-4f15-8c34-f1b0a8506d85","Type":"ContainerStarted","Data":"9889def6b44901ab96b7e37483508d92be73fc8357e22d9f41857a9c610d8c0a"} Nov 27 07:18:05 crc kubenswrapper[4971]: I1127 07:18:05.179092 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"35b90587-df5b-4f15-8c34-f1b0a8506d85","Type":"ContainerStarted","Data":"1c2a4e6850f940a70a6eff35d37aea64d1c56493a7927207282006b381b5fd35"} Nov 27 07:18:05 crc kubenswrapper[4971]: I1127 07:18:05.200068 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.200043506 podStartE2EDuration="2.200043506s" podCreationTimestamp="2025-11-27 07:18:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:18:05.194342013 +0000 UTC m=+1523.386385951" watchObservedRunningTime="2025-11-27 07:18:05.200043506 +0000 UTC m=+1523.392087444" Nov 27 07:18:05 crc kubenswrapper[4971]: I1127 07:18:05.223633 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.223614762 podStartE2EDuration="2.223614762s" podCreationTimestamp="2025-11-27 07:18:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:18:05.210491998 +0000 UTC m=+1523.402535936" watchObservedRunningTime="2025-11-27 07:18:05.223614762 +0000 UTC m=+1523.415658680" Nov 27 07:18:05 crc kubenswrapper[4971]: I1127 07:18:05.376718 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 27 07:18:05 crc kubenswrapper[4971]: W1127 07:18:05.379443 4971 
manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69d47892_79da_4e4e_8de2_a84801d4d6b9.slice/crio-f5a480690dc6e4f0b04176d5578489243dc597f63a060eb82d971a0d3e3ccd17 WatchSource:0}: Error finding container f5a480690dc6e4f0b04176d5578489243dc597f63a060eb82d971a0d3e3ccd17: Status 404 returned error can't find the container with id f5a480690dc6e4f0b04176d5578489243dc597f63a060eb82d971a0d3e3ccd17 Nov 27 07:18:06 crc kubenswrapper[4971]: I1127 07:18:06.192953 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"69d47892-79da-4e4e-8de2-a84801d4d6b9","Type":"ContainerStarted","Data":"e6ecd3fddd03d9acdb8874669eb4ae87d6f12ad1116ff9c9cbf2e034df95e87e"} Nov 27 07:18:06 crc kubenswrapper[4971]: I1127 07:18:06.193335 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"69d47892-79da-4e4e-8de2-a84801d4d6b9","Type":"ContainerStarted","Data":"bf254fe08bcc7d4264d54d1c7f653dc9ece6ef4aa57f2cd553f285b6b06f6503"} Nov 27 07:18:06 crc kubenswrapper[4971]: I1127 07:18:06.193350 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"69d47892-79da-4e4e-8de2-a84801d4d6b9","Type":"ContainerStarted","Data":"f5a480690dc6e4f0b04176d5578489243dc597f63a060eb82d971a0d3e3ccd17"} Nov 27 07:18:06 crc kubenswrapper[4971]: I1127 07:18:06.242413 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.242393821 podStartE2EDuration="2.242393821s" podCreationTimestamp="2025-11-27 07:18:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:18:06.210973925 +0000 UTC m=+1524.403017883" watchObservedRunningTime="2025-11-27 07:18:06.242393821 +0000 UTC m=+1524.434437729" Nov 27 07:18:06 crc kubenswrapper[4971]: I1127 07:18:06.571853 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0a02759-f505-4e1c-ad20-d7c2b7bd4a03" path="/var/lib/kubelet/pods/c0a02759-f505-4e1c-ad20-d7c2b7bd4a03/volumes" Nov 27 07:18:08 crc kubenswrapper[4971]: I1127 07:18:08.588563 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 27 07:18:08 crc kubenswrapper[4971]: I1127 07:18:08.686199 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dmvjh" Nov 27 07:18:08 crc kubenswrapper[4971]: I1127 07:18:08.686247 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dmvjh" Nov 27 07:18:08 crc kubenswrapper[4971]: I1127 07:18:08.738109 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dmvjh" Nov 27 07:18:08 crc kubenswrapper[4971]: I1127 07:18:08.884080 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-k7z68" Nov 27 07:18:08 crc kubenswrapper[4971]: I1127 07:18:08.886365 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 27 07:18:08 crc kubenswrapper[4971]: I1127 07:18:08.886626 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 27 07:18:08 crc kubenswrapper[4971]: I1127 07:18:08.932288 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-operators-k7z68" Nov 27 07:18:09 crc kubenswrapper[4971]: I1127 07:18:09.316766 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dmvjh" Nov 27 07:18:10 crc kubenswrapper[4971]: I1127 07:18:10.333582 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k7z68"] Nov 27 07:18:10 crc kubenswrapper[4971]: I1127 07:18:10.334094 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-k7z68" podUID="ce5e91d8-51b9-4c86-85aa-3d78b68c2e23" containerName="registry-server" containerID="cri-o://794da1a3241b802c1829cd89faaa822c91072e57e9db1b66ade83b481438c498" gracePeriod=2 Nov 27 07:18:10 crc kubenswrapper[4971]: I1127 07:18:10.828003 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k7z68" Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.013002 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8gqt\" (UniqueName: \"kubernetes.io/projected/ce5e91d8-51b9-4c86-85aa-3d78b68c2e23-kube-api-access-w8gqt\") pod \"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23\" (UID: \"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23\") " Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.013227 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce5e91d8-51b9-4c86-85aa-3d78b68c2e23-catalog-content\") pod \"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23\" (UID: \"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23\") " Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.013288 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce5e91d8-51b9-4c86-85aa-3d78b68c2e23-utilities\") pod \"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23\" (UID: \"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23\") " Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.014193 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce5e91d8-51b9-4c86-85aa-3d78b68c2e23-utilities" (OuterVolumeSpecName: "utilities") pod "ce5e91d8-51b9-4c86-85aa-3d78b68c2e23" (UID: "ce5e91d8-51b9-4c86-85aa-3d78b68c2e23"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.019143 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce5e91d8-51b9-4c86-85aa-3d78b68c2e23-kube-api-access-w8gqt" (OuterVolumeSpecName: "kube-api-access-w8gqt") pod "ce5e91d8-51b9-4c86-85aa-3d78b68c2e23" (UID: "ce5e91d8-51b9-4c86-85aa-3d78b68c2e23"). InnerVolumeSpecName "kube-api-access-w8gqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.110389 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce5e91d8-51b9-4c86-85aa-3d78b68c2e23-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ce5e91d8-51b9-4c86-85aa-3d78b68c2e23" (UID: "ce5e91d8-51b9-4c86-85aa-3d78b68c2e23"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.116739 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce5e91d8-51b9-4c86-85aa-3d78b68c2e23-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.116793 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8gqt\" (UniqueName: \"kubernetes.io/projected/ce5e91d8-51b9-4c86-85aa-3d78b68c2e23-kube-api-access-w8gqt\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.116809 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce5e91d8-51b9-4c86-85aa-3d78b68c2e23-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.247091 4971 generic.go:334] "Generic (PLEG): container finished" podID="ce5e91d8-51b9-4c86-85aa-3d78b68c2e23" containerID="794da1a3241b802c1829cd89faaa822c91072e57e9db1b66ade83b481438c498" exitCode=0 Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.247149 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k7z68" Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.247180 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k7z68" event={"ID":"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23","Type":"ContainerDied","Data":"794da1a3241b802c1829cd89faaa822c91072e57e9db1b66ade83b481438c498"} Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.247237 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k7z68" event={"ID":"ce5e91d8-51b9-4c86-85aa-3d78b68c2e23","Type":"ContainerDied","Data":"a27e4dc636c997bc3820f4e772856f9f9241daebde4aba60e9fcd1ecc51f9d8d"} Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.247264 4971 scope.go:117] "RemoveContainer" containerID="794da1a3241b802c1829cd89faaa822c91072e57e9db1b66ade83b481438c498" Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.280886 4971 scope.go:117] "RemoveContainer" containerID="a30fd31a8f0a9d180a2532c1c651736becb00ce80e5f048de979d4958545ca3a" Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.284068 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k7z68"] Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.293629 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-k7z68"] Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.304479 4971 scope.go:117] "RemoveContainer" containerID="c204b0c7313f8addf18fb4df4db55222a083f76eddd95340d8ff4a9ee468f9fe" Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.357893 4971 scope.go:117] "RemoveContainer" containerID="794da1a3241b802c1829cd89faaa822c91072e57e9db1b66ade83b481438c498" Nov 27 07:18:11 crc kubenswrapper[4971]: E1127 07:18:11.358521 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"794da1a3241b802c1829cd89faaa822c91072e57e9db1b66ade83b481438c498\": container with ID starting with 794da1a3241b802c1829cd89faaa822c91072e57e9db1b66ade83b481438c498 not found: ID does not exist" containerID="794da1a3241b802c1829cd89faaa822c91072e57e9db1b66ade83b481438c498" Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.358641 4971 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"794da1a3241b802c1829cd89faaa822c91072e57e9db1b66ade83b481438c498"} err="failed to get container status \"794da1a3241b802c1829cd89faaa822c91072e57e9db1b66ade83b481438c498\": rpc error: code = NotFound desc = could not find container \"794da1a3241b802c1829cd89faaa822c91072e57e9db1b66ade83b481438c498\": container with ID starting with 794da1a3241b802c1829cd89faaa822c91072e57e9db1b66ade83b481438c498 not found: ID does not exist" Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.358681 4971 scope.go:117] "RemoveContainer" containerID="a30fd31a8f0a9d180a2532c1c651736becb00ce80e5f048de979d4958545ca3a" Nov 27 07:18:11 crc kubenswrapper[4971]: E1127 07:18:11.359337 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a30fd31a8f0a9d180a2532c1c651736becb00ce80e5f048de979d4958545ca3a\": container with ID starting with a30fd31a8f0a9d180a2532c1c651736becb00ce80e5f048de979d4958545ca3a not found: ID does not exist" containerID="a30fd31a8f0a9d180a2532c1c651736becb00ce80e5f048de979d4958545ca3a" Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.359374 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a30fd31a8f0a9d180a2532c1c651736becb00ce80e5f048de979d4958545ca3a"} err="failed to get container status \"a30fd31a8f0a9d180a2532c1c651736becb00ce80e5f048de979d4958545ca3a\": rpc error: code = NotFound desc = could not find container \"a30fd31a8f0a9d180a2532c1c651736becb00ce80e5f048de979d4958545ca3a\": container with ID starting with a30fd31a8f0a9d180a2532c1c651736becb00ce80e5f048de979d4958545ca3a not found: ID does not exist" Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.359404 4971 scope.go:117] "RemoveContainer" containerID="c204b0c7313f8addf18fb4df4db55222a083f76eddd95340d8ff4a9ee468f9fe" Nov 27 07:18:11 crc kubenswrapper[4971]: E1127 07:18:11.359923 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c204b0c7313f8addf18fb4df4db55222a083f76eddd95340d8ff4a9ee468f9fe\": container with ID starting with c204b0c7313f8addf18fb4df4db55222a083f76eddd95340d8ff4a9ee468f9fe not found: ID does not exist" containerID="c204b0c7313f8addf18fb4df4db55222a083f76eddd95340d8ff4a9ee468f9fe" Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.359971 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c204b0c7313f8addf18fb4df4db55222a083f76eddd95340d8ff4a9ee468f9fe"} err="failed to get container status \"c204b0c7313f8addf18fb4df4db55222a083f76eddd95340d8ff4a9ee468f9fe\": rpc error: code = NotFound desc = could not find container \"c204b0c7313f8addf18fb4df4db55222a083f76eddd95340d8ff4a9ee468f9fe\": container with ID starting with c204b0c7313f8addf18fb4df4db55222a083f76eddd95340d8ff4a9ee468f9fe not found: ID does not exist" Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.735974 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dmvjh"] Nov 27 07:18:11 crc kubenswrapper[4971]: I1127 07:18:11.737109 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dmvjh" podUID="5d86569d-88c3-4ae0-9261-affa354b038d" containerName="registry-server" containerID="cri-o://9fc2f4619499ea4dba496e222ee218aa3e14e7f44bd1003b6ac22d3f7d010aea" gracePeriod=2 Nov 27 
07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.215065 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dmvjh" Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.274749 4971 generic.go:334] "Generic (PLEG): container finished" podID="5d86569d-88c3-4ae0-9261-affa354b038d" containerID="9fc2f4619499ea4dba496e222ee218aa3e14e7f44bd1003b6ac22d3f7d010aea" exitCode=0 Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.274795 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmvjh" event={"ID":"5d86569d-88c3-4ae0-9261-affa354b038d","Type":"ContainerDied","Data":"9fc2f4619499ea4dba496e222ee218aa3e14e7f44bd1003b6ac22d3f7d010aea"} Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.274818 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmvjh" event={"ID":"5d86569d-88c3-4ae0-9261-affa354b038d","Type":"ContainerDied","Data":"40d748253ee5190170fc8dd898f0251deaeb7bb5e454dca2188274ed2b3ad1f7"} Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.274834 4971 scope.go:117] "RemoveContainer" containerID="9fc2f4619499ea4dba496e222ee218aa3e14e7f44bd1003b6ac22d3f7d010aea" Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.274981 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dmvjh" Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.304375 4971 scope.go:117] "RemoveContainer" containerID="157c9a1f7be93fef1701d4f01336c177b13478b95c2a1a1425b285d3d31e589c" Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.333801 4971 scope.go:117] "RemoveContainer" containerID="78350e242cebc9d5ceece552a607bfa10b3679b3da8c2e88fdbc92b04b1d78c6" Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.371572 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d86569d-88c3-4ae0-9261-affa354b038d-catalog-content\") pod \"5d86569d-88c3-4ae0-9261-affa354b038d\" (UID: \"5d86569d-88c3-4ae0-9261-affa354b038d\") " Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.371826 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d86569d-88c3-4ae0-9261-affa354b038d-utilities\") pod \"5d86569d-88c3-4ae0-9261-affa354b038d\" (UID: \"5d86569d-88c3-4ae0-9261-affa354b038d\") " Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.371923 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52cgl\" (UniqueName: \"kubernetes.io/projected/5d86569d-88c3-4ae0-9261-affa354b038d-kube-api-access-52cgl\") pod \"5d86569d-88c3-4ae0-9261-affa354b038d\" (UID: \"5d86569d-88c3-4ae0-9261-affa354b038d\") " Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.372036 4971 scope.go:117] "RemoveContainer" containerID="9fc2f4619499ea4dba496e222ee218aa3e14e7f44bd1003b6ac22d3f7d010aea" Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.373129 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d86569d-88c3-4ae0-9261-affa354b038d-utilities" (OuterVolumeSpecName: "utilities") pod "5d86569d-88c3-4ae0-9261-affa354b038d" (UID: "5d86569d-88c3-4ae0-9261-affa354b038d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:12 crc kubenswrapper[4971]: E1127 07:18:12.373222 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fc2f4619499ea4dba496e222ee218aa3e14e7f44bd1003b6ac22d3f7d010aea\": container with ID starting with 9fc2f4619499ea4dba496e222ee218aa3e14e7f44bd1003b6ac22d3f7d010aea not found: ID does not exist" containerID="9fc2f4619499ea4dba496e222ee218aa3e14e7f44bd1003b6ac22d3f7d010aea" Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.373255 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fc2f4619499ea4dba496e222ee218aa3e14e7f44bd1003b6ac22d3f7d010aea"} err="failed to get container status \"9fc2f4619499ea4dba496e222ee218aa3e14e7f44bd1003b6ac22d3f7d010aea\": rpc error: code = NotFound desc = could not find container \"9fc2f4619499ea4dba496e222ee218aa3e14e7f44bd1003b6ac22d3f7d010aea\": container with ID starting with 9fc2f4619499ea4dba496e222ee218aa3e14e7f44bd1003b6ac22d3f7d010aea not found: ID does not exist" Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.373277 4971 scope.go:117] "RemoveContainer" containerID="157c9a1f7be93fef1701d4f01336c177b13478b95c2a1a1425b285d3d31e589c" Nov 27 07:18:12 crc kubenswrapper[4971]: E1127 07:18:12.374310 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"157c9a1f7be93fef1701d4f01336c177b13478b95c2a1a1425b285d3d31e589c\": container with ID starting with 157c9a1f7be93fef1701d4f01336c177b13478b95c2a1a1425b285d3d31e589c not found: ID does not exist" containerID="157c9a1f7be93fef1701d4f01336c177b13478b95c2a1a1425b285d3d31e589c" Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.374338 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"157c9a1f7be93fef1701d4f01336c177b13478b95c2a1a1425b285d3d31e589c"} err="failed to get container status \"157c9a1f7be93fef1701d4f01336c177b13478b95c2a1a1425b285d3d31e589c\": rpc error: code = NotFound desc = could not find container \"157c9a1f7be93fef1701d4f01336c177b13478b95c2a1a1425b285d3d31e589c\": container with ID starting with 157c9a1f7be93fef1701d4f01336c177b13478b95c2a1a1425b285d3d31e589c not found: ID does not exist" Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.374361 4971 scope.go:117] "RemoveContainer" containerID="78350e242cebc9d5ceece552a607bfa10b3679b3da8c2e88fdbc92b04b1d78c6" Nov 27 07:18:12 crc kubenswrapper[4971]: E1127 07:18:12.374703 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78350e242cebc9d5ceece552a607bfa10b3679b3da8c2e88fdbc92b04b1d78c6\": container with ID starting with 78350e242cebc9d5ceece552a607bfa10b3679b3da8c2e88fdbc92b04b1d78c6 not found: ID does not exist" containerID="78350e242cebc9d5ceece552a607bfa10b3679b3da8c2e88fdbc92b04b1d78c6" Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.374729 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78350e242cebc9d5ceece552a607bfa10b3679b3da8c2e88fdbc92b04b1d78c6"} err="failed to get container status \"78350e242cebc9d5ceece552a607bfa10b3679b3da8c2e88fdbc92b04b1d78c6\": rpc error: code = NotFound desc = could not find container \"78350e242cebc9d5ceece552a607bfa10b3679b3da8c2e88fdbc92b04b1d78c6\": container with ID starting with 
78350e242cebc9d5ceece552a607bfa10b3679b3da8c2e88fdbc92b04b1d78c6 not found: ID does not exist" Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.375458 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d86569d-88c3-4ae0-9261-affa354b038d-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.379556 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d86569d-88c3-4ae0-9261-affa354b038d-kube-api-access-52cgl" (OuterVolumeSpecName: "kube-api-access-52cgl") pod "5d86569d-88c3-4ae0-9261-affa354b038d" (UID: "5d86569d-88c3-4ae0-9261-affa354b038d"). InnerVolumeSpecName "kube-api-access-52cgl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.419255 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d86569d-88c3-4ae0-9261-affa354b038d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5d86569d-88c3-4ae0-9261-affa354b038d" (UID: "5d86569d-88c3-4ae0-9261-affa354b038d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.477236 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52cgl\" (UniqueName: \"kubernetes.io/projected/5d86569d-88c3-4ae0-9261-affa354b038d-kube-api-access-52cgl\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.477276 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d86569d-88c3-4ae0-9261-affa354b038d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.562506 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce5e91d8-51b9-4c86-85aa-3d78b68c2e23" path="/var/lib/kubelet/pods/ce5e91d8-51b9-4c86-85aa-3d78b68c2e23/volumes" Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.604147 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dmvjh"] Nov 27 07:18:12 crc kubenswrapper[4971]: I1127 07:18:12.612385 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dmvjh"] Nov 27 07:18:13 crc kubenswrapper[4971]: I1127 07:18:13.589238 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 27 07:18:13 crc kubenswrapper[4971]: I1127 07:18:13.620418 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 27 07:18:13 crc kubenswrapper[4971]: I1127 07:18:13.886446 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 27 07:18:13 crc kubenswrapper[4971]: I1127 07:18:13.886503 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 27 07:18:14 crc kubenswrapper[4971]: I1127 07:18:14.323865 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 27 07:18:14 crc kubenswrapper[4971]: I1127 07:18:14.563405 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d86569d-88c3-4ae0-9261-affa354b038d" path="/var/lib/kubelet/pods/5d86569d-88c3-4ae0-9261-affa354b038d/volumes" Nov 27 07:18:14 crc kubenswrapper[4971]: 
I1127 07:18:14.901775 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="35b90587-df5b-4f15-8c34-f1b0a8506d85" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Nov 27 07:18:14 crc kubenswrapper[4971]: I1127 07:18:14.901814 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="35b90587-df5b-4f15-8c34-f1b0a8506d85" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Nov 27 07:18:14 crc kubenswrapper[4971]: I1127 07:18:14.907169 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 27 07:18:14 crc kubenswrapper[4971]: I1127 07:18:14.907221 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 27 07:18:15 crc kubenswrapper[4971]: I1127 07:18:15.917684 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="69d47892-79da-4e4e-8de2-a84801d4d6b9" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Nov 27 07:18:15 crc kubenswrapper[4971]: I1127 07:18:15.917759 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="69d47892-79da-4e4e-8de2-a84801d4d6b9" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Nov 27 07:18:17 crc kubenswrapper[4971]: I1127 07:18:17.622825 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 27 07:18:23 crc kubenswrapper[4971]: I1127 07:18:23.893079 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 27 07:18:23 crc kubenswrapper[4971]: I1127 07:18:23.893725 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 27 07:18:23 crc kubenswrapper[4971]: I1127 07:18:23.898740 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 27 07:18:23 crc kubenswrapper[4971]: I1127 07:18:23.898874 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 27 07:18:24 crc kubenswrapper[4971]: I1127 07:18:24.916118 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 27 07:18:24 crc kubenswrapper[4971]: I1127 07:18:24.917090 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 27 07:18:24 crc kubenswrapper[4971]: I1127 07:18:24.918743 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 27 07:18:24 crc kubenswrapper[4971]: I1127 07:18:24.923166 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 27 07:18:25 crc kubenswrapper[4971]: I1127 07:18:25.393773 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 27 07:18:25 crc kubenswrapper[4971]: I1127 07:18:25.410139 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/nova-api-0" Nov 27 07:18:26 crc kubenswrapper[4971]: I1127 07:18:26.413800 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:18:26 crc kubenswrapper[4971]: I1127 07:18:26.413887 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:18:26 crc kubenswrapper[4971]: I1127 07:18:26.413974 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 07:18:26 crc kubenswrapper[4971]: I1127 07:18:26.414683 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 07:18:26 crc kubenswrapper[4971]: I1127 07:18:26.414771 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04" gracePeriod=600 Nov 27 07:18:26 crc kubenswrapper[4971]: E1127 07:18:26.551269 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:18:27 crc kubenswrapper[4971]: I1127 07:18:27.414723 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04" exitCode=0 Nov 27 07:18:27 crc kubenswrapper[4971]: I1127 07:18:27.414801 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"} Nov 27 07:18:27 crc kubenswrapper[4971]: I1127 07:18:27.415326 4971 scope.go:117] "RemoveContainer" containerID="5137c970e6cdfd6e2022087bc7914ffa98a593d65bc63eec37cd087c8fcec32d" Nov 27 07:18:27 crc kubenswrapper[4971]: I1127 07:18:27.416202 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04" Nov 27 07:18:27 crc kubenswrapper[4971]: E1127 07:18:27.416488 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.736284 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-v4r7q"] Nov 27 07:18:32 crc kubenswrapper[4971]: E1127 07:18:32.737116 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce5e91d8-51b9-4c86-85aa-3d78b68c2e23" containerName="registry-server" Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.737135 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce5e91d8-51b9-4c86-85aa-3d78b68c2e23" containerName="registry-server" Nov 27 07:18:32 crc kubenswrapper[4971]: E1127 07:18:32.737149 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce5e91d8-51b9-4c86-85aa-3d78b68c2e23" containerName="extract-utilities" Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.737158 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce5e91d8-51b9-4c86-85aa-3d78b68c2e23" containerName="extract-utilities" Nov 27 07:18:32 crc kubenswrapper[4971]: E1127 07:18:32.737174 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d86569d-88c3-4ae0-9261-affa354b038d" containerName="extract-utilities" Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.737183 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d86569d-88c3-4ae0-9261-affa354b038d" containerName="extract-utilities" Nov 27 07:18:32 crc kubenswrapper[4971]: E1127 07:18:32.737222 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d86569d-88c3-4ae0-9261-affa354b038d" containerName="extract-content" Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.737230 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d86569d-88c3-4ae0-9261-affa354b038d" containerName="extract-content" Nov 27 07:18:32 crc kubenswrapper[4971]: E1127 07:18:32.737253 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce5e91d8-51b9-4c86-85aa-3d78b68c2e23" containerName="extract-content" Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.737261 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce5e91d8-51b9-4c86-85aa-3d78b68c2e23" containerName="extract-content" Nov 27 07:18:32 crc kubenswrapper[4971]: E1127 07:18:32.737277 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d86569d-88c3-4ae0-9261-affa354b038d" containerName="registry-server" Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.737284 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d86569d-88c3-4ae0-9261-affa354b038d" containerName="registry-server" Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.737518 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d86569d-88c3-4ae0-9261-affa354b038d" containerName="registry-server" Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.737564 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce5e91d8-51b9-4c86-85aa-3d78b68c2e23" containerName="registry-server" Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.739327 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-v4r7q" Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.750349 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v4r7q"] Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.857103 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5bb8\" (UniqueName: \"kubernetes.io/projected/fabf66ef-ae78-4497-998d-95abd13bbab8-kube-api-access-v5bb8\") pod \"certified-operators-v4r7q\" (UID: \"fabf66ef-ae78-4497-998d-95abd13bbab8\") " pod="openshift-marketplace/certified-operators-v4r7q" Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.857231 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fabf66ef-ae78-4497-998d-95abd13bbab8-catalog-content\") pod \"certified-operators-v4r7q\" (UID: \"fabf66ef-ae78-4497-998d-95abd13bbab8\") " pod="openshift-marketplace/certified-operators-v4r7q" Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.857287 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fabf66ef-ae78-4497-998d-95abd13bbab8-utilities\") pod \"certified-operators-v4r7q\" (UID: \"fabf66ef-ae78-4497-998d-95abd13bbab8\") " pod="openshift-marketplace/certified-operators-v4r7q" Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.959843 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5bb8\" (UniqueName: \"kubernetes.io/projected/fabf66ef-ae78-4497-998d-95abd13bbab8-kube-api-access-v5bb8\") pod \"certified-operators-v4r7q\" (UID: \"fabf66ef-ae78-4497-998d-95abd13bbab8\") " pod="openshift-marketplace/certified-operators-v4r7q" Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.959907 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fabf66ef-ae78-4497-998d-95abd13bbab8-catalog-content\") pod \"certified-operators-v4r7q\" (UID: \"fabf66ef-ae78-4497-998d-95abd13bbab8\") " pod="openshift-marketplace/certified-operators-v4r7q" Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.959942 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fabf66ef-ae78-4497-998d-95abd13bbab8-utilities\") pod \"certified-operators-v4r7q\" (UID: \"fabf66ef-ae78-4497-998d-95abd13bbab8\") " pod="openshift-marketplace/certified-operators-v4r7q" Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.960418 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fabf66ef-ae78-4497-998d-95abd13bbab8-utilities\") pod \"certified-operators-v4r7q\" (UID: \"fabf66ef-ae78-4497-998d-95abd13bbab8\") " pod="openshift-marketplace/certified-operators-v4r7q" Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.960578 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fabf66ef-ae78-4497-998d-95abd13bbab8-catalog-content\") pod \"certified-operators-v4r7q\" (UID: \"fabf66ef-ae78-4497-998d-95abd13bbab8\") " pod="openshift-marketplace/certified-operators-v4r7q" Nov 27 07:18:32 crc kubenswrapper[4971]: I1127 07:18:32.990851 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-v5bb8\" (UniqueName: \"kubernetes.io/projected/fabf66ef-ae78-4497-998d-95abd13bbab8-kube-api-access-v5bb8\") pod \"certified-operators-v4r7q\" (UID: \"fabf66ef-ae78-4497-998d-95abd13bbab8\") " pod="openshift-marketplace/certified-operators-v4r7q" Nov 27 07:18:33 crc kubenswrapper[4971]: I1127 07:18:33.061576 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v4r7q" Nov 27 07:18:34 crc kubenswrapper[4971]: I1127 07:18:34.220990 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v4r7q"] Nov 27 07:18:34 crc kubenswrapper[4971]: W1127 07:18:34.229787 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfabf66ef_ae78_4497_998d_95abd13bbab8.slice/crio-5ea99795f94d15e7d8d2a64a2e44aed9de79b2ff44562f37067d7b501a968ed0 WatchSource:0}: Error finding container 5ea99795f94d15e7d8d2a64a2e44aed9de79b2ff44562f37067d7b501a968ed0: Status 404 returned error can't find the container with id 5ea99795f94d15e7d8d2a64a2e44aed9de79b2ff44562f37067d7b501a968ed0 Nov 27 07:18:34 crc kubenswrapper[4971]: I1127 07:18:34.486484 4971 generic.go:334] "Generic (PLEG): container finished" podID="fabf66ef-ae78-4497-998d-95abd13bbab8" containerID="6353f174a6c8b0b83eddbe0ddd990ad6ef08c5fe350d99ff37ab1e31a82eb790" exitCode=0 Nov 27 07:18:34 crc kubenswrapper[4971]: I1127 07:18:34.486549 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v4r7q" event={"ID":"fabf66ef-ae78-4497-998d-95abd13bbab8","Type":"ContainerDied","Data":"6353f174a6c8b0b83eddbe0ddd990ad6ef08c5fe350d99ff37ab1e31a82eb790"} Nov 27 07:18:34 crc kubenswrapper[4971]: I1127 07:18:34.486582 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v4r7q" event={"ID":"fabf66ef-ae78-4497-998d-95abd13bbab8","Type":"ContainerStarted","Data":"5ea99795f94d15e7d8d2a64a2e44aed9de79b2ff44562f37067d7b501a968ed0"} Nov 27 07:18:35 crc kubenswrapper[4971]: I1127 07:18:35.500282 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v4r7q" event={"ID":"fabf66ef-ae78-4497-998d-95abd13bbab8","Type":"ContainerStarted","Data":"9b107fde4f5f63e1923a73cfe0c3ffca844d489bcb74c6b62efcceacaef66508"} Nov 27 07:18:36 crc kubenswrapper[4971]: I1127 07:18:36.527163 4971 generic.go:334] "Generic (PLEG): container finished" podID="fabf66ef-ae78-4497-998d-95abd13bbab8" containerID="9b107fde4f5f63e1923a73cfe0c3ffca844d489bcb74c6b62efcceacaef66508" exitCode=0 Nov 27 07:18:36 crc kubenswrapper[4971]: I1127 07:18:36.527216 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v4r7q" event={"ID":"fabf66ef-ae78-4497-998d-95abd13bbab8","Type":"ContainerDied","Data":"9b107fde4f5f63e1923a73cfe0c3ffca844d489bcb74c6b62efcceacaef66508"} Nov 27 07:18:37 crc kubenswrapper[4971]: I1127 07:18:37.539803 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v4r7q" event={"ID":"fabf66ef-ae78-4497-998d-95abd13bbab8","Type":"ContainerStarted","Data":"172f687c02402ec48aa19bcb923e165cebb3972c8e1b83d44601a1d5f44f7220"} Nov 27 07:18:37 crc kubenswrapper[4971]: I1127 07:18:37.567769 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-v4r7q" 
podStartSLOduration=2.923844223 podStartE2EDuration="5.567739766s" podCreationTimestamp="2025-11-27 07:18:32 +0000 UTC" firstStartedPulling="2025-11-27 07:18:34.487984928 +0000 UTC m=+1552.680028846" lastFinishedPulling="2025-11-27 07:18:37.131880471 +0000 UTC m=+1555.323924389" observedRunningTime="2025-11-27 07:18:37.558745087 +0000 UTC m=+1555.750789055" watchObservedRunningTime="2025-11-27 07:18:37.567739766 +0000 UTC m=+1555.759783684" Nov 27 07:18:42 crc kubenswrapper[4971]: I1127 07:18:42.557596 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04" Nov 27 07:18:42 crc kubenswrapper[4971]: E1127 07:18:42.558413 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:18:43 crc kubenswrapper[4971]: I1127 07:18:43.062061 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-v4r7q" Nov 27 07:18:43 crc kubenswrapper[4971]: I1127 07:18:43.062387 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-v4r7q" Nov 27 07:18:43 crc kubenswrapper[4971]: I1127 07:18:43.149247 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-v4r7q" Nov 27 07:18:43 crc kubenswrapper[4971]: I1127 07:18:43.661813 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-v4r7q" Nov 27 07:18:43 crc kubenswrapper[4971]: I1127 07:18:43.711508 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-v4r7q"] Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.352103 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.352808 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="36d86c04-b5e4-4334-832b-10953c1d0b1d" containerName="openstackclient" containerID="cri-o://605fc6ed04781d6ae79576290eb45958f2fcc0fff09021399050e8cc200f2306" gracePeriod=2 Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.375152 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.585150 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder6f3a-account-delete-nzrmk"] Nov 27 07:18:44 crc kubenswrapper[4971]: E1127 07:18:44.585692 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36d86c04-b5e4-4334-832b-10953c1d0b1d" containerName="openstackclient" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.585710 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="36d86c04-b5e4-4334-832b-10953c1d0b1d" containerName="openstackclient" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.585924 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="36d86c04-b5e4-4334-832b-10953c1d0b1d" containerName="openstackclient" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.586679 4971 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/cinder6f3a-account-delete-nzrmk" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.601665 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6ndq\" (UniqueName: \"kubernetes.io/projected/51e69600-7930-4ccb-a8ff-0ad3377bf3a7-kube-api-access-r6ndq\") pod \"cinder6f3a-account-delete-nzrmk\" (UID: \"51e69600-7930-4ccb-a8ff-0ad3377bf3a7\") " pod="openstack/cinder6f3a-account-delete-nzrmk" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.601724 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/51e69600-7930-4ccb-a8ff-0ad3377bf3a7-operator-scripts\") pod \"cinder6f3a-account-delete-nzrmk\" (UID: \"51e69600-7930-4ccb-a8ff-0ad3377bf3a7\") " pod="openstack/cinder6f3a-account-delete-nzrmk" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.623291 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder6f3a-account-delete-nzrmk"] Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.700168 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance7577-account-delete-hmqt9"] Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.703588 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance7577-account-delete-hmqt9" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.704677 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/51e69600-7930-4ccb-a8ff-0ad3377bf3a7-operator-scripts\") pod \"cinder6f3a-account-delete-nzrmk\" (UID: \"51e69600-7930-4ccb-a8ff-0ad3377bf3a7\") " pod="openstack/cinder6f3a-account-delete-nzrmk" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.704895 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6ndq\" (UniqueName: \"kubernetes.io/projected/51e69600-7930-4ccb-a8ff-0ad3377bf3a7-kube-api-access-r6ndq\") pod \"cinder6f3a-account-delete-nzrmk\" (UID: \"51e69600-7930-4ccb-a8ff-0ad3377bf3a7\") " pod="openstack/cinder6f3a-account-delete-nzrmk" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.706026 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/51e69600-7930-4ccb-a8ff-0ad3377bf3a7-operator-scripts\") pod \"cinder6f3a-account-delete-nzrmk\" (UID: \"51e69600-7930-4ccb-a8ff-0ad3377bf3a7\") " pod="openstack/cinder6f3a-account-delete-nzrmk" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.729305 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance7577-account-delete-hmqt9"] Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.756700 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6ndq\" (UniqueName: \"kubernetes.io/projected/51e69600-7930-4ccb-a8ff-0ad3377bf3a7-kube-api-access-r6ndq\") pod \"cinder6f3a-account-delete-nzrmk\" (UID: \"51e69600-7930-4ccb-a8ff-0ad3377bf3a7\") " pod="openstack/cinder6f3a-account-delete-nzrmk" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.800578 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement0147-account-delete-w7x5l"] Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.801977 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement0147-account-delete-w7x5l" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.806669 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/83ad8915-ac3b-4891-ae7a-9b862747569f-operator-scripts\") pod \"glance7577-account-delete-hmqt9\" (UID: \"83ad8915-ac3b-4891-ae7a-9b862747569f\") " pod="openstack/glance7577-account-delete-hmqt9" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.806722 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfwrd\" (UniqueName: \"kubernetes.io/projected/c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d-kube-api-access-nfwrd\") pod \"placement0147-account-delete-w7x5l\" (UID: \"c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d\") " pod="openstack/placement0147-account-delete-w7x5l" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.806762 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8hqt\" (UniqueName: \"kubernetes.io/projected/83ad8915-ac3b-4891-ae7a-9b862747569f-kube-api-access-w8hqt\") pod \"glance7577-account-delete-hmqt9\" (UID: \"83ad8915-ac3b-4891-ae7a-9b862747569f\") " pod="openstack/glance7577-account-delete-hmqt9" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.806810 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d-operator-scripts\") pod \"placement0147-account-delete-w7x5l\" (UID: \"c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d\") " pod="openstack/placement0147-account-delete-w7x5l" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.857500 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement0147-account-delete-w7x5l"] Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.884595 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-th4ww"] Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.910052 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8hqt\" (UniqueName: \"kubernetes.io/projected/83ad8915-ac3b-4891-ae7a-9b862747569f-kube-api-access-w8hqt\") pod \"glance7577-account-delete-hmqt9\" (UID: \"83ad8915-ac3b-4891-ae7a-9b862747569f\") " pod="openstack/glance7577-account-delete-hmqt9" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.910139 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d-operator-scripts\") pod \"placement0147-account-delete-w7x5l\" (UID: \"c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d\") " pod="openstack/placement0147-account-delete-w7x5l" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.910234 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/83ad8915-ac3b-4891-ae7a-9b862747569f-operator-scripts\") pod \"glance7577-account-delete-hmqt9\" (UID: \"83ad8915-ac3b-4891-ae7a-9b862747569f\") " pod="openstack/glance7577-account-delete-hmqt9" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.910287 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfwrd\" (UniqueName: 
\"kubernetes.io/projected/c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d-kube-api-access-nfwrd\") pod \"placement0147-account-delete-w7x5l\" (UID: \"c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d\") " pod="openstack/placement0147-account-delete-w7x5l" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.911887 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d-operator-scripts\") pod \"placement0147-account-delete-w7x5l\" (UID: \"c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d\") " pod="openstack/placement0147-account-delete-w7x5l" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.912059 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder6f3a-account-delete-nzrmk" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.912174 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/83ad8915-ac3b-4891-ae7a-9b862747569f-operator-scripts\") pod \"glance7577-account-delete-hmqt9\" (UID: \"83ad8915-ac3b-4891-ae7a-9b862747569f\") " pod="openstack/glance7577-account-delete-hmqt9" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.914217 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-th4ww"] Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.962614 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.962900 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="abd4a589-1b2e-4559-852f-2c27c0d8c459" containerName="ovn-northd" containerID="cri-o://c9d172c81f9f3625bed2063b71b15aa0aaee90448e7484f3536af33829d91bf8" gracePeriod=30 Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.963032 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="abd4a589-1b2e-4559-852f-2c27c0d8c459" containerName="openstack-network-exporter" containerID="cri-o://fe9a50947e8810fe514a39d8fd3842aa695ffb13f22c248d0a3f4b1749faa50c" gracePeriod=30 Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.987746 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8hqt\" (UniqueName: \"kubernetes.io/projected/83ad8915-ac3b-4891-ae7a-9b862747569f-kube-api-access-w8hqt\") pod \"glance7577-account-delete-hmqt9\" (UID: \"83ad8915-ac3b-4891-ae7a-9b862747569f\") " pod="openstack/glance7577-account-delete-hmqt9" Nov 27 07:18:44 crc kubenswrapper[4971]: I1127 07:18:44.996491 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfwrd\" (UniqueName: \"kubernetes.io/projected/c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d-kube-api-access-nfwrd\") pod \"placement0147-account-delete-w7x5l\" (UID: \"c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d\") " pod="openstack/placement0147-account-delete-w7x5l" Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:44.999814 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.043409 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-kv5bz"] Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.050021 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance7577-account-delete-hmqt9" Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.066754 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-z4gfl"] Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.079079 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-kv5bz"] Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.095171 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-z4gfl"] Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.142397 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement0147-account-delete-w7x5l" Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.195264 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-45rt8"] Nov 27 07:18:45 crc kubenswrapper[4971]: E1127 07:18:45.229645 4971 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Nov 27 07:18:45 crc kubenswrapper[4971]: E1127 07:18:45.229725 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-config-data podName:d6439a3c-ee26-467c-8e42-5abbbf390f16 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:45.729706286 +0000 UTC m=+1563.921750204 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-config-data") pod "rabbitmq-server-0" (UID: "d6439a3c-ee26-467c-8e42-5abbbf390f16") : configmap "rabbitmq-config-data" not found Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.292082 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-db2qc"] Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.316603 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-x4rpv"] Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.316890 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-x4rpv" podUID="31d8afb0-f8c2-4d34-879f-260e94779de0" containerName="openstack-network-exporter" containerID="cri-o://ac55cff26fc898c9d7406993cb5d12c6b2b8a73445d04ffc7d5949cc59f486b9" gracePeriod=30 Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.327934 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.328270 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="6754a19e-e024-4b15-8464-49e127bd35ad" containerName="cinder-scheduler" containerID="cri-o://f25c15b6849ee4de1357a742e5ff9d74b03bc332580c116f4c0b091d577d4a1d" gracePeriod=30 Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.328425 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="6754a19e-e024-4b15-8464-49e127bd35ad" containerName="probe" containerID="cri-o://11a43f33fa3835d6bc3caba0e4cd634a1bc0dcb8a9fc8879c3ba3bbe47cba2b2" gracePeriod=30 Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.365607 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-xpjjx"] Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.404651 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/barbican-db-sync-xpjjx"] Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.429219 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican2596-account-delete-pttgn"] Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.430673 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican2596-account-delete-pttgn" Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.502601 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican2596-account-delete-pttgn"] Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.677929 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7h9l\" (UniqueName: \"kubernetes.io/projected/aba3aa08-61e1-48d3-bf4b-cb45e0b71561-kube-api-access-k7h9l\") pod \"barbican2596-account-delete-pttgn\" (UID: \"aba3aa08-61e1-48d3-bf4b-cb45e0b71561\") " pod="openstack/barbican2596-account-delete-pttgn" Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.678153 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aba3aa08-61e1-48d3-bf4b-cb45e0b71561-operator-scripts\") pod \"barbican2596-account-delete-pttgn\" (UID: \"aba3aa08-61e1-48d3-bf4b-cb45e0b71561\") " pod="openstack/barbican2596-account-delete-pttgn" Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.796161 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7h9l\" (UniqueName: \"kubernetes.io/projected/aba3aa08-61e1-48d3-bf4b-cb45e0b71561-kube-api-access-k7h9l\") pod \"barbican2596-account-delete-pttgn\" (UID: \"aba3aa08-61e1-48d3-bf4b-cb45e0b71561\") " pod="openstack/barbican2596-account-delete-pttgn" Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.796743 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aba3aa08-61e1-48d3-bf4b-cb45e0b71561-operator-scripts\") pod \"barbican2596-account-delete-pttgn\" (UID: \"aba3aa08-61e1-48d3-bf4b-cb45e0b71561\") " pod="openstack/barbican2596-account-delete-pttgn" Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.797998 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aba3aa08-61e1-48d3-bf4b-cb45e0b71561-operator-scripts\") pod \"barbican2596-account-delete-pttgn\" (UID: \"aba3aa08-61e1-48d3-bf4b-cb45e0b71561\") " pod="openstack/barbican2596-account-delete-pttgn" Nov 27 07:18:45 crc kubenswrapper[4971]: E1127 07:18:45.827192 4971 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Nov 27 07:18:45 crc kubenswrapper[4971]: E1127 07:18:45.827269 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-config-data podName:d6439a3c-ee26-467c-8e42-5abbbf390f16 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:46.827244083 +0000 UTC m=+1565.019288001 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-config-data") pod "rabbitmq-server-0" (UID: "d6439a3c-ee26-467c-8e42-5abbbf390f16") : configmap "rabbitmq-config-data" not found Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.915135 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7h9l\" (UniqueName: \"kubernetes.io/projected/aba3aa08-61e1-48d3-bf4b-cb45e0b71561-kube-api-access-k7h9l\") pod \"barbican2596-account-delete-pttgn\" (UID: \"aba3aa08-61e1-48d3-bf4b-cb45e0b71561\") " pod="openstack/barbican2596-account-delete-pttgn" Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.916329 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.917359 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" containerName="openstack-network-exporter" containerID="cri-o://7a4e158b7c7e9c61d16087892eb264fb5c50d094f187b71cab60d863a90c10b0" gracePeriod=300 Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.969321 4971 generic.go:334] "Generic (PLEG): container finished" podID="abd4a589-1b2e-4559-852f-2c27c0d8c459" containerID="fe9a50947e8810fe514a39d8fd3842aa695ffb13f22c248d0a3f4b1749faa50c" exitCode=2 Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.969585 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-v4r7q" podUID="fabf66ef-ae78-4497-998d-95abd13bbab8" containerName="registry-server" containerID="cri-o://172f687c02402ec48aa19bcb923e165cebb3972c8e1b83d44601a1d5f44f7220" gracePeriod=2 Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.969858 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"abd4a589-1b2e-4559-852f-2c27c0d8c459","Type":"ContainerDied","Data":"fe9a50947e8810fe514a39d8fd3842aa695ffb13f22c248d0a3f4b1749faa50c"} Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.983609 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.983915 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" containerName="cinder-api-log" containerID="cri-o://9cbb1e297d7eff42db8e89fa629eec8f4c7fb3f3405eaec525774254aea00153" gracePeriod=30 Nov 27 07:18:45 crc kubenswrapper[4971]: I1127 07:18:45.984058 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" containerName="cinder-api" containerID="cri-o://32b6c0f43018436689e6595a7fbba50fff0b5d5d5f4ec9b1c259ec629b4ab6bc" gracePeriod=30 Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.087025 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican2596-account-delete-pttgn" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.123031 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron1af4-account-delete-48m5w"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.142642 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron1af4-account-delete-48m5w" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.170815 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron1af4-account-delete-48m5w"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.188181 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-65cf5bcbb-zf65t"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.188435 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-65cf5bcbb-zf65t" podUID="c88f9c00-b02f-4070-b81e-733009e44691" containerName="placement-log" containerID="cri-o://e70c98903785fe4279c39fe33060ba6d64e42416ec0207f04ea6c26602ecfd68" gracePeriod=30 Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.188990 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-65cf5bcbb-zf65t" podUID="c88f9c00-b02f-4070-b81e-733009e44691" containerName="placement-api" containerID="cri-o://669e2c2b2985b62934235229da4195fd416e9129758e938e024eadd9d158c9c5" gracePeriod=30 Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.232514 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.233222 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="6e13a581-61d0-4a1f-ad42-5f2783417c70" containerName="openstack-network-exporter" containerID="cri-o://35daabf30eff3474954a06c52ad28d749b52aae9fc9b781f447588cf8b408b5f" gracePeriod=300 Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.234938 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" containerName="ovsdbserver-nb" containerID="cri-o://ccb0479ba9995bc8550fbc2dbf993c3449d21c9af046183d2f478467bb11fc07" gracePeriod=300 Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.258405 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91f979c6-21cc-4848-9eb5-b8bfc4abf082-operator-scripts\") pod \"neutron1af4-account-delete-48m5w\" (UID: \"91f979c6-21cc-4848-9eb5-b8bfc4abf082\") " pod="openstack/neutron1af4-account-delete-48m5w" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.258514 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tllm6\" (UniqueName: \"kubernetes.io/projected/91f979c6-21cc-4848-9eb5-b8bfc4abf082-kube-api-access-tllm6\") pod \"neutron1af4-account-delete-48m5w\" (UID: \"91f979c6-21cc-4848-9eb5-b8bfc4abf082\") " pod="openstack/neutron1af4-account-delete-48m5w" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.283986 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.284603 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c2e2055e-1200-46e8-a49e-c6b490702c9b" containerName="glance-log" containerID="cri-o://8dbea1c5d8961b687109edee2265a2311adecbc668e362357af80dca1ef37927" gracePeriod=30 Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.285232 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" 
podUID="c2e2055e-1200-46e8-a49e-c6b490702c9b" containerName="glance-httpd" containerID="cri-o://c813e7acb7f4a74ba8b74e4f55535ea0e0307a8468b0623d9ec10735c4807fad" gracePeriod=30 Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.322899 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novaapi6022-account-delete-bhttv"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.324375 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi6022-account-delete-bhttv" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.361003 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tllm6\" (UniqueName: \"kubernetes.io/projected/91f979c6-21cc-4848-9eb5-b8bfc4abf082-kube-api-access-tllm6\") pod \"neutron1af4-account-delete-48m5w\" (UID: \"91f979c6-21cc-4848-9eb5-b8bfc4abf082\") " pod="openstack/neutron1af4-account-delete-48m5w" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.361164 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91f979c6-21cc-4848-9eb5-b8bfc4abf082-operator-scripts\") pod \"neutron1af4-account-delete-48m5w\" (UID: \"91f979c6-21cc-4848-9eb5-b8bfc4abf082\") " pod="openstack/neutron1af4-account-delete-48m5w" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.362413 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapi6022-account-delete-bhttv"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.385981 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91f979c6-21cc-4848-9eb5-b8bfc4abf082-operator-scripts\") pod \"neutron1af4-account-delete-48m5w\" (UID: \"91f979c6-21cc-4848-9eb5-b8bfc4abf082\") " pod="openstack/neutron1af4-account-delete-48m5w" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.392149 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tllm6\" (UniqueName: \"kubernetes.io/projected/91f979c6-21cc-4848-9eb5-b8bfc4abf082-kube-api-access-tllm6\") pod \"neutron1af4-account-delete-48m5w\" (UID: \"91f979c6-21cc-4848-9eb5-b8bfc4abf082\") " pod="openstack/neutron1af4-account-delete-48m5w" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.414109 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novacell16991-account-delete-ftkpc"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.415579 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell16991-account-delete-ftkpc" Nov 27 07:18:46 crc kubenswrapper[4971]: E1127 07:18:46.444828 4971 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-db2qc" message=< Nov 27 07:18:46 crc kubenswrapper[4971]: Exiting ovn-controller (1) [ OK ] Nov 27 07:18:46 crc kubenswrapper[4971]: > Nov 27 07:18:46 crc kubenswrapper[4971]: E1127 07:18:46.444873 4971 kuberuntime_container.go:691] "PreStop hook failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " pod="openstack/ovn-controller-db2qc" podUID="0aa6e1b6-c18b-4a02-a396-880350cde407" containerName="ovn-controller" containerID="cri-o://fc482f882a3b7633d1172ebad636e951956bbf0dc53007a5de81a84d031ea868" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.444913 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-db2qc" podUID="0aa6e1b6-c18b-4a02-a396-880350cde407" containerName="ovn-controller" containerID="cri-o://fc482f882a3b7633d1172ebad636e951956bbf0dc53007a5de81a84d031ea868" gracePeriod=29 Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.465180 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5v769\" (UniqueName: \"kubernetes.io/projected/cbef532c-59fb-40ac-bde3-35b8f3616d85-kube-api-access-5v769\") pod \"novaapi6022-account-delete-bhttv\" (UID: \"cbef532c-59fb-40ac-bde3-35b8f3616d85\") " pod="openstack/novaapi6022-account-delete-bhttv" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.465391 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbef532c-59fb-40ac-bde3-35b8f3616d85-operator-scripts\") pod \"novaapi6022-account-delete-bhttv\" (UID: \"cbef532c-59fb-40ac-bde3-35b8f3616d85\") " pod="openstack/novaapi6022-account-delete-bhttv" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.468952 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-669669cf59-5rwqh"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.469165 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-669669cf59-5rwqh" podUID="b3fa1872-f7d9-4531-bc33-619419f530a5" containerName="dnsmasq-dns" containerID="cri-o://8e8e087cfd5dc9b5572bd417f86679e6c4d8fa3a5d6ad123e1e7cf39a19c38ce" gracePeriod=10 Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.500603 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell16991-account-delete-ftkpc"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.510455 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-s6jkh"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.525074 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-s6jkh"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.538034 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.538237 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" 
podUID="1206c914-fbe7-4e8f-8470-861b0ebf75de" containerName="glance-log" containerID="cri-o://80065ffd1cc70ab9f5e46cbb1026eb2c1f6926d1938126123dd083ba516fd507" gracePeriod=30 Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.538681 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="1206c914-fbe7-4e8f-8470-861b0ebf75de" containerName="glance-httpd" containerID="cri-o://5668516f8e5f98d27ae8ef1c9b2857419b64f8bc06f8aeffeddbb87691114627" gracePeriod=30 Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.578326 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbef532c-59fb-40ac-bde3-35b8f3616d85-operator-scripts\") pod \"novaapi6022-account-delete-bhttv\" (UID: \"cbef532c-59fb-40ac-bde3-35b8f3616d85\") " pod="openstack/novaapi6022-account-delete-bhttv" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.578382 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9411d1e4-0384-4caf-b95e-e891b811b402-operator-scripts\") pod \"novacell16991-account-delete-ftkpc\" (UID: \"9411d1e4-0384-4caf-b95e-e891b811b402\") " pod="openstack/novacell16991-account-delete-ftkpc" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.578487 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5v769\" (UniqueName: \"kubernetes.io/projected/cbef532c-59fb-40ac-bde3-35b8f3616d85-kube-api-access-5v769\") pod \"novaapi6022-account-delete-bhttv\" (UID: \"cbef532c-59fb-40ac-bde3-35b8f3616d85\") " pod="openstack/novaapi6022-account-delete-bhttv" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.578505 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzqfv\" (UniqueName: \"kubernetes.io/projected/9411d1e4-0384-4caf-b95e-e891b811b402-kube-api-access-vzqfv\") pod \"novacell16991-account-delete-ftkpc\" (UID: \"9411d1e4-0384-4caf-b95e-e891b811b402\") " pod="openstack/novacell16991-account-delete-ftkpc" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.579258 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbef532c-59fb-40ac-bde3-35b8f3616d85-operator-scripts\") pod \"novaapi6022-account-delete-bhttv\" (UID: \"cbef532c-59fb-40ac-bde3-35b8f3616d85\") " pod="openstack/novaapi6022-account-delete-bhttv" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.620601 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5v769\" (UniqueName: \"kubernetes.io/projected/cbef532c-59fb-40ac-bde3-35b8f3616d85-kube-api-access-5v769\") pod \"novaapi6022-account-delete-bhttv\" (UID: \"cbef532c-59fb-40ac-bde3-35b8f3616d85\") " pod="openstack/novaapi6022-account-delete-bhttv" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.629444 4971 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/swift-storage-0" secret="" err="secret \"swift-swift-dockercfg-k5bl4\" not found" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.680366 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9411d1e4-0384-4caf-b95e-e891b811b402-operator-scripts\") pod \"novacell16991-account-delete-ftkpc\" (UID: \"9411d1e4-0384-4caf-b95e-e891b811b402\") " pod="openstack/novacell16991-account-delete-ftkpc" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.680490 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzqfv\" (UniqueName: \"kubernetes.io/projected/9411d1e4-0384-4caf-b95e-e891b811b402-kube-api-access-vzqfv\") pod \"novacell16991-account-delete-ftkpc\" (UID: \"9411d1e4-0384-4caf-b95e-e891b811b402\") " pod="openstack/novacell16991-account-delete-ftkpc" Nov 27 07:18:46 crc kubenswrapper[4971]: E1127 07:18:46.681026 4971 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Nov 27 07:18:46 crc kubenswrapper[4971]: E1127 07:18:46.681089 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9411d1e4-0384-4caf-b95e-e891b811b402-operator-scripts podName:9411d1e4-0384-4caf-b95e-e891b811b402 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:47.181069049 +0000 UTC m=+1565.373112967 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/9411d1e4-0384-4caf-b95e-e891b811b402-operator-scripts") pod "novacell16991-account-delete-ftkpc" (UID: "9411d1e4-0384-4caf-b95e-e891b811b402") : configmap "openstack-cell1-scripts" not found Nov 27 07:18:46 crc kubenswrapper[4971]: E1127 07:18:46.707487 4971 projected.go:194] Error preparing data for projected volume kube-api-access-vzqfv for pod openstack/novacell16991-account-delete-ftkpc: failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Nov 27 07:18:46 crc kubenswrapper[4971]: E1127 07:18:46.707766 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9411d1e4-0384-4caf-b95e-e891b811b402-kube-api-access-vzqfv podName:9411d1e4-0384-4caf-b95e-e891b811b402 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:47.207743228 +0000 UTC m=+1565.399787146 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-vzqfv" (UniqueName: "kubernetes.io/projected/9411d1e4-0384-4caf-b95e-e891b811b402-kube-api-access-vzqfv") pod "novacell16991-account-delete-ftkpc" (UID: "9411d1e4-0384-4caf-b95e-e891b811b402") : failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Nov 27 07:18:46 crc kubenswrapper[4971]: E1127 07:18:46.783564 4971 projected.go:288] Couldn't get configMap openstack/swift-storage-config-data: configmap "swift-storage-config-data" not found Nov 27 07:18:46 crc kubenswrapper[4971]: E1127 07:18:46.783594 4971 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found Nov 27 07:18:46 crc kubenswrapper[4971]: E1127 07:18:46.783602 4971 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 27 07:18:46 crc kubenswrapper[4971]: E1127 07:18:46.783617 4971 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Nov 27 07:18:46 crc kubenswrapper[4971]: E1127 07:18:46.783665 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift podName:b9ccc9bd-d955-4853-986f-95597f2c70e6 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:47.283646496 +0000 UTC m=+1565.475690414 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift") pod "swift-storage-0" (UID: "b9ccc9bd-d955-4853-986f-95597f2c70e6") : [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Nov 27 07:18:46 crc kubenswrapper[4971]: E1127 07:18:46.893545 4971 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Nov 27 07:18:46 crc kubenswrapper[4971]: E1127 07:18:46.893625 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-config-data podName:d6439a3c-ee26-467c-8e42-5abbbf390f16 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:48.893608406 +0000 UTC m=+1567.085652314 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-config-data") pod "rabbitmq-server-0" (UID: "d6439a3c-ee26-467c-8e42-5abbbf390f16") : configmap "rabbitmq-config-data" not found Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.898064 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="6e13a581-61d0-4a1f-ad42-5f2783417c70" containerName="ovsdbserver-sb" containerID="cri-o://203479ecfee56a9441b43631622196bf609ce8fbfc30cbc6c31eb9c3e9440056" gracePeriod=300 Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.920794 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04c6fe9e-2c13-4757-b2f7-237b1b8849f7" path="/var/lib/kubelet/pods/04c6fe9e-2c13-4757-b2f7-237b1b8849f7/volumes" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.921581 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1098ae71-b794-4670-af66-67bd17375e2c" path="/var/lib/kubelet/pods/1098ae71-b794-4670-af66-67bd17375e2c/volumes" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.922367 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6652b368-b37b-48f3-839e-a89982fec862" path="/var/lib/kubelet/pods/6652b368-b37b-48f3-839e-a89982fec862/volumes" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.923657 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5689fc8-154a-4710-a2d6-9bc41e927f77" path="/var/lib/kubelet/pods/c5689fc8-154a-4710-a2d6-9bc41e927f77/volumes" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.924764 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd83d353-e492-4119-8890-77569c78bed2" path="/var/lib/kubelet/pods/cd83d353-e492-4119-8890-77569c78bed2/volumes" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.925878 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novacell091e4-account-delete-8kghx"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.928470 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.928549 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell091e4-account-delete-8kghx"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.928564 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-wsg7b"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.928581 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-wsg7b"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.928598 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.928617 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-k2bcl"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.928630 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-k2bcl"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.928644 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-pkkk2"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.928656 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-pkkk2"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.928669 4971 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-66674dfb5f-52hcq"] Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.928942 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-66674dfb5f-52hcq" podUID="82a33cc1-32fe-464f-ac33-b802fd32a4c1" containerName="proxy-httpd" containerID="cri-o://22d3d2192ae3c18ce743c1ddda10575c592232c7adf55747fe07b64dfb765bb4" gracePeriod=30 Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.929265 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-66674dfb5f-52hcq" podUID="82a33cc1-32fe-464f-ac33-b802fd32a4c1" containerName="proxy-server" containerID="cri-o://bce205f17147516a60414770d105f40e40e7784c6e25ddf99d12115151c6a716" gracePeriod=30 Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.929765 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell091e4-account-delete-8kghx" Nov 27 07:18:46 crc kubenswrapper[4971]: I1127 07:18:46.964628 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder6f3a-account-delete-nzrmk"] Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.026036 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7b8cfffdbf-92cj6"] Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.026329 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7b8cfffdbf-92cj6" podUID="622e028f-779d-4306-923c-ee204fdef6b0" containerName="neutron-api" containerID="cri-o://2685d0639fa1ae15b7a9608c81aa2b95f5d3f50c5aefc4159568fe3afd5b0c33" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.029981 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7b8cfffdbf-92cj6" podUID="622e028f-779d-4306-923c-ee204fdef6b0" containerName="neutron-httpd" containerID="cri-o://360d9e165c0c9e2b9b0b1cee20e9ddcb63854ee168b8dc44e7daf6d665e3532e" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.036113 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-45rt8" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovs-vswitchd" containerID="cri-o://8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" gracePeriod=29 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.049813 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55tdk\" (UniqueName: \"kubernetes.io/projected/19b68245-2b99-4337-892f-059f05113ad6-kube-api-access-55tdk\") pod \"novacell091e4-account-delete-8kghx\" (UID: \"19b68245-2b99-4337-892f-059f05113ad6\") " pod="openstack/novacell091e4-account-delete-8kghx" Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.051150 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19b68245-2b99-4337-892f-059f05113ad6-operator-scripts\") pod \"novacell091e4-account-delete-8kghx\" (UID: \"19b68245-2b99-4337-892f-059f05113ad6\") " pod="openstack/novacell091e4-account-delete-8kghx" Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.055610 4971 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.055675 4971 nestedpendingoperations.go:348] 
Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.055675 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-config-data podName:640c3829-d2e9-49e1-82e3-bd213aa992dd nodeName:}" failed. No retries permitted until 2025-11-27 07:18:47.555655538 +0000 UTC m=+1565.747699456 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-config-data") pod "rabbitmq-cell1-server-0" (UID: "640c3829-d2e9-49e1-82e3-bd213aa992dd") : configmap "rabbitmq-cell1-config-data" not found
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.060216 4971 generic.go:334] "Generic (PLEG): container finished" podID="1206c914-fbe7-4e8f-8470-861b0ebf75de" containerID="80065ffd1cc70ab9f5e46cbb1026eb2c1f6926d1938126123dd083ba516fd507" exitCode=143
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.060302 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1206c914-fbe7-4e8f-8470-861b0ebf75de","Type":"ContainerDied","Data":"80065ffd1cc70ab9f5e46cbb1026eb2c1f6926d1938126123dd083ba516fd507"}
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.062285 4971 generic.go:334] "Generic (PLEG): container finished" podID="c88f9c00-b02f-4070-b81e-733009e44691" containerID="e70c98903785fe4279c39fe33060ba6d64e42416ec0207f04ea6c26602ecfd68" exitCode=143
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.069618 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-65cf5bcbb-zf65t" event={"ID":"c88f9c00-b02f-4070-b81e-733009e44691","Type":"ContainerDied","Data":"e70c98903785fe4279c39fe33060ba6d64e42416ec0207f04ea6c26602ecfd68"}
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.097794 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-5b7b448d48-2wggc"]
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.097743 4971 generic.go:334] "Generic (PLEG): container finished" podID="b3fa1872-f7d9-4531-bc33-619419f530a5" containerID="8e8e087cfd5dc9b5572bd417f86679e6c4d8fa3a5d6ad123e1e7cf39a19c38ce" exitCode=0
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.097955 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-669669cf59-5rwqh" event={"ID":"b3fa1872-f7d9-4531-bc33-619419f530a5","Type":"ContainerDied","Data":"8e8e087cfd5dc9b5572bd417f86679e6c4d8fa3a5d6ad123e1e7cf39a19c38ce"}
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.098690 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc" podUID="6e067498-7bc0-4bc5-a9a6-696c8aa3cf71" containerName="barbican-keystone-listener-log" containerID="cri-o://cce9aeab95941685da7fd97b4408ffcb6ac7f7c473cf5bbf43edec5fe397efd6" gracePeriod=30
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.099511 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc" podUID="6e067498-7bc0-4bc5-a9a6-696c8aa3cf71" containerName="barbican-keystone-listener" containerID="cri-o://0d9471b432378d52a3ce483c444b3f5beceab405dd8b991eb4ecb32b754851b8" gracePeriod=30
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.139896 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-7865dbd7d9-zp55h"]
pod="openstack/barbican-worker-7865dbd7d9-zp55h" podUID="b3616559-d640-4b3b-a4b3-b9d9af1d0061" containerName="barbican-worker-log" containerID="cri-o://85ed74a43c71f2fd0dc70f5ef0da68149334b289a557cdd8a6923696f588e66b" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.140945 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-7865dbd7d9-zp55h" podUID="b3616559-d640-4b3b-a4b3-b9d9af1d0061" containerName="barbican-worker" containerID="cri-o://432905efdade8907249e6259f8519c70b607016106a34709d73f373c91efe0a5" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.155143 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55tdk\" (UniqueName: \"kubernetes.io/projected/19b68245-2b99-4337-892f-059f05113ad6-kube-api-access-55tdk\") pod \"novacell091e4-account-delete-8kghx\" (UID: \"19b68245-2b99-4337-892f-059f05113ad6\") " pod="openstack/novacell091e4-account-delete-8kghx" Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.155305 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19b68245-2b99-4337-892f-059f05113ad6-operator-scripts\") pod \"novacell091e4-account-delete-8kghx\" (UID: \"19b68245-2b99-4337-892f-059f05113ad6\") " pod="openstack/novacell091e4-account-delete-8kghx" Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.158075 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19b68245-2b99-4337-892f-059f05113ad6-operator-scripts\") pod \"novacell091e4-account-delete-8kghx\" (UID: \"19b68245-2b99-4337-892f-059f05113ad6\") " pod="openstack/novacell091e4-account-delete-8kghx" Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.169649 4971 generic.go:334] "Generic (PLEG): container finished" podID="36d86c04-b5e4-4334-832b-10953c1d0b1d" containerID="605fc6ed04781d6ae79576290eb45958f2fcc0fff09021399050e8cc200f2306" exitCode=137 Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.173156 4971 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Nov 27 07:18:47 crc kubenswrapper[4971]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Nov 27 07:18:47 crc kubenswrapper[4971]: + source /usr/local/bin/container-scripts/functions Nov 27 07:18:47 crc kubenswrapper[4971]: ++ OVNBridge=br-int Nov 27 07:18:47 crc kubenswrapper[4971]: ++ OVNRemote=tcp:localhost:6642 Nov 27 07:18:47 crc kubenswrapper[4971]: ++ OVNEncapType=geneve Nov 27 07:18:47 crc kubenswrapper[4971]: ++ OVNAvailabilityZones= Nov 27 07:18:47 crc kubenswrapper[4971]: ++ EnableChassisAsGateway=true Nov 27 07:18:47 crc kubenswrapper[4971]: ++ PhysicalNetworks= Nov 27 07:18:47 crc kubenswrapper[4971]: ++ OVNHostName= Nov 27 07:18:47 crc kubenswrapper[4971]: ++ DB_FILE=/etc/openvswitch/conf.db Nov 27 07:18:47 crc kubenswrapper[4971]: ++ ovs_dir=/var/lib/openvswitch Nov 27 07:18:47 crc kubenswrapper[4971]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Nov 27 07:18:47 crc kubenswrapper[4971]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Nov 27 07:18:47 crc kubenswrapper[4971]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Nov 27 07:18:47 crc kubenswrapper[4971]: + '[' '!' 
Nov 27 07:18:47 crc kubenswrapper[4971]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Nov 27 07:18:47 crc kubenswrapper[4971]: + sleep 0.5
Nov 27 07:18:47 crc kubenswrapper[4971]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Nov 27 07:18:47 crc kubenswrapper[4971]: + sleep 0.5
Nov 27 07:18:47 crc kubenswrapper[4971]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Nov 27 07:18:47 crc kubenswrapper[4971]: + cleanup_ovsdb_server_semaphore
Nov 27 07:18:47 crc kubenswrapper[4971]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server
Nov 27 07:18:47 crc kubenswrapper[4971]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd
Nov 27 07:18:47 crc kubenswrapper[4971]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-45rt8" message=<
Nov 27 07:18:47 crc kubenswrapper[4971]: Exiting ovsdb-server (5) [ OK ]
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh
Nov 27 07:18:47 crc kubenswrapper[4971]: + source /usr/local/bin/container-scripts/functions
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ OVNBridge=br-int
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ OVNRemote=tcp:localhost:6642
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ OVNEncapType=geneve
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ OVNAvailabilityZones=
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ EnableChassisAsGateway=true
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ PhysicalNetworks=
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ OVNHostName=
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ DB_FILE=/etc/openvswitch/conf.db
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ ovs_dir=/var/lib/openvswitch
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server
Nov 27 07:18:47 crc kubenswrapper[4971]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Nov 27 07:18:47 crc kubenswrapper[4971]: + sleep 0.5
Nov 27 07:18:47 crc kubenswrapper[4971]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Nov 27 07:18:47 crc kubenswrapper[4971]: + sleep 0.5
Nov 27 07:18:47 crc kubenswrapper[4971]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Nov 27 07:18:47 crc kubenswrapper[4971]: + cleanup_ovsdb_server_semaphore
Nov 27 07:18:47 crc kubenswrapper[4971]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server
Nov 27 07:18:47 crc kubenswrapper[4971]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd
Nov 27 07:18:47 crc kubenswrapper[4971]: >
Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.175250 4971 kuberuntime_container.go:691] "PreStop hook failed" err=<
Nov 27 07:18:47 crc kubenswrapper[4971]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh
Nov 27 07:18:47 crc kubenswrapper[4971]: + source /usr/local/bin/container-scripts/functions
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ OVNBridge=br-int
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ OVNRemote=tcp:localhost:6642
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ OVNEncapType=geneve
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ OVNAvailabilityZones=
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ EnableChassisAsGateway=true
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ PhysicalNetworks=
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ OVNHostName=
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ DB_FILE=/etc/openvswitch/conf.db
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ ovs_dir=/var/lib/openvswitch
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows
Nov 27 07:18:47 crc kubenswrapper[4971]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server
Nov 27 07:18:47 crc kubenswrapper[4971]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Nov 27 07:18:47 crc kubenswrapper[4971]: + sleep 0.5
Nov 27 07:18:47 crc kubenswrapper[4971]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Nov 27 07:18:47 crc kubenswrapper[4971]: + sleep 0.5
Nov 27 07:18:47 crc kubenswrapper[4971]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Nov 27 07:18:47 crc kubenswrapper[4971]: + cleanup_ovsdb_server_semaphore
Nov 27 07:18:47 crc kubenswrapper[4971]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server
Nov 27 07:18:47 crc kubenswrapper[4971]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd
Nov 27 07:18:47 crc kubenswrapper[4971]: > pod="openstack/ovn-controller-ovs-45rt8" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovsdb-server" containerID="cri-o://d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2"
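The xtrace in the PreStop error above is enough to reconstruct the shape of the hook. A minimal sketch of stop-ovsdb-server.sh, in bash, assuming a semaphore wait loop and a cleanup helper defined in the sourced functions file; the loop has no visible bound, which is consistent with the hook being SIGKILLed (exit 137) when it outlives the grace period. This is a reconstruction from the trace, not the shipped script:

    #!/bin/bash
    # Sketch reconstructed from the xtrace above; names follow the trace.
    set -ex
    source "$(dirname "$0")/functions"   # sets OVNBridge, DB_FILE, the semaphore path, etc.

    SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server

    # Poll until some other shutdown path creates the semaphore file saying
    # it is safe to stop ovsdb-server; the 0.5s cadence matches the trace.
    while [ ! -f "$SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE" ]; do
        sleep 0.5
    done

    # Per the trace, the helper just removes the semaphore again.
    cleanup_ovsdb_server_semaphore

    # Stop only ovsdb-server; ovs-vswitchd has its own container and hook.
    /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd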
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/9411d1e4-0384-4caf-b95e-e891b811b402-operator-scripts") pod "novacell16991-account-delete-ftkpc" (UID: "9411d1e4-0384-4caf-b95e-e891b811b402") : configmap "openstack-cell1-scripts" not found Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.266221 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of fc482f882a3b7633d1172ebad636e951956bbf0dc53007a5de81a84d031ea868 is running failed: container process not found" containerID="fc482f882a3b7633d1172ebad636e951956bbf0dc53007a5de81a84d031ea868" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.266369 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.266892 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.267055 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of fc482f882a3b7633d1172ebad636e951956bbf0dc53007a5de81a84d031ea868 is running failed: container process not found" containerID="fc482f882a3b7633d1172ebad636e951956bbf0dc53007a5de81a84d031ea868" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"] Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.267084 4971 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of fc482f882a3b7633d1172ebad636e951956bbf0dc53007a5de81a84d031ea868 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-db2qc" podUID="0aa6e1b6-c18b-4a02-a396-880350cde407" containerName="ovn-controller" Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.267326 4971 projected.go:194] Error preparing data for projected volume kube-api-access-vzqfv for pod openstack/novacell16991-account-delete-ftkpc: failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.267408 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9411d1e4-0384-4caf-b95e-e891b811b402-kube-api-access-vzqfv podName:9411d1e4-0384-4caf-b95e-e891b811b402 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:48.267386092 +0000 UTC m=+1566.459430010 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-vzqfv" (UniqueName: "kubernetes.io/projected/9411d1e4-0384-4caf-b95e-e891b811b402-kube-api-access-vzqfv") pod "novacell16991-account-delete-ftkpc" (UID: "9411d1e4-0384-4caf-b95e-e891b811b402") : failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.269636 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.269678 4971 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-45rt8" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovsdb-server" Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.269989 4971 generic.go:334] "Generic (PLEG): container finished" podID="c2e2055e-1200-46e8-a49e-c6b490702c9b" containerID="8dbea1c5d8961b687109edee2265a2311adecbc668e362357af80dca1ef37927" exitCode=143 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.270062 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c2e2055e-1200-46e8-a49e-c6b490702c9b","Type":"ContainerDied","Data":"8dbea1c5d8961b687109edee2265a2311adecbc668e362357af80dca1ef37927"} Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.274224 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-85d46db856-nzmcq"] Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.274606 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-85d46db856-nzmcq" podUID="fc889790-089f-4007-876f-874880dad975" containerName="barbican-api-log" containerID="cri-o://213b456ae37e6e54ce799e71136d0150d71e3784b58ba6e15c73b1242adc249b" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.274775 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-85d46db856-nzmcq" podUID="fc889790-089f-4007-876f-874880dad975" containerName="barbican-api" containerID="cri-o://5d760ae55be3a486a97462d9bd8576d1f7879d82b2f6d6318d4076d0797b7097" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.286113 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-x4rpv_31d8afb0-f8c2-4d34-879f-260e94779de0/openstack-network-exporter/0.log" Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.286172 4971 generic.go:334] "Generic (PLEG): container finished" podID="31d8afb0-f8c2-4d34-879f-260e94779de0" containerID="ac55cff26fc898c9d7406993cb5d12c6b2b8a73445d04ffc7d5949cc59f486b9" exitCode=2 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.286278 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-x4rpv" event={"ID":"31d8afb0-f8c2-4d34-879f-260e94779de0","Type":"ContainerDied","Data":"ac55cff26fc898c9d7406993cb5d12c6b2b8a73445d04ffc7d5949cc59f486b9"} Nov 27 07:18:47 crc 
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.286311 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-x4rpv" event={"ID":"31d8afb0-f8c2-4d34-879f-260e94779de0","Type":"ContainerDied","Data":"8c11713125217089a5a7277a83bdc4139c332115aa50c492c205289ce5af7eed"}
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.286324 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8c11713125217089a5a7277a83bdc4139c332115aa50c492c205289ce5af7eed"
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.290216 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_e060bc1f-a47d-45ef-88bd-cfd0645f9ce2/ovsdbserver-nb/0.log"
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.290266 4971 generic.go:334] "Generic (PLEG): container finished" podID="e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" containerID="7a4e158b7c7e9c61d16087892eb264fb5c50d094f187b71cab60d863a90c10b0" exitCode=2
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.290285 4971 generic.go:334] "Generic (PLEG): container finished" podID="e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" containerID="ccb0479ba9995bc8550fbc2dbf993c3449d21c9af046183d2f478467bb11fc07" exitCode=143
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.290331 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2","Type":"ContainerDied","Data":"7a4e158b7c7e9c61d16087892eb264fb5c50d094f187b71cab60d863a90c10b0"}
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.290365 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2","Type":"ContainerDied","Data":"ccb0479ba9995bc8550fbc2dbf993c3449d21c9af046183d2f478467bb11fc07"}
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.292894 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.295498 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.295987 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="35b90587-df5b-4f15-8c34-f1b0a8506d85" containerName="nova-metadata-metadata" containerID="cri-o://c581d4d83b4f2a55778d440cd3c9dd25c65d57ded13db9b0fc5dd5a6570dbd13" gracePeriod=30
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.296140 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="35b90587-df5b-4f15-8c34-f1b0a8506d85" containerName="nova-metadata-log" containerID="cri-o://9889def6b44901ab96b7e37483508d92be73fc8357e22d9f41857a9c610d8c0a" gracePeriod=30
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.299357 4971 generic.go:334] "Generic (PLEG): container finished" podID="0aa6e1b6-c18b-4a02-a396-880350cde407" containerID="fc482f882a3b7633d1172ebad636e951956bbf0dc53007a5de81a84d031ea868" exitCode=0
event={"ID":"0aa6e1b6-c18b-4a02-a396-880350cde407","Type":"ContainerDied","Data":"fc482f882a3b7633d1172ebad636e951956bbf0dc53007a5de81a84d031ea868"} Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.329786 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance7577-account-delete-hmqt9" event={"ID":"83ad8915-ac3b-4891-ae7a-9b862747569f","Type":"ContainerStarted","Data":"365e13d3fa7c5a7ff8d525defda7a68d1d726d4644424a9c76752122827d963f"} Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.330249 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.343086 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.343160 4971 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-45rt8" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovs-vswitchd" Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.358871 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_6e13a581-61d0-4a1f-ad42-5f2783417c70/ovsdbserver-sb/0.log" Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.358919 4971 generic.go:334] "Generic (PLEG): container finished" podID="6e13a581-61d0-4a1f-ad42-5f2783417c70" containerID="35daabf30eff3474954a06c52ad28d749b52aae9fc9b781f447588cf8b408b5f" exitCode=2 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.359025 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"6e13a581-61d0-4a1f-ad42-5f2783417c70","Type":"ContainerDied","Data":"35daabf30eff3474954a06c52ad28d749b52aae9fc9b781f447588cf8b408b5f"} Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.360436 4971 projected.go:288] Couldn't get configMap openstack/swift-storage-config-data: configmap "swift-storage-config-data" not found Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.360464 4971 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.360473 4971 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.360484 4971 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.360546 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift podName:b9ccc9bd-d955-4853-986f-95597f2c70e6 nodeName:}" failed. 
Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.360546 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift podName:b9ccc9bd-d955-4853-986f-95597f2c70e6 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:48.360513937 +0000 UTC m=+1566.552557845 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift") pod "swift-storage-0" (UID: "b9ccc9bd-d955-4853-986f-95597f2c70e6") : [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found]
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.385941 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.433420 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.433720 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="69d47892-79da-4e4e-8de2-a84801d4d6b9" containerName="nova-api-log" containerID="cri-o://bf254fe08bcc7d4264d54d1c7f653dc9ece6ef4aa57f2cd553f285b6b06f6503" gracePeriod=30
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.434325 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="69d47892-79da-4e4e-8de2-a84801d4d6b9" containerName="nova-api-api" containerID="cri-o://e6ecd3fddd03d9acdb8874669eb4ae87d6f12ad1116ff9c9cbf2e034df95e87e" gracePeriod=30
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.446079 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.449371 4971 generic.go:334] "Generic (PLEG): container finished" podID="fabf66ef-ae78-4497-998d-95abd13bbab8" containerID="172f687c02402ec48aa19bcb923e165cebb3972c8e1b83d44601a1d5f44f7220" exitCode=0
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.449477 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v4r7q" event={"ID":"fabf66ef-ae78-4497-998d-95abd13bbab8","Type":"ContainerDied","Data":"172f687c02402ec48aa19bcb923e165cebb3972c8e1b83d44601a1d5f44f7220"}
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.449512 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v4r7q" event={"ID":"fabf66ef-ae78-4497-998d-95abd13bbab8","Type":"ContainerDied","Data":"5ea99795f94d15e7d8d2a64a2e44aed9de79b2ff44562f37067d7b501a968ed0"}
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.449538 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ea99795f94d15e7d8d2a64a2e44aed9de79b2ff44562f37067d7b501a968ed0"
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.462185 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="d6439a3c-ee26-467c-8e42-5abbbf390f16" containerName="rabbitmq" containerID="cri-o://77882cbee517c91fc3e1f3a6a066be0183a0d5dff26c18bb82c636aa29cd5b4c" gracePeriod=604800
Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.468228 4971 generic.go:334] "Generic (PLEG): container finished" podID="2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" containerID="9cbb1e297d7eff42db8e89fa629eec8f4c7fb3f3405eaec525774254aea00153" exitCode=143
containerName="account-server" containerID="cri-o://dc6a6bc3b9ba7cb3360f786ead574e2d92d8ae060c201dc5a7136ed40e797c3d" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.468973 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d","Type":"ContainerDied","Data":"9cbb1e297d7eff42db8e89fa629eec8f4c7fb3f3405eaec525774254aea00153"} Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.470789 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="swift-recon-cron" containerID="cri-o://85a6791fcdc604b55a12b82702e281ab125eb3d1d4235331866d471d20e24d57" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.470883 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="rsync" containerID="cri-o://c21be005b4f8466d61ef2da25c8b9ce2faf1be2408be9a01a2cad03ede7d5772" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.470959 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-expirer" containerID="cri-o://4a867f795f4a9c59b6d95924698a7ae6fcb777907d413b4d91002386f13f43ce" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.471036 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-updater" containerID="cri-o://91caa3649b14fd1b0674535f37443068dd6301ac20d525ca2334e554de92f5ce" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.471565 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-auditor" containerID="cri-o://d156d1511b10631f281c42115007fa1a0c13d1d792aafd4fd1d7868813992990" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.471615 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-replicator" containerID="cri-o://61eee222331bc4cf51d73a2109e6aaf653d19b6ab409e2c0ab5b7ddfc9466313" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.471655 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-server" containerID="cri-o://9d10d81588ceda578cec676b35a37e455caa807351436843b76133d0dfb2603b" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.471695 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="container-updater" containerID="cri-o://7170edadd0854106871150c55e357e6c6f8c393bb654d550d3f39d6435d67112" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.471736 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="container-auditor" 
containerID="cri-o://fad54635f828db66a56bd717dd26946c6194a973dd020bda60d63ac8508944b7" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.471814 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="container-replicator" containerID="cri-o://3b8d32d89ca168e8803a3c1d366cfdd72575c01e7332b5f3d73f865eaf632a07" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.471856 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="container-server" containerID="cri-o://f747a88650d47a0b28abe01bd9c86b1ce37125bcbf5d4510f59880020f4c6436" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.471889 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="account-reaper" containerID="cri-o://d4737f67b50d857fb0c555964409bd7354a856636790cbe70eea9e16bffacc6e" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.471921 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="account-auditor" containerID="cri-o://6c4925ec24ba366b0b0454d48d8855400c5d48953e95c1f7428155fd84127936" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.471950 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="account-replicator" containerID="cri-o://b7eb7df2e485a291e4bff098a5b8b04d0f94dc8a687ed53e0061a899159d7b70" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.485053 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.485288 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://86a75ee86a1e45fea7cc0b2bf039f079e8bc453673a092221bda14b2056c6baf" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.502934 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance7577-account-delete-hmqt9"] Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.538446 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.569930 4971 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Nov 27 07:18:47 crc kubenswrapper[4971]: E1127 07:18:47.570006 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-config-data podName:640c3829-d2e9-49e1-82e3-bd213aa992dd nodeName:}" failed. No retries permitted until 2025-11-27 07:18:48.569988486 +0000 UTC m=+1566.762032404 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-config-data") pod "rabbitmq-cell1-server-0" (UID: "640c3829-d2e9-49e1-82e3-bd213aa992dd") : configmap "rabbitmq-cell1-config-data" not found Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.667585 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement0147-account-delete-w7x5l"] Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.691425 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="640c3829-d2e9-49e1-82e3-bd213aa992dd" containerName="rabbitmq" containerID="cri-o://f5b92bc920247db85a57df75b23c5a3d6d3ba39c4b4024014d369972c4b8162a" gracePeriod=604800 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.707825 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron1af4-account-delete-48m5w" Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.723696 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.723944 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="36cda10d-7f8f-403c-82b8-fbbdf89e8ed5" containerName="nova-scheduler-scheduler" containerID="cri-o://b97556ba2fd93384c3dcc5c7577bad7f95a4205ce2fc105bf6f734f22b25ff2a" gracePeriod=30 Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.736266 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi6022-account-delete-bhttv" Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.793668 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-x4rpv_31d8afb0-f8c2-4d34-879f-260e94779de0/openstack-network-exporter/0.log" Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.793729 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.803610 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican2596-account-delete-pttgn"] Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.826012 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-v4r7q" Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.908859 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/31d8afb0-f8c2-4d34-879f-260e94779de0-ovn-rundir\") pod \"31d8afb0-f8c2-4d34-879f-260e94779de0\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.908900 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31d8afb0-f8c2-4d34-879f-260e94779de0-combined-ca-bundle\") pod \"31d8afb0-f8c2-4d34-879f-260e94779de0\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.908931 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/31d8afb0-f8c2-4d34-879f-260e94779de0-ovs-rundir\") pod \"31d8afb0-f8c2-4d34-879f-260e94779de0\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.908974 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v5bb8\" (UniqueName: \"kubernetes.io/projected/fabf66ef-ae78-4497-998d-95abd13bbab8-kube-api-access-v5bb8\") pod \"fabf66ef-ae78-4497-998d-95abd13bbab8\" (UID: \"fabf66ef-ae78-4497-998d-95abd13bbab8\") " Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.909043 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fabf66ef-ae78-4497-998d-95abd13bbab8-catalog-content\") pod \"fabf66ef-ae78-4497-998d-95abd13bbab8\" (UID: \"fabf66ef-ae78-4497-998d-95abd13bbab8\") " Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.909202 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fabf66ef-ae78-4497-998d-95abd13bbab8-utilities\") pod \"fabf66ef-ae78-4497-998d-95abd13bbab8\" (UID: \"fabf66ef-ae78-4497-998d-95abd13bbab8\") " Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.909234 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-48q8t\" (UniqueName: \"kubernetes.io/projected/31d8afb0-f8c2-4d34-879f-260e94779de0-kube-api-access-48q8t\") pod \"31d8afb0-f8c2-4d34-879f-260e94779de0\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.909260 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/31d8afb0-f8c2-4d34-879f-260e94779de0-metrics-certs-tls-certs\") pod \"31d8afb0-f8c2-4d34-879f-260e94779de0\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.909282 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31d8afb0-f8c2-4d34-879f-260e94779de0-config\") pod \"31d8afb0-f8c2-4d34-879f-260e94779de0\" (UID: \"31d8afb0-f8c2-4d34-879f-260e94779de0\") " Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.912981 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fabf66ef-ae78-4497-998d-95abd13bbab8-utilities" (OuterVolumeSpecName: "utilities") pod 
"fabf66ef-ae78-4497-998d-95abd13bbab8" (UID: "fabf66ef-ae78-4497-998d-95abd13bbab8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.913522 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8afb0-f8c2-4d34-879f-260e94779de0-config" (OuterVolumeSpecName: "config") pod "31d8afb0-f8c2-4d34-879f-260e94779de0" (UID: "31d8afb0-f8c2-4d34-879f-260e94779de0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.913568 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/31d8afb0-f8c2-4d34-879f-260e94779de0-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "31d8afb0-f8c2-4d34-879f-260e94779de0" (UID: "31d8afb0-f8c2-4d34-879f-260e94779de0"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.913601 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/31d8afb0-f8c2-4d34-879f-260e94779de0-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "31d8afb0-f8c2-4d34-879f-260e94779de0" (UID: "31d8afb0-f8c2-4d34-879f-260e94779de0"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.922173 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8afb0-f8c2-4d34-879f-260e94779de0-kube-api-access-48q8t" (OuterVolumeSpecName: "kube-api-access-48q8t") pod "31d8afb0-f8c2-4d34-879f-260e94779de0" (UID: "31d8afb0-f8c2-4d34-879f-260e94779de0"). InnerVolumeSpecName "kube-api-access-48q8t". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:47 crc kubenswrapper[4971]: I1127 07:18:47.925775 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fabf66ef-ae78-4497-998d-95abd13bbab8-kube-api-access-v5bb8" (OuterVolumeSpecName: "kube-api-access-v5bb8") pod "fabf66ef-ae78-4497-998d-95abd13bbab8" (UID: "fabf66ef-ae78-4497-998d-95abd13bbab8"). InnerVolumeSpecName "kube-api-access-v5bb8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.022120 4971 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/31d8afb0-f8c2-4d34-879f-260e94779de0-ovn-rundir\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.022155 4971 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/31d8afb0-f8c2-4d34-879f-260e94779de0-ovs-rundir\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.022164 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v5bb8\" (UniqueName: \"kubernetes.io/projected/fabf66ef-ae78-4497-998d-95abd13bbab8-kube-api-access-v5bb8\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.022176 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fabf66ef-ae78-4497-998d-95abd13bbab8-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.022185 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-48q8t\" (UniqueName: \"kubernetes.io/projected/31d8afb0-f8c2-4d34-879f-260e94779de0-kube-api-access-48q8t\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.022196 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31d8afb0-f8c2-4d34-879f-260e94779de0-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.055942 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8afb0-f8c2-4d34-879f-260e94779de0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "31d8afb0-f8c2-4d34-879f-260e94779de0" (UID: "31d8afb0-f8c2-4d34-879f-260e94779de0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.124423 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31d8afb0-f8c2-4d34-879f-260e94779de0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.126525 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fabf66ef-ae78-4497-998d-95abd13bbab8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fabf66ef-ae78-4497-998d-95abd13bbab8" (UID: "fabf66ef-ae78-4497-998d-95abd13bbab8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:48 crc kubenswrapper[4971]: E1127 07:18:48.155926 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 203479ecfee56a9441b43631622196bf609ce8fbfc30cbc6c31eb9c3e9440056 is running failed: container process not found" containerID="203479ecfee56a9441b43631622196bf609ce8fbfc30cbc6c31eb9c3e9440056" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 27 07:18:48 crc kubenswrapper[4971]: E1127 07:18:48.159088 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 203479ecfee56a9441b43631622196bf609ce8fbfc30cbc6c31eb9c3e9440056 is running failed: container process not found" containerID="203479ecfee56a9441b43631622196bf609ce8fbfc30cbc6c31eb9c3e9440056" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 27 07:18:48 crc kubenswrapper[4971]: E1127 07:18:48.159654 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 203479ecfee56a9441b43631622196bf609ce8fbfc30cbc6c31eb9c3e9440056 is running failed: container process not found" containerID="203479ecfee56a9441b43631622196bf609ce8fbfc30cbc6c31eb9c3e9440056" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 27 07:18:48 crc kubenswrapper[4971]: E1127 07:18:48.159696 4971 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 203479ecfee56a9441b43631622196bf609ce8fbfc30cbc6c31eb9c3e9440056 is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-sb-0" podUID="6e13a581-61d0-4a1f-ad42-5f2783417c70" containerName="ovsdbserver-sb" Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.164120 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="bcf58afd-21c6-4c9d-8702-09bc98859732" containerName="galera" containerID="cri-o://d8cae12287c5ef396f90e407b77789281e9654588ff7861e1db29623eaae2e8c" gracePeriod=30 Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.243675 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fabf66ef-ae78-4497-998d-95abd13bbab8-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.245256 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8afb0-f8c2-4d34-879f-260e94779de0-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "31d8afb0-f8c2-4d34-879f-260e94779de0" (UID: "31d8afb0-f8c2-4d34-879f-260e94779de0"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.271805 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dj8cq"] Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.307706 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.307970 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad" containerName="nova-cell0-conductor-conductor" containerID="cri-o://960448ee56e126a49d6f57a232fb215e9851f384a304f93ed3bcb04c83b92864" gracePeriod=30 Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.345613 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzqfv\" (UniqueName: \"kubernetes.io/projected/9411d1e4-0384-4caf-b95e-e891b811b402-kube-api-access-vzqfv\") pod \"novacell16991-account-delete-ftkpc\" (UID: \"9411d1e4-0384-4caf-b95e-e891b811b402\") " pod="openstack/novacell16991-account-delete-ftkpc" Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.347076 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9411d1e4-0384-4caf-b95e-e891b811b402-operator-scripts\") pod \"novacell16991-account-delete-ftkpc\" (UID: \"9411d1e4-0384-4caf-b95e-e891b811b402\") " pod="openstack/novacell16991-account-delete-ftkpc" Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.347456 4971 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/31d8afb0-f8c2-4d34-879f-260e94779de0-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:48 crc kubenswrapper[4971]: E1127 07:18:48.347562 4971 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Nov 27 07:18:48 crc kubenswrapper[4971]: E1127 07:18:48.347615 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9411d1e4-0384-4caf-b95e-e891b811b402-operator-scripts podName:9411d1e4-0384-4caf-b95e-e891b811b402 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:50.347596575 +0000 UTC m=+1568.539640493 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/9411d1e4-0384-4caf-b95e-e891b811b402-operator-scripts") pod "novacell16991-account-delete-ftkpc" (UID: "9411d1e4-0384-4caf-b95e-e891b811b402") : configmap "openstack-cell1-scripts" not found Nov 27 07:18:48 crc kubenswrapper[4971]: E1127 07:18:48.354847 4971 projected.go:194] Error preparing data for projected volume kube-api-access-vzqfv for pod openstack/novacell16991-account-delete-ftkpc: failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Nov 27 07:18:48 crc kubenswrapper[4971]: E1127 07:18:48.354922 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9411d1e4-0384-4caf-b95e-e891b811b402-kube-api-access-vzqfv podName:9411d1e4-0384-4caf-b95e-e891b811b402 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:50.354899065 +0000 UTC m=+1568.546942983 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-vzqfv" (UniqueName: "kubernetes.io/projected/9411d1e4-0384-4caf-b95e-e891b811b402-kube-api-access-vzqfv") pod "novacell16991-account-delete-ftkpc" (UID: "9411d1e4-0384-4caf-b95e-e891b811b402") : failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.363508 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dj8cq"] Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.378852 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.379151 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="7f461fed-9df2-44a5-b99c-17f30adf0d9c" containerName="nova-cell1-conductor-conductor" containerID="cri-o://bacf1459d637ebaf1473ffbba9c90d2886c77004f0c01d13c0668368ade96df2" gracePeriod=30 Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.387488 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6npvv"] Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.397390 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6npvv"] Nov 27 07:18:48 crc kubenswrapper[4971]: E1127 07:18:48.452258 4971 projected.go:288] Couldn't get configMap openstack/swift-storage-config-data: configmap "swift-storage-config-data" not found Nov 27 07:18:48 crc kubenswrapper[4971]: E1127 07:18:48.452293 4971 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found Nov 27 07:18:48 crc kubenswrapper[4971]: E1127 07:18:48.452302 4971 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 27 07:18:48 crc kubenswrapper[4971]: E1127 07:18:48.452316 4971 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Nov 27 07:18:48 crc kubenswrapper[4971]: E1127 07:18:48.452369 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift podName:b9ccc9bd-d955-4853-986f-95597f2c70e6 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:50.452351035 +0000 UTC m=+1568.644394953 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift") pod "swift-storage-0" (UID: "b9ccc9bd-d955-4853-986f-95597f2c70e6") : [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.499661 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican2596-account-delete-pttgn" event={"ID":"aba3aa08-61e1-48d3-bf4b-cb45e0b71561","Type":"ContainerStarted","Data":"9544c98a91892292ec750565e653396ca5b32ab1848e93c8e927a3485832c427"} Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.554130 4971 generic.go:334] "Generic (PLEG): container finished" podID="35b90587-df5b-4f15-8c34-f1b0a8506d85" containerID="9889def6b44901ab96b7e37483508d92be73fc8357e22d9f41857a9c610d8c0a" exitCode=143 Nov 27 07:18:48 crc kubenswrapper[4971]: I1127 07:18:48.554220 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"35b90587-df5b-4f15-8c34-f1b0a8506d85","Type":"ContainerDied","Data":"9889def6b44901ab96b7e37483508d92be73fc8357e22d9f41857a9c610d8c0a"} Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:48.619762 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b97556ba2fd93384c3dcc5c7577bad7f95a4205ce2fc105bf6f734f22b25ff2a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:48.633998 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b97556ba2fd93384c3dcc5c7577bad7f95a4205ce2fc105bf6f734f22b25ff2a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.634977 4971 generic.go:334] "Generic (PLEG): container finished" podID="51e69600-7930-4ccb-a8ff-0ad3377bf3a7" containerID="0f8d01fbf6c7eb20e7a39706dcdd74da63c98558af8036f29f36eda787a01536" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.637438 4971 generic.go:334] "Generic (PLEG): container finished" podID="fc889790-089f-4007-876f-874880dad975" containerID="213b456ae37e6e54ce799e71136d0150d71e3784b58ba6e15c73b1242adc249b" exitCode=143 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.642859 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_e060bc1f-a47d-45ef-88bd-cfd0645f9ce2/ovsdbserver-nb/0.log" Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:48.643418 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bacf1459d637ebaf1473ffbba9c90d2886c77004f0c01d13c0668368ade96df2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:48.661829 4971 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:48.661899 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-config-data podName:640c3829-d2e9-49e1-82e3-bd213aa992dd 
nodeName:}" failed. No retries permitted until 2025-11-27 07:18:50.661883404 +0000 UTC m=+1568.853927312 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-config-data") pod "rabbitmq-cell1-server-0" (UID: "640c3829-d2e9-49e1-82e3-bd213aa992dd") : configmap "rabbitmq-cell1-config-data" not found Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:48.663968 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b97556ba2fd93384c3dcc5c7577bad7f95a4205ce2fc105bf6f734f22b25ff2a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:48.664024 4971 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="36cda10d-7f8f-403c-82b8-fbbdf89e8ed5" containerName="nova-scheduler-scheduler" Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:48.697815 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bacf1459d637ebaf1473ffbba9c90d2886c77004f0c01d13c0668368ade96df2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:48.701718 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bacf1459d637ebaf1473ffbba9c90d2886c77004f0c01d13c0668368ade96df2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:48.701795 4971 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="7f461fed-9df2-44a5-b99c-17f30adf0d9c" containerName="nova-cell1-conductor-conductor" Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.717884 4971 generic.go:334] "Generic (PLEG): container finished" podID="6e067498-7bc0-4bc5-a9a6-696c8aa3cf71" containerID="cce9aeab95941685da7fd97b4408ffcb6ac7f7c473cf5bbf43edec5fe397efd6" exitCode=143 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.767400 4971 generic.go:334] "Generic (PLEG): container finished" podID="83ad8915-ac3b-4891-ae7a-9b862747569f" containerID="f42ea15691565b6f744579005d0dff4dd176377414435bc8e4f9733d230cc174" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.803027 4971 generic.go:334] "Generic (PLEG): container finished" podID="69d47892-79da-4e4e-8de2-a84801d4d6b9" containerID="bf254fe08bcc7d4264d54d1c7f653dc9ece6ef4aa57f2cd553f285b6b06f6503" exitCode=143 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.818184 4971 generic.go:334] "Generic (PLEG): container finished" podID="622e028f-779d-4306-923c-ee204fdef6b0" containerID="360d9e165c0c9e2b9b0b1cee20e9ddcb63854ee168b8dc44e7daf6d665e3532e" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.926226 4971 generic.go:334] "Generic (PLEG): container finished" podID="b9ccc9bd-d955-4853-986f-95597f2c70e6" 
containerID="c21be005b4f8466d61ef2da25c8b9ce2faf1be2408be9a01a2cad03ede7d5772" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.926253 4971 generic.go:334] "Generic (PLEG): container finished" podID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerID="4a867f795f4a9c59b6d95924698a7ae6fcb777907d413b4d91002386f13f43ce" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.926261 4971 generic.go:334] "Generic (PLEG): container finished" podID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerID="91caa3649b14fd1b0674535f37443068dd6301ac20d525ca2334e554de92f5ce" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.926269 4971 generic.go:334] "Generic (PLEG): container finished" podID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerID="d156d1511b10631f281c42115007fa1a0c13d1d792aafd4fd1d7868813992990" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.926276 4971 generic.go:334] "Generic (PLEG): container finished" podID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerID="61eee222331bc4cf51d73a2109e6aaf653d19b6ab409e2c0ab5b7ddfc9466313" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.926282 4971 generic.go:334] "Generic (PLEG): container finished" podID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerID="9d10d81588ceda578cec676b35a37e455caa807351436843b76133d0dfb2603b" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.926289 4971 generic.go:334] "Generic (PLEG): container finished" podID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerID="7170edadd0854106871150c55e357e6c6f8c393bb654d550d3f39d6435d67112" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.926295 4971 generic.go:334] "Generic (PLEG): container finished" podID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerID="fad54635f828db66a56bd717dd26946c6194a973dd020bda60d63ac8508944b7" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.926300 4971 generic.go:334] "Generic (PLEG): container finished" podID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerID="3b8d32d89ca168e8803a3c1d366cfdd72575c01e7332b5f3d73f865eaf632a07" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.926306 4971 generic.go:334] "Generic (PLEG): container finished" podID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerID="f747a88650d47a0b28abe01bd9c86b1ce37125bcbf5d4510f59880020f4c6436" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.926312 4971 generic.go:334] "Generic (PLEG): container finished" podID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerID="d4737f67b50d857fb0c555964409bd7354a856636790cbe70eea9e16bffacc6e" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.926317 4971 generic.go:334] "Generic (PLEG): container finished" podID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerID="6c4925ec24ba366b0b0454d48d8855400c5d48953e95c1f7428155fd84127936" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.926323 4971 generic.go:334] "Generic (PLEG): container finished" podID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerID="b7eb7df2e485a291e4bff098a5b8b04d0f94dc8a687ed53e0061a899159d7b70" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.926329 4971 generic.go:334] "Generic (PLEG): container finished" podID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerID="dc6a6bc3b9ba7cb3360f786ead574e2d92d8ae060c201dc5a7136ed40e797c3d" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.935793 4971 generic.go:334] "Generic (PLEG): container finished" 
podID="82a33cc1-32fe-464f-ac33-b802fd32a4c1" containerID="bce205f17147516a60414770d105f40e40e7784c6e25ddf99d12115151c6a716" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.935818 4971 generic.go:334] "Generic (PLEG): container finished" podID="82a33cc1-32fe-464f-ac33-b802fd32a4c1" containerID="22d3d2192ae3c18ce743c1ddda10575c592232c7adf55747fe07b64dfb765bb4" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.949688 4971 generic.go:334] "Generic (PLEG): container finished" podID="fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea" containerID="86a75ee86a1e45fea7cc0b2bf039f079e8bc453673a092221bda14b2056c6baf" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:48.973006 4971 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:48.973056 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-config-data podName:d6439a3c-ee26-467c-8e42-5abbbf390f16 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:52.973040765 +0000 UTC m=+1571.165084683 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-config-data") pod "rabbitmq-server-0" (UID: "d6439a3c-ee26-467c-8e42-5abbbf390f16") : configmap "rabbitmq-config-data" not found Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.978245 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_6e13a581-61d0-4a1f-ad42-5f2783417c70/ovsdbserver-sb/0.log" Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.978280 4971 generic.go:334] "Generic (PLEG): container finished" podID="6e13a581-61d0-4a1f-ad42-5f2783417c70" containerID="203479ecfee56a9441b43631622196bf609ce8fbfc30cbc6c31eb9c3e9440056" exitCode=143 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:48.997991 4971 generic.go:334] "Generic (PLEG): container finished" podID="6754a19e-e024-4b15-8464-49e127bd35ad" containerID="11a43f33fa3835d6bc3caba0e4cd634a1bc0dcb8a9fc8879c3ba3bbe47cba2b2" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.020128 4971 generic.go:334] "Generic (PLEG): container finished" podID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" exitCode=0 Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.039743 4971 generic.go:334] "Generic (PLEG): container finished" podID="b3616559-d640-4b3b-a4b3-b9d9af1d0061" containerID="85ed74a43c71f2fd0dc70f5ef0da68149334b289a557cdd8a6923696f588e66b" exitCode=143 Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:49.070248 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ccb0479ba9995bc8550fbc2dbf993c3449d21c9af046183d2f478467bb11fc07 is running failed: container process not found" containerID="ccb0479ba9995bc8550fbc2dbf993c3449d21c9af046183d2f478467bb11fc07" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:49.076920 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ccb0479ba9995bc8550fbc2dbf993c3449d21c9af046183d2f478467bb11fc07 is running failed: container process not found" 
containerID="ccb0479ba9995bc8550fbc2dbf993c3449d21c9af046183d2f478467bb11fc07" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.078337 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v4r7q" Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.078372 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-x4rpv" Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:49.080811 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ccb0479ba9995bc8550fbc2dbf993c3449d21c9af046183d2f478467bb11fc07 is running failed: container process not found" containerID="ccb0479ba9995bc8550fbc2dbf993c3449d21c9af046183d2f478467bb11fc07" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:49.080857 4971 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ccb0479ba9995bc8550fbc2dbf993c3449d21c9af046183d2f478467bb11fc07 is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-nb-0" podUID="e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" containerName="ovsdbserver-nb" Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.145272 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="146ec507-b566-4056-92f6-2f12b2d6f11c" path="/var/lib/kubelet/pods/146ec507-b566-4056-92f6-2f12b2d6f11c/volumes" Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.148869 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="158bee38-b35a-43f7-a8e2-547220d8f339" path="/var/lib/kubelet/pods/158bee38-b35a-43f7-a8e2-547220d8f339/volumes" Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.149767 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="457ab172-18f9-4aa4-85b8-eae591089ad7" path="/var/lib/kubelet/pods/457ab172-18f9-4aa4-85b8-eae591089ad7/volumes" Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.150370 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e9e64a6-205f-4e28-87a7-d9061d5931d1" path="/var/lib/kubelet/pods/4e9e64a6-205f-4e28-87a7-d9061d5931d1/volumes" Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.151652 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58ccbc84-3120-4e10-a215-07d41267629d" path="/var/lib/kubelet/pods/58ccbc84-3120-4e10-a215-07d41267629d/volumes" Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154176 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-669669cf59-5rwqh" event={"ID":"b3fa1872-f7d9-4531-bc33-619419f530a5","Type":"ContainerDied","Data":"36d81b94fdfead0137bb05bc0e7411297f1b21b70c1bddab11f0ae4747cb68a6"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154218 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="36d81b94fdfead0137bb05bc0e7411297f1b21b70c1bddab11f0ae4747cb68a6" Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154232 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder6f3a-account-delete-nzrmk" event={"ID":"51e69600-7930-4ccb-a8ff-0ad3377bf3a7","Type":"ContainerDied","Data":"0f8d01fbf6c7eb20e7a39706dcdd74da63c98558af8036f29f36eda787a01536"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154323 4971 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-85d46db856-nzmcq" event={"ID":"fc889790-089f-4007-876f-874880dad975","Type":"ContainerDied","Data":"213b456ae37e6e54ce799e71136d0150d71e3784b58ba6e15c73b1242adc249b"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154372 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f944caeffccc2743f5571c0ce0f60199160438ea14fabd30d8720b6c08ab302" Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154385 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2","Type":"ContainerDied","Data":"9bc43782eba8c4cf00dbf1e71cf92628b20a8a75f49f39680989ed8e4c815c99"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154399 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9bc43782eba8c4cf00dbf1e71cf92628b20a8a75f49f39680989ed8e4c815c99" Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154409 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc" event={"ID":"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71","Type":"ContainerDied","Data":"cce9aeab95941685da7fd97b4408ffcb6ac7f7c473cf5bbf43edec5fe397efd6"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154450 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance7577-account-delete-hmqt9" event={"ID":"83ad8915-ac3b-4891-ae7a-9b862747569f","Type":"ContainerDied","Data":"f42ea15691565b6f744579005d0dff4dd176377414435bc8e4f9733d230cc174"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154466 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"69d47892-79da-4e4e-8de2-a84801d4d6b9","Type":"ContainerDied","Data":"bf254fe08bcc7d4264d54d1c7f653dc9ece6ef4aa57f2cd553f285b6b06f6503"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154480 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7b8cfffdbf-92cj6" event={"ID":"622e028f-779d-4306-923c-ee204fdef6b0","Type":"ContainerDied","Data":"360d9e165c0c9e2b9b0b1cee20e9ddcb63854ee168b8dc44e7daf6d665e3532e"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154495 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerDied","Data":"c21be005b4f8466d61ef2da25c8b9ce2faf1be2408be9a01a2cad03ede7d5772"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154541 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerDied","Data":"4a867f795f4a9c59b6d95924698a7ae6fcb777907d413b4d91002386f13f43ce"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154556 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerDied","Data":"91caa3649b14fd1b0674535f37443068dd6301ac20d525ca2334e554de92f5ce"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154567 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerDied","Data":"d156d1511b10631f281c42115007fa1a0c13d1d792aafd4fd1d7868813992990"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154580 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerDied","Data":"61eee222331bc4cf51d73a2109e6aaf653d19b6ab409e2c0ab5b7ddfc9466313"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154618 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerDied","Data":"9d10d81588ceda578cec676b35a37e455caa807351436843b76133d0dfb2603b"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154631 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerDied","Data":"7170edadd0854106871150c55e357e6c6f8c393bb654d550d3f39d6435d67112"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154642 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerDied","Data":"fad54635f828db66a56bd717dd26946c6194a973dd020bda60d63ac8508944b7"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154655 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerDied","Data":"3b8d32d89ca168e8803a3c1d366cfdd72575c01e7332b5f3d73f865eaf632a07"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154707 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerDied","Data":"f747a88650d47a0b28abe01bd9c86b1ce37125bcbf5d4510f59880020f4c6436"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154723 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerDied","Data":"d4737f67b50d857fb0c555964409bd7354a856636790cbe70eea9e16bffacc6e"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154734 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerDied","Data":"6c4925ec24ba366b0b0454d48d8855400c5d48953e95c1f7428155fd84127936"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154746 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerDied","Data":"b7eb7df2e485a291e4bff098a5b8b04d0f94dc8a687ed53e0061a899159d7b70"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154780 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerDied","Data":"dc6a6bc3b9ba7cb3360f786ead574e2d92d8ae060c201dc5a7136ed40e797c3d"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154793 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-66674dfb5f-52hcq" event={"ID":"82a33cc1-32fe-464f-ac33-b802fd32a4c1","Type":"ContainerDied","Data":"bce205f17147516a60414770d105f40e40e7784c6e25ddf99d12115151c6a716"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154807 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-66674dfb5f-52hcq" event={"ID":"82a33cc1-32fe-464f-ac33-b802fd32a4c1","Type":"ContainerDied","Data":"22d3d2192ae3c18ce743c1ddda10575c592232c7adf55747fe07b64dfb765bb4"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 
07:18:49.154820 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea","Type":"ContainerDied","Data":"86a75ee86a1e45fea7cc0b2bf039f079e8bc453673a092221bda14b2056c6baf"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154854 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"6e13a581-61d0-4a1f-ad42-5f2783417c70","Type":"ContainerDied","Data":"203479ecfee56a9441b43631622196bf609ce8fbfc30cbc6c31eb9c3e9440056"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154872 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"6e13a581-61d0-4a1f-ad42-5f2783417c70","Type":"ContainerDied","Data":"2d362db6b1447b211c560391e76ed1612f2cc461550022715c739ababa1142f2"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154885 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d362db6b1447b211c560391e76ed1612f2cc461550022715c739ababa1142f2" Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154894 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6754a19e-e024-4b15-8464-49e127bd35ad","Type":"ContainerDied","Data":"11a43f33fa3835d6bc3caba0e4cd634a1bc0dcb8a9fc8879c3ba3bbe47cba2b2"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154910 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement0147-account-delete-w7x5l" event={"ID":"c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d","Type":"ContainerStarted","Data":"db9036a8968208018dfc91ff69b123ee88b9ad6478e7efc9d25548f5de33cf10"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154946 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-45rt8" event={"ID":"6facf3b5-48aa-4a38-823e-6b7adbbcdfee","Type":"ContainerDied","Data":"d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154962 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7865dbd7d9-zp55h" event={"ID":"b3616559-d640-4b3b-a4b3-b9d9af1d0061","Type":"ContainerDied","Data":"85ed74a43c71f2fd0dc70f5ef0da68149334b289a557cdd8a6923696f588e66b"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154976 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-db2qc" event={"ID":"0aa6e1b6-c18b-4a02-a396-880350cde407","Type":"ContainerDied","Data":"dcd26d31f61dc261b0ae4502413881a35341b72b65ea0b54917e2204fa51658a"} Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.154988 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dcd26d31f61dc261b0ae4502413881a35341b72b65ea0b54917e2204fa51658a" Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:49.157360 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c9d172c81f9f3625bed2063b71b15aa0aaee90448e7484f3536af33829d91bf8" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:49.158880 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="c9d172c81f9f3625bed2063b71b15aa0aaee90448e7484f3536af33829d91bf8" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:49.180524 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c9d172c81f9f3625bed2063b71b15aa0aaee90448e7484f3536af33829d91bf8" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:49.180615 4971 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="abd4a589-1b2e-4559-852f-2c27c0d8c459" containerName="ovn-northd" Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.644256 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.163:8776/healthcheck\": read tcp 10.217.0.2:49106->10.217.0.163:8776: read: connection reset by peer" Nov 27 07:18:49 crc kubenswrapper[4971]: E1127 07:18:49.654400 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod82a33cc1_32fe_464f_ac33_b802fd32a4c1.slice/crio-bce205f17147516a60414770d105f40e40e7784c6e25ddf99d12115151c6a716.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod83ad8915_ac3b_4891_ae7a_9b862747569f.slice/crio-f42ea15691565b6f744579005d0dff4dd176377414435bc8e4f9733d230cc174.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9ccc9bd_d955_4853_986f_95597f2c70e6.slice/crio-conmon-f747a88650d47a0b28abe01bd9c86b1ce37125bcbf5d4510f59880020f4c6436.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod83ad8915_ac3b_4891_ae7a_9b862747569f.slice/crio-conmon-f42ea15691565b6f744579005d0dff4dd176377414435bc8e4f9733d230cc174.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod82a33cc1_32fe_464f_ac33_b802fd32a4c1.slice/crio-conmon-bce205f17147516a60414770d105f40e40e7784c6e25ddf99d12115151c6a716.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9ccc9bd_d955_4853_986f_95597f2c70e6.slice/crio-conmon-c21be005b4f8466d61ef2da25c8b9ce2faf1be2408be9a01a2cad03ede7d5772.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9ccc9bd_d955_4853_986f_95597f2c70e6.slice/crio-9d10d81588ceda578cec676b35a37e455caa807351436843b76133d0dfb2603b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9ccc9bd_d955_4853_986f_95597f2c70e6.slice/crio-conmon-9d10d81588ceda578cec676b35a37e455caa807351436843b76133d0dfb2603b.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3616559_d640_4b3b_a4b3_b9d9af1d0061.slice/crio-432905efdade8907249e6259f8519c70b607016106a34709d73f373c91efe0a5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa693ecf_e3d2_4a1b_9bd0_df1e508e3bea.slice/crio-86a75ee86a1e45fea7cc0b2bf039f079e8bc453673a092221bda14b2056c6baf.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9ccc9bd_d955_4853_986f_95597f2c70e6.slice/crio-conmon-dc6a6bc3b9ba7cb3360f786ead574e2d92d8ae060c201dc5a7136ed40e797c3d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9ccc9bd_d955_4853_986f_95597f2c70e6.slice/crio-c21be005b4f8466d61ef2da25c8b9ce2faf1be2408be9a01a2cad03ede7d5772.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa693ecf_e3d2_4a1b_9bd0_df1e508e3bea.slice/crio-conmon-86a75ee86a1e45fea7cc0b2bf039f079e8bc453673a092221bda14b2056c6baf.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51e69600_7930_4ccb_a8ff_0ad3377bf3a7.slice/crio-conmon-0f8d01fbf6c7eb20e7a39706dcdd74da63c98558af8036f29f36eda787a01536.scope\": RecentStats: unable to find data in memory cache]" Nov 27 07:18:49 crc kubenswrapper[4971]: I1127 07:18:49.768093 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron1af4-account-delete-48m5w"] Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.110905 4971 generic.go:334] "Generic (PLEG): container finished" podID="c88f9c00-b02f-4070-b81e-733009e44691" containerID="669e2c2b2985b62934235229da4195fd416e9129758e938e024eadd9d158c9c5" exitCode=0 Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.111100 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-65cf5bcbb-zf65t" event={"ID":"c88f9c00-b02f-4070-b81e-733009e44691","Type":"ContainerDied","Data":"669e2c2b2985b62934235229da4195fd416e9129758e938e024eadd9d158c9c5"} Nov 27 07:18:50 crc kubenswrapper[4971]: E1127 07:18:50.118141 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d8cae12287c5ef396f90e407b77789281e9654588ff7861e1db29623eaae2e8c is running failed: container process not found" containerID="d8cae12287c5ef396f90e407b77789281e9654588ff7861e1db29623eaae2e8c" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Nov 27 07:18:50 crc kubenswrapper[4971]: E1127 07:18:50.121825 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d8cae12287c5ef396f90e407b77789281e9654588ff7861e1db29623eaae2e8c is running failed: container process not found" containerID="d8cae12287c5ef396f90e407b77789281e9654588ff7861e1db29623eaae2e8c" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Nov 27 07:18:50 crc kubenswrapper[4971]: E1127 07:18:50.122382 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d8cae12287c5ef396f90e407b77789281e9654588ff7861e1db29623eaae2e8c is running failed: container process not found" 
containerID="d8cae12287c5ef396f90e407b77789281e9654588ff7861e1db29623eaae2e8c" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.122430 4971 generic.go:334] "Generic (PLEG): container finished" podID="bcf58afd-21c6-4c9d-8702-09bc98859732" containerID="d8cae12287c5ef396f90e407b77789281e9654588ff7861e1db29623eaae2e8c" exitCode=0 Nov 27 07:18:50 crc kubenswrapper[4971]: E1127 07:18:50.122445 4971 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d8cae12287c5ef396f90e407b77789281e9654588ff7861e1db29623eaae2e8c is running failed: container process not found" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="bcf58afd-21c6-4c9d-8702-09bc98859732" containerName="galera" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.122485 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"bcf58afd-21c6-4c9d-8702-09bc98859732","Type":"ContainerDied","Data":"d8cae12287c5ef396f90e407b77789281e9654588ff7861e1db29623eaae2e8c"} Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.133945 4971 generic.go:334] "Generic (PLEG): container finished" podID="6e067498-7bc0-4bc5-a9a6-696c8aa3cf71" containerID="0d9471b432378d52a3ce483c444b3f5beceab405dd8b991eb4ecb32b754851b8" exitCode=0 Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.134004 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc" event={"ID":"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71","Type":"ContainerDied","Data":"0d9471b432378d52a3ce483c444b3f5beceab405dd8b991eb4ecb32b754851b8"} Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.135637 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea","Type":"ContainerDied","Data":"7101867e5e94352aa26ebb236111effa5248597ea686da5f10e624a1125b4366"} Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.135656 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7101867e5e94352aa26ebb236111effa5248597ea686da5f10e624a1125b4366" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.137423 4971 generic.go:334] "Generic (PLEG): container finished" podID="c2e2055e-1200-46e8-a49e-c6b490702c9b" containerID="c813e7acb7f4a74ba8b74e4f55535ea0e0307a8468b0623d9ec10735c4807fad" exitCode=0 Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.137459 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c2e2055e-1200-46e8-a49e-c6b490702c9b","Type":"ContainerDied","Data":"c813e7acb7f4a74ba8b74e4f55535ea0e0307a8468b0623d9ec10735c4807fad"} Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.139810 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron1af4-account-delete-48m5w" event={"ID":"91f979c6-21cc-4848-9eb5-b8bfc4abf082","Type":"ContainerStarted","Data":"fd5d4c775a7fb450ac8ff1db182633c99de47c66fdf05041c3bf7ad5c72a8003"} Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.150820 4971 generic.go:334] "Generic (PLEG): container finished" podID="6754a19e-e024-4b15-8464-49e127bd35ad" containerID="f25c15b6849ee4de1357a742e5ff9d74b03bc332580c116f4c0b091d577d4a1d" exitCode=0 Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.150903 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cinder-scheduler-0" event={"ID":"6754a19e-e024-4b15-8464-49e127bd35ad","Type":"ContainerDied","Data":"f25c15b6849ee4de1357a742e5ff9d74b03bc332580c116f4c0b091d577d4a1d"} Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.153961 4971 generic.go:334] "Generic (PLEG): container finished" podID="1206c914-fbe7-4e8f-8470-861b0ebf75de" containerID="5668516f8e5f98d27ae8ef1c9b2857419b64f8bc06f8aeffeddbb87691114627" exitCode=0 Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.154013 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1206c914-fbe7-4e8f-8470-861b0ebf75de","Type":"ContainerDied","Data":"5668516f8e5f98d27ae8ef1c9b2857419b64f8bc06f8aeffeddbb87691114627"} Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.155486 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-66674dfb5f-52hcq" event={"ID":"82a33cc1-32fe-464f-ac33-b802fd32a4c1","Type":"ContainerDied","Data":"e5e757909b07d75f7894463f911274815946df5ca2fcfd472e16750d3eb2c73d"} Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.155519 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e5e757909b07d75f7894463f911274815946df5ca2fcfd472e16750d3eb2c73d" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.160583 4971 generic.go:334] "Generic (PLEG): container finished" podID="b3616559-d640-4b3b-a4b3-b9d9af1d0061" containerID="432905efdade8907249e6259f8519c70b607016106a34709d73f373c91efe0a5" exitCode=0 Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.160648 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7865dbd7d9-zp55h" event={"ID":"b3616559-d640-4b3b-a4b3-b9d9af1d0061","Type":"ContainerDied","Data":"432905efdade8907249e6259f8519c70b607016106a34709d73f373c91efe0a5"} Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.162719 4971 generic.go:334] "Generic (PLEG): container finished" podID="2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" containerID="32b6c0f43018436689e6595a7fbba50fff0b5d5d5f4ec9b1c259ec629b4ab6bc" exitCode=0 Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.162869 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d","Type":"ContainerDied","Data":"32b6c0f43018436689e6595a7fbba50fff0b5d5d5f4ec9b1c259ec629b4ab6bc"} Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.206873 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.254131 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.260305 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell091e4-account-delete-8kghx" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.317180 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-db2qc" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.321681 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-v4r7q"] Nov 27 07:18:50 crc kubenswrapper[4971]: E1127 07:18:50.326717 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="960448ee56e126a49d6f57a232fb215e9851f384a304f93ed3bcb04c83b92864" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.328201 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-dns-svc\") pod \"b3fa1872-f7d9-4531-bc33-619419f530a5\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.328242 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-config\") pod \"b3fa1872-f7d9-4531-bc33-619419f530a5\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.328289 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6gb2q\" (UniqueName: \"kubernetes.io/projected/b3fa1872-f7d9-4531-bc33-619419f530a5-kube-api-access-6gb2q\") pod \"b3fa1872-f7d9-4531-bc33-619419f530a5\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.328321 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d86c04-b5e4-4334-832b-10953c1d0b1d-combined-ca-bundle\") pod \"36d86c04-b5e4-4334-832b-10953c1d0b1d\" (UID: \"36d86c04-b5e4-4334-832b-10953c1d0b1d\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.328372 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/36d86c04-b5e4-4334-832b-10953c1d0b1d-openstack-config\") pod \"36d86c04-b5e4-4334-832b-10953c1d0b1d\" (UID: \"36d86c04-b5e4-4334-832b-10953c1d0b1d\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.328433 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-ovsdbserver-sb\") pod \"b3fa1872-f7d9-4531-bc33-619419f530a5\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.328469 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-dns-swift-storage-0\") pod \"b3fa1872-f7d9-4531-bc33-619419f530a5\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.328490 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-ovsdbserver-nb\") pod \"b3fa1872-f7d9-4531-bc33-619419f530a5\" (UID: \"b3fa1872-f7d9-4531-bc33-619419f530a5\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.328638 4971 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/36d86c04-b5e4-4334-832b-10953c1d0b1d-openstack-config-secret\") pod \"36d86c04-b5e4-4334-832b-10953c1d0b1d\" (UID: \"36d86c04-b5e4-4334-832b-10953c1d0b1d\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.328703 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rl9r5\" (UniqueName: \"kubernetes.io/projected/36d86c04-b5e4-4334-832b-10953c1d0b1d-kube-api-access-rl9r5\") pod \"36d86c04-b5e4-4334-832b-10953c1d0b1d\" (UID: \"36d86c04-b5e4-4334-832b-10953c1d0b1d\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.341723 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_e060bc1f-a47d-45ef-88bd-cfd0645f9ce2/ovsdbserver-nb/0.log" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.342134 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.345111 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-v4r7q"] Nov 27 07:18:50 crc kubenswrapper[4971]: E1127 07:18:50.348943 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="960448ee56e126a49d6f57a232fb215e9851f384a304f93ed3bcb04c83b92864" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 27 07:18:50 crc kubenswrapper[4971]: E1127 07:18:50.355902 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="960448ee56e126a49d6f57a232fb215e9851f384a304f93ed3bcb04c83b92864" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 27 07:18:50 crc kubenswrapper[4971]: E1127 07:18:50.355981 4971 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad" containerName="nova-cell0-conductor-conductor" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.397240 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_6e13a581-61d0-4a1f-ad42-5f2783417c70/ovsdbserver-sb/0.log" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.397383 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.409111 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36d86c04-b5e4-4334-832b-10953c1d0b1d-kube-api-access-rl9r5" (OuterVolumeSpecName: "kube-api-access-rl9r5") pod "36d86c04-b5e4-4334-832b-10953c1d0b1d" (UID: "36d86c04-b5e4-4334-832b-10953c1d0b1d"). InnerVolumeSpecName "kube-api-access-rl9r5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.410575 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3fa1872-f7d9-4531-bc33-619419f530a5-kube-api-access-6gb2q" (OuterVolumeSpecName: "kube-api-access-6gb2q") pod "b3fa1872-f7d9-4531-bc33-619419f530a5" (UID: "b3fa1872-f7d9-4531-bc33-619419f530a5"). InnerVolumeSpecName "kube-api-access-6gb2q". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.429868 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e13a581-61d0-4a1f-ad42-5f2783417c70-config\") pod \"6e13a581-61d0-4a1f-ad42-5f2783417c70\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.429983 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0aa6e1b6-c18b-4a02-a396-880350cde407-scripts\") pod \"0aa6e1b6-c18b-4a02-a396-880350cde407\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430028 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-combined-ca-bundle\") pod \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430052 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2vqk\" (UniqueName: \"kubernetes.io/projected/6e13a581-61d0-4a1f-ad42-5f2783417c70-kube-api-access-c2vqk\") pod \"6e13a581-61d0-4a1f-ad42-5f2783417c70\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430074 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0aa6e1b6-c18b-4a02-a396-880350cde407-var-run\") pod \"0aa6e1b6-c18b-4a02-a396-880350cde407\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430203 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aa6e1b6-c18b-4a02-a396-880350cde407-combined-ca-bundle\") pod \"0aa6e1b6-c18b-4a02-a396-880350cde407\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430263 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwljt\" (UniqueName: \"kubernetes.io/projected/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-kube-api-access-dwljt\") pod \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430291 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e13a581-61d0-4a1f-ad42-5f2783417c70-combined-ca-bundle\") pod \"6e13a581-61d0-4a1f-ad42-5f2783417c70\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430331 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/6e13a581-61d0-4a1f-ad42-5f2783417c70-metrics-certs-tls-certs\") pod \"6e13a581-61d0-4a1f-ad42-5f2783417c70\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430370 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6e13a581-61d0-4a1f-ad42-5f2783417c70-ovsdb-rundir\") pod \"6e13a581-61d0-4a1f-ad42-5f2783417c70\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430403 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e13a581-61d0-4a1f-ad42-5f2783417c70-ovsdbserver-sb-tls-certs\") pod \"6e13a581-61d0-4a1f-ad42-5f2783417c70\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430439 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa6e1b6-c18b-4a02-a396-880350cde407-ovn-controller-tls-certs\") pod \"0aa6e1b6-c18b-4a02-a396-880350cde407\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430465 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e13a581-61d0-4a1f-ad42-5f2783417c70-scripts\") pod \"6e13a581-61d0-4a1f-ad42-5f2783417c70\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430488 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"6e13a581-61d0-4a1f-ad42-5f2783417c70\" (UID: \"6e13a581-61d0-4a1f-ad42-5f2783417c70\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430518 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430574 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wfgmz\" (UniqueName: \"kubernetes.io/projected/0aa6e1b6-c18b-4a02-a396-880350cde407-kube-api-access-wfgmz\") pod \"0aa6e1b6-c18b-4a02-a396-880350cde407\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430621 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0aa6e1b6-c18b-4a02-a396-880350cde407-var-log-ovn\") pod \"0aa6e1b6-c18b-4a02-a396-880350cde407\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430639 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-ovsdbserver-nb-tls-certs\") pod \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430658 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-ovsdb-rundir\") pod \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430693 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-config\") pod \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430735 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-metrics-certs-tls-certs\") pod \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430757 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0aa6e1b6-c18b-4a02-a396-880350cde407-var-run-ovn\") pod \"0aa6e1b6-c18b-4a02-a396-880350cde407\" (UID: \"0aa6e1b6-c18b-4a02-a396-880350cde407\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.430787 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-scripts\") pod \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\" (UID: \"e060bc1f-a47d-45ef-88bd-cfd0645f9ce2\") " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.431041 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzqfv\" (UniqueName: \"kubernetes.io/projected/9411d1e4-0384-4caf-b95e-e891b811b402-kube-api-access-vzqfv\") pod \"novacell16991-account-delete-ftkpc\" (UID: \"9411d1e4-0384-4caf-b95e-e891b811b402\") " pod="openstack/novacell16991-account-delete-ftkpc" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.431175 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9411d1e4-0384-4caf-b95e-e891b811b402-operator-scripts\") pod \"novacell16991-account-delete-ftkpc\" (UID: \"9411d1e4-0384-4caf-b95e-e891b811b402\") " pod="openstack/novacell16991-account-delete-ftkpc" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.431350 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rl9r5\" (UniqueName: \"kubernetes.io/projected/36d86c04-b5e4-4334-832b-10953c1d0b1d-kube-api-access-rl9r5\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.431367 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6gb2q\" (UniqueName: \"kubernetes.io/projected/b3fa1872-f7d9-4531-bc33-619419f530a5-kube-api-access-6gb2q\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:50 crc kubenswrapper[4971]: E1127 07:18:50.431426 4971 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Nov 27 07:18:50 crc kubenswrapper[4971]: E1127 07:18:50.431481 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9411d1e4-0384-4caf-b95e-e891b811b402-operator-scripts podName:9411d1e4-0384-4caf-b95e-e891b811b402 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:54.431460291 +0000 UTC m=+1572.623504209 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/9411d1e4-0384-4caf-b95e-e891b811b402-operator-scripts") pod "novacell16991-account-delete-ftkpc" (UID: "9411d1e4-0384-4caf-b95e-e891b811b402") : configmap "openstack-cell1-scripts" not found Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.432455 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e13a581-61d0-4a1f-ad42-5f2783417c70-config" (OuterVolumeSpecName: "config") pod "6e13a581-61d0-4a1f-ad42-5f2783417c70" (UID: "6e13a581-61d0-4a1f-ad42-5f2783417c70"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.434021 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0aa6e1b6-c18b-4a02-a396-880350cde407-scripts" (OuterVolumeSpecName: "scripts") pod "0aa6e1b6-c18b-4a02-a396-880350cde407" (UID: "0aa6e1b6-c18b-4a02-a396-880350cde407"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.441078 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-config" (OuterVolumeSpecName: "config") pod "e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" (UID: "e060bc1f-a47d-45ef-88bd-cfd0645f9ce2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.444291 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0aa6e1b6-c18b-4a02-a396-880350cde407-var-run" (OuterVolumeSpecName: "var-run") pod "0aa6e1b6-c18b-4a02-a396-880350cde407" (UID: "0aa6e1b6-c18b-4a02-a396-880350cde407"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.445936 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-scripts" (OuterVolumeSpecName: "scripts") pod "e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" (UID: "e060bc1f-a47d-45ef-88bd-cfd0645f9ce2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.451665 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0aa6e1b6-c18b-4a02-a396-880350cde407-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "0aa6e1b6-c18b-4a02-a396-880350cde407" (UID: "0aa6e1b6-c18b-4a02-a396-880350cde407"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.451863 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0aa6e1b6-c18b-4a02-a396-880350cde407-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "0aa6e1b6-c18b-4a02-a396-880350cde407" (UID: "0aa6e1b6-c18b-4a02-a396-880350cde407"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:18:50 crc kubenswrapper[4971]: E1127 07:18:50.452050 4971 projected.go:194] Error preparing data for projected volume kube-api-access-vzqfv for pod openstack/novacell16991-account-delete-ftkpc: failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Nov 27 07:18:50 crc kubenswrapper[4971]: E1127 07:18:50.452106 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9411d1e4-0384-4caf-b95e-e891b811b402-kube-api-access-vzqfv podName:9411d1e4-0384-4caf-b95e-e891b811b402 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:54.452086776 +0000 UTC m=+1572.644130694 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-vzqfv" (UniqueName: "kubernetes.io/projected/9411d1e4-0384-4caf-b95e-e891b811b402-kube-api-access-vzqfv") pod "novacell16991-account-delete-ftkpc" (UID: "9411d1e4-0384-4caf-b95e-e891b811b402") : failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.453042 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" (UID: "e060bc1f-a47d-45ef-88bd-cfd0645f9ce2"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.453630 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e13a581-61d0-4a1f-ad42-5f2783417c70-scripts" (OuterVolumeSpecName: "scripts") pod "6e13a581-61d0-4a1f-ad42-5f2783417c70" (UID: "6e13a581-61d0-4a1f-ad42-5f2783417c70"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.453912 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e13a581-61d0-4a1f-ad42-5f2783417c70-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "6e13a581-61d0-4a1f-ad42-5f2783417c70" (UID: "6e13a581-61d0-4a1f-ad42-5f2783417c70"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.491916 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "6e13a581-61d0-4a1f-ad42-5f2783417c70" (UID: "6e13a581-61d0-4a1f-ad42-5f2783417c70"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.496004 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-kube-api-access-dwljt" (OuterVolumeSpecName: "kube-api-access-dwljt") pod "e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" (UID: "e060bc1f-a47d-45ef-88bd-cfd0645f9ce2"). InnerVolumeSpecName "kube-api-access-dwljt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.501034 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e13a581-61d0-4a1f-ad42-5f2783417c70-kube-api-access-c2vqk" (OuterVolumeSpecName: "kube-api-access-c2vqk") pod "6e13a581-61d0-4a1f-ad42-5f2783417c70" (UID: "6e13a581-61d0-4a1f-ad42-5f2783417c70"). InnerVolumeSpecName "kube-api-access-c2vqk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.505802 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0aa6e1b6-c18b-4a02-a396-880350cde407-kube-api-access-wfgmz" (OuterVolumeSpecName: "kube-api-access-wfgmz") pod "0aa6e1b6-c18b-4a02-a396-880350cde407" (UID: "0aa6e1b6-c18b-4a02-a396-880350cde407"). InnerVolumeSpecName "kube-api-access-wfgmz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.505876 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" (UID: "e060bc1f-a47d-45ef-88bd-cfd0645f9ce2"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.539387 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2vqk\" (UniqueName: \"kubernetes.io/projected/6e13a581-61d0-4a1f-ad42-5f2783417c70-kube-api-access-c2vqk\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.539415 4971 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0aa6e1b6-c18b-4a02-a396-880350cde407-var-run\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.539424 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwljt\" (UniqueName: \"kubernetes.io/projected/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-kube-api-access-dwljt\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.539435 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6e13a581-61d0-4a1f-ad42-5f2783417c70-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.539452 4971 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.539462 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e13a581-61d0-4a1f-ad42-5f2783417c70-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.539476 4971 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.539486 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wfgmz\" (UniqueName: \"kubernetes.io/projected/0aa6e1b6-c18b-4a02-a396-880350cde407-kube-api-access-wfgmz\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:50 
crc kubenswrapper[4971]: I1127 07:18:50.539495 4971 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0aa6e1b6-c18b-4a02-a396-880350cde407-var-log-ovn\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.539504 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.539512 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.539519 4971 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0aa6e1b6-c18b-4a02-a396-880350cde407-var-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.539543 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.539551 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e13a581-61d0-4a1f-ad42-5f2783417c70-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.539568 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0aa6e1b6-c18b-4a02-a396-880350cde407-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:50 crc kubenswrapper[4971]: E1127 07:18:50.540563 4971 projected.go:288] Couldn't get configMap openstack/swift-storage-config-data: configmap "swift-storage-config-data" not found Nov 27 07:18:50 crc kubenswrapper[4971]: E1127 07:18:50.540606 4971 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found Nov 27 07:18:50 crc kubenswrapper[4971]: E1127 07:18:50.540802 4971 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 27 07:18:50 crc kubenswrapper[4971]: E1127 07:18:50.540822 4971 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Nov 27 07:18:50 crc kubenswrapper[4971]: E1127 07:18:50.541348 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift podName:b9ccc9bd-d955-4853-986f-95597f2c70e6 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:54.54101889 +0000 UTC m=+1572.733062858 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift") pod "swift-storage-0" (UID: "b9ccc9bd-d955-4853-986f-95597f2c70e6") : [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.568187 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fabf66ef-ae78-4497-998d-95abd13bbab8" path="/var/lib/kubelet/pods/fabf66ef-ae78-4497-998d-95abd13bbab8/volumes" Nov 27 07:18:50 crc kubenswrapper[4971]: E1127 07:18:50.745552 4971 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Nov 27 07:18:50 crc kubenswrapper[4971]: E1127 07:18:50.746135 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-config-data podName:640c3829-d2e9-49e1-82e3-bd213aa992dd nodeName:}" failed. No retries permitted until 2025-11-27 07:18:54.746116873 +0000 UTC m=+1572.938160791 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-config-data") pod "rabbitmq-cell1-server-0" (UID: "640c3829-d2e9-49e1-82e3-bd213aa992dd") : configmap "rabbitmq-cell1-config-data" not found Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.781722 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="35b90587-df5b-4f15-8c34-f1b0a8506d85" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": read tcp 10.217.0.2:54420->10.217.0.205:8775: read: connection reset by peer" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.782118 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="35b90587-df5b-4f15-8c34-f1b0a8506d85" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": read tcp 10.217.0.2:54424->10.217.0.205:8775: read: connection reset by peer" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.797826 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-85d46db856-nzmcq" podUID="fc889790-089f-4007-876f-874880dad975" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.164:9311/healthcheck\": read tcp 10.217.0.2:44280->10.217.0.164:9311: read: connection reset by peer" Nov 27 07:18:50 crc kubenswrapper[4971]: I1127 07:18:50.797826 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-85d46db856-nzmcq" podUID="fc889790-089f-4007-876f-874880dad975" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.164:9311/healthcheck\": read tcp 10.217.0.2:44290->10.217.0.164:9311: read: connection reset by peer" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.101052 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aa6e1b6-c18b-4a02-a396-880350cde407-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0aa6e1b6-c18b-4a02-a396-880350cde407" (UID: "0aa6e1b6-c18b-4a02-a396-880350cde407"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.167991 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aa6e1b6-c18b-4a02-a396-880350cde407-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.242125 4971 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.264992 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36d86c04-b5e4-4334-832b-10953c1d0b1d-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "36d86c04-b5e4-4334-832b-10953c1d0b1d" (UID: "36d86c04-b5e4-4334-832b-10953c1d0b1d"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.274657 4971 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/36d86c04-b5e4-4334-832b-10953c1d0b1d-openstack-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.274723 4971 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.279321 4971 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.390506 4971 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.398730 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement0147-account-delete-w7x5l" podStartSLOduration=7.398704507 podStartE2EDuration="7.398704507s" podCreationTimestamp="2025-11-27 07:18:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:18:51.359552168 +0000 UTC m=+1569.551596106" watchObservedRunningTime="2025-11-27 07:18:51.398704507 +0000 UTC m=+1569.590748425" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.421901 4971 generic.go:334] "Generic (PLEG): container finished" podID="fc889790-089f-4007-876f-874880dad975" containerID="5d760ae55be3a486a97462d9bd8576d1f7879d82b2f6d6318d4076d0797b7097" exitCode=0 Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.442203 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" (UID: "e060bc1f-a47d-45ef-88bd-cfd0645f9ce2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.452325 4971 generic.go:334] "Generic (PLEG): container finished" podID="35b90587-df5b-4f15-8c34-f1b0a8506d85" containerID="c581d4d83b4f2a55778d440cd3c9dd25c65d57ded13db9b0fc5dd5a6570dbd13" exitCode=0 Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.453135 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d86c04-b5e4-4334-832b-10953c1d0b1d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36d86c04-b5e4-4334-832b-10953c1d0b1d" (UID: "36d86c04-b5e4-4334-832b-10953c1d0b1d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.469261 4971 generic.go:334] "Generic (PLEG): container finished" podID="69d47892-79da-4e4e-8de2-a84801d4d6b9" containerID="e6ecd3fddd03d9acdb8874669eb4ae87d6f12ad1116ff9c9cbf2e034df95e87e" exitCode=0 Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.471816 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.472932 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-db2qc" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.473197 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.473379 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-669669cf59-5rwqh" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.473619 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.493834 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.493867 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36d86c04-b5e4-4334-832b-10953c1d0b1d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.497600 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b3fa1872-f7d9-4531-bc33-619419f530a5" (UID: "b3fa1872-f7d9-4531-bc33-619419f530a5"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.505718 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36d86c04-b5e4-4334-832b-10953c1d0b1d-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "36d86c04-b5e4-4334-832b-10953c1d0b1d" (UID: "36d86c04-b5e4-4334-832b-10953c1d0b1d"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.560118 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-config" (OuterVolumeSpecName: "config") pod "b3fa1872-f7d9-4531-bc33-619419f530a5" (UID: "b3fa1872-f7d9-4531-bc33-619419f530a5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.600183 4971 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/36d86c04-b5e4-4334-832b-10953c1d0b1d-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.600396 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.600468 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.717705 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b3fa1872-f7d9-4531-bc33-619419f530a5" (UID: "b3fa1872-f7d9-4531-bc33-619419f530a5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.734576 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e13a581-61d0-4a1f-ad42-5f2783417c70-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6e13a581-61d0-4a1f-ad42-5f2783417c70" (UID: "6e13a581-61d0-4a1f-ad42-5f2783417c70"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.789564 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" (UID: "e060bc1f-a47d-45ef-88bd-cfd0645f9ce2"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.815308 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e13a581-61d0-4a1f-ad42-5f2783417c70-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.815361 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.815378 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.867715 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e13a581-61d0-4a1f-ad42-5f2783417c70-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "6e13a581-61d0-4a1f-ad42-5f2783417c70" (UID: "6e13a581-61d0-4a1f-ad42-5f2783417c70"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.879059 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b3fa1872-f7d9-4531-bc33-619419f530a5" (UID: "b3fa1872-f7d9-4531-bc33-619419f530a5"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.917241 4971 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.933873 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e13a581-61d0-4a1f-ad42-5f2783417c70-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.943707 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" (UID: "e060bc1f-a47d-45ef-88bd-cfd0645f9ce2"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:51 crc kubenswrapper[4971]: I1127 07:18:51.975978 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aa6e1b6-c18b-4a02-a396-880350cde407-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "0aa6e1b6-c18b-4a02-a396-880350cde407" (UID: "0aa6e1b6-c18b-4a02-a396-880350cde407"). InnerVolumeSpecName "ovn-controller-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.065567 4971 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa6e1b6-c18b-4a02-a396-880350cde407-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.065602 4971 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.080089 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b3fa1872-f7d9-4531-bc33-619419f530a5" (UID: "b3fa1872-f7d9-4531-bc33-619419f530a5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.117755 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e13a581-61d0-4a1f-ad42-5f2783417c70-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "6e13a581-61d0-4a1f-ad42-5f2783417c70" (UID: "6e13a581-61d0-4a1f-ad42-5f2783417c70"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.168282 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3fa1872-f7d9-4531-bc33-619419f530a5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.168318 4971 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e13a581-61d0-4a1f-ad42-5f2783417c70-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:52 crc kubenswrapper[4971]: E1127 07:18:52.261409 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 27 07:18:52 crc kubenswrapper[4971]: E1127 07:18:52.261523 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 27 07:18:52 crc kubenswrapper[4971]: E1127 07:18:52.266645 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 27 07:18:52 crc kubenswrapper[4971]: E1127 07:18:52.277704 4971 log.go:32] "ExecSync cmd from runtime service failed" 
err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 27 07:18:52 crc kubenswrapper[4971]: E1127 07:18:52.277853 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 27 07:18:52 crc kubenswrapper[4971]: E1127 07:18:52.277876 4971 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-45rt8" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovsdb-server" Nov 27 07:18:52 crc kubenswrapper[4971]: E1127 07:18:52.316511 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 27 07:18:52 crc kubenswrapper[4971]: E1127 07:18:52.316634 4971 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-45rt8" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovs-vswitchd" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.493827 4971 generic.go:334] "Generic (PLEG): container finished" podID="aba3aa08-61e1-48d3-bf4b-cb45e0b71561" containerID="18682d190c4a5b0b55ee9d27eb1abd136955ddd4b910117777d70786a6f380e8" exitCode=0 Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.495768 4971 generic.go:334] "Generic (PLEG): container finished" podID="bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad" containerID="960448ee56e126a49d6f57a232fb215e9851f384a304f93ed3bcb04c83b92864" exitCode=0 Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.498026 4971 generic.go:334] "Generic (PLEG): container finished" podID="c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d" containerID="021e6a2a11bacfa832c03480f267d7d6f46b8ff018e5da0bdfaee90588e4feda" exitCode=0 Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.507183 4971 generic.go:334] "Generic (PLEG): container finished" podID="91f979c6-21cc-4848-9eb5-b8bfc4abf082" containerID="1b572a1aecdf546cde5c96515c9893664d21cadbc5a9100a606b1ffea63c1f0b" exitCode=0 Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.647054 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 27 07:18:52 crc kubenswrapper[4971]: E1127 07:18:52.651065 4971 kubelet.go:2526] "Housekeeping took longer than expected" err="housekeeping took too long" expected="1s" actual="2.102s" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.651093 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc" event={"ID":"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71","Type":"ContainerDied","Data":"7852e87416c31263e7e00d8e70907a6f92840226e253d0e1be85a175d15e7676"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.651120 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7852e87416c31263e7e00d8e70907a6f92840226e253d0e1be85a175d15e7676" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.677493 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36d86c04-b5e4-4334-832b-10953c1d0b1d" path="/var/lib/kubelet/pods/36d86c04-b5e4-4334-832b-10953c1d0b1d/volumes" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687231 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-p4742"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687278 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-p4742"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687295 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell16991-account-delete-ftkpc"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687314 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-6991-account-create-update-flxv8"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687325 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-6991-account-create-update-flxv8"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687338 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance7577-account-delete-hmqt9" event={"ID":"83ad8915-ac3b-4891-ae7a-9b862747569f","Type":"ContainerDied","Data":"365e13d3fa7c5a7ff8d525defda7a68d1d726d4644424a9c76752122827d963f"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687358 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="365e13d3fa7c5a7ff8d525defda7a68d1d726d4644424a9c76752122827d963f" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687367 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6754a19e-e024-4b15-8464-49e127bd35ad","Type":"ContainerDied","Data":"0588e324332a29c1829f747049eff940a0369fe3e5f347a71d15feb03a83af60"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687380 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0588e324332a29c1829f747049eff940a0369fe3e5f347a71d15feb03a83af60" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687388 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d","Type":"ContainerDied","Data":"161d68366ec826553a8bafea685338e9cd1ccc17a39d021ac9e9e24f5ff8d859"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687399 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="161d68366ec826553a8bafea685338e9cd1ccc17a39d021ac9e9e24f5ff8d859" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687409 4971 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1206c914-fbe7-4e8f-8470-861b0ebf75de","Type":"ContainerDied","Data":"b377f64f7799f517297f59bd0335b9e821a1d5096028224130d2cd37237cd078"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687420 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b377f64f7799f517297f59bd0335b9e821a1d5096028224130d2cd37237cd078" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687430 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement0147-account-delete-w7x5l" event={"ID":"c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d","Type":"ContainerStarted","Data":"021e6a2a11bacfa832c03480f267d7d6f46b8ff018e5da0bdfaee90588e4feda"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687443 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapi6022-account-delete-bhttv"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687460 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"bcf58afd-21c6-4c9d-8702-09bc98859732","Type":"ContainerDied","Data":"a6652ce76b9cece76b9ede712acece12b70a0cae484af7a48284ab90b1053378"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687470 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6652ce76b9cece76b9ede712acece12b70a0cae484af7a48284ab90b1053378" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687478 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7865dbd7d9-zp55h" event={"ID":"b3616559-d640-4b3b-a4b3-b9d9af1d0061","Type":"ContainerDied","Data":"f683a7245c3e4c96e78a29877fed747d3bba11220db6f862793ea02e6733adec"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687488 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f683a7245c3e4c96e78a29877fed747d3bba11220db6f862793ea02e6733adec" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687496 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-85d46db856-nzmcq" event={"ID":"fc889790-089f-4007-876f-874880dad975","Type":"ContainerDied","Data":"5d760ae55be3a486a97462d9bd8576d1f7879d82b2f6d6318d4076d0797b7097"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687512 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell091e4-account-delete-8kghx"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687525 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687563 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c2e2055e-1200-46e8-a49e-c6b490702c9b","Type":"ContainerDied","Data":"1fbc398a0c3a5b193a2d87ab95eda033cbc5cace8dbae7113e580a4b7fa2860b"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687575 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1fbc398a0c3a5b193a2d87ab95eda033cbc5cace8dbae7113e580a4b7fa2860b" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687583 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron1af4-account-delete-48m5w" event={"ID":"91f979c6-21cc-4848-9eb5-b8bfc4abf082","Type":"ContainerStarted","Data":"1b572a1aecdf546cde5c96515c9893664d21cadbc5a9100a606b1ffea63c1f0b"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687591 4971 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"35b90587-df5b-4f15-8c34-f1b0a8506d85","Type":"ContainerDied","Data":"c581d4d83b4f2a55778d440cd3c9dd25c65d57ded13db9b0fc5dd5a6570dbd13"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687604 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687617 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder6f3a-account-delete-nzrmk" event={"ID":"51e69600-7930-4ccb-a8ff-0ad3377bf3a7","Type":"ContainerDied","Data":"ca3642a9f3127eb0e1a15792b08a5f5b011734d077acdae27a1d254b9bbafb2d"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687627 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca3642a9f3127eb0e1a15792b08a5f5b011734d077acdae27a1d254b9bbafb2d" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687635 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687649 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-v825j"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687661 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"69d47892-79da-4e4e-8de2-a84801d4d6b9","Type":"ContainerDied","Data":"e6ecd3fddd03d9acdb8874669eb4ae87d6f12ad1116ff9c9cbf2e034df95e87e"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687676 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-65cf5bcbb-zf65t" event={"ID":"c88f9c00-b02f-4070-b81e-733009e44691","Type":"ContainerDied","Data":"4cf6cd20fef7aacfff33103e531d9c68aff3e165bcc56d39a875302d06e90d6f"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687692 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4cf6cd20fef7aacfff33103e531d9c68aff3e165bcc56d39a875302d06e90d6f" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687704 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-hcxsg"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687717 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-v825j"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687729 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell091e4-account-delete-8kghx" event={"ID":"19b68245-2b99-4337-892f-059f05113ad6","Type":"ContainerStarted","Data":"0ce441afde4807195b4b59dba4caac3ab937e0bf4580f4cc2df090f555bc7247"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687740 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican2596-account-delete-pttgn" event={"ID":"aba3aa08-61e1-48d3-bf4b-cb45e0b71561","Type":"ContainerDied","Data":"18682d190c4a5b0b55ee9d27eb1abd136955ddd4b910117777d70786a6f380e8"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687755 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-hcxsg"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687768 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-55bd67bdbd-llwzk"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687781 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687792 4971 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad","Type":"ContainerDied","Data":"960448ee56e126a49d6f57a232fb215e9851f384a304f93ed3bcb04c83b92864"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687804 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad","Type":"ContainerDied","Data":"44f0d0b2d16a2e70a7f8a13193f0c24dce614ec56f01b2d28fa2d63b2985c3dc"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687811 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="44f0d0b2d16a2e70a7f8a13193f0c24dce614ec56f01b2d28fa2d63b2985c3dc" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687819 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-gn9bn"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687829 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement0147-account-delete-w7x5l" event={"ID":"c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d","Type":"ContainerDied","Data":"021e6a2a11bacfa832c03480f267d7d6f46b8ff018e5da0bdfaee90588e4feda"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687842 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-gn9bn"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687854 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"35b90587-df5b-4f15-8c34-f1b0a8506d85","Type":"ContainerDied","Data":"1c2a4e6850f940a70a6eff35d37aea64d1c56493a7927207282006b381b5fd35"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687862 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c2a4e6850f940a70a6eff35d37aea64d1c56493a7927207282006b381b5fd35" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687870 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-afb3-account-create-update-msgvh"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687880 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-85d46db856-nzmcq" event={"ID":"fc889790-089f-4007-876f-874880dad975","Type":"ContainerDied","Data":"0ba695ab2b4cada687607e751c739c6465567ff29b3737781325a36e76b0ada3"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687888 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ba695ab2b4cada687607e751c739c6465567ff29b3737781325a36e76b0ada3" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687897 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-afb3-account-create-update-msgvh"] Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687912 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"69d47892-79da-4e4e-8de2-a84801d4d6b9","Type":"ContainerDied","Data":"f5a480690dc6e4f0b04176d5578489243dc597f63a060eb82d971a0d3e3ccd17"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687919 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5a480690dc6e4f0b04176d5578489243dc597f63a060eb82d971a0d3e3ccd17" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687927 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron1af4-account-delete-48m5w" 
event={"ID":"91f979c6-21cc-4848-9eb5-b8bfc4abf082","Type":"ContainerDied","Data":"1b572a1aecdf546cde5c96515c9893664d21cadbc5a9100a606b1ffea63c1f0b"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.687936 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi6022-account-delete-bhttv" event={"ID":"cbef532c-59fb-40ac-bde3-35b8f3616d85","Type":"ContainerStarted","Data":"cb55fc6b1f2a82665057fd9c58c97524af59cb9d419c8c4c0b115571d6fab162"} Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.688386 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="a2b1d941-1738-4967-a97c-6a7b2c36531c" containerName="kube-state-metrics" containerID="cri-o://b06e49f36aeb9385737ef57f67601c2cecfa59d6329f3deb519661b9fff43052" gracePeriod=30 Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.688516 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="d6833730-f034-4b5f-954a-19e993167f04" containerName="memcached" containerID="cri-o://de2ace92e2287e0935ebddfec859f7a90b7eade8440fecf2f0f85b2026bacd9f" gracePeriod=30 Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.688730 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-55bd67bdbd-llwzk" podUID="b2ac1dd8-824c-482d-8c0e-47573535f172" containerName="keystone-api" containerID="cri-o://e21d5793819283c5f8059801c98537ef24923751613488a4fef7ec0596a380e9" gracePeriod=30 Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.689176 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerName="ceilometer-central-agent" containerID="cri-o://e42e5fec5c109e2ea0c69c634a81c7905e5cc3f5aa9cfe275d500939cb600b2b" gracePeriod=30 Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.689311 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerName="proxy-httpd" containerID="cri-o://246b3a277e05318dff5ef1ad5944cae22aa133678c3e078b2b0e80c043b33db8" gracePeriod=30 Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.689352 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerName="sg-core" containerID="cri-o://0b2dec937686384d25cd26d898333c4fc81933ae416ed8605ae98f8c6d7ffe0d" gracePeriod=30 Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.689378 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerName="ceilometer-notification-agent" containerID="cri-o://268b7b5e3e099e59f58680414492d2b607e8bfc3bf0455ac64792b601ae0a545" gracePeriod=30 Nov 27 07:18:52 crc kubenswrapper[4971]: E1127 07:18:52.689643 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-vzqfv operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/novacell16991-account-delete-ftkpc" podUID="9411d1e4-0384-4caf-b95e-e891b811b402" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.699777 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-66674dfb5f-52hcq" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.745681 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7865dbd7d9-zp55h" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.790928 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/82a33cc1-32fe-464f-ac33-b802fd32a4c1-etc-swift\") pod \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.791010 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-internal-tls-certs\") pod \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.791045 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-config-data\") pod \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.791077 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/82a33cc1-32fe-464f-ac33-b802fd32a4c1-run-httpd\") pod \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.791100 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-config-data\") pod \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.791171 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4t5z4\" (UniqueName: \"kubernetes.io/projected/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-kube-api-access-4t5z4\") pod \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.792413 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-public-tls-certs\") pod \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.792474 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-combined-ca-bundle\") pod \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.792524 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/82a33cc1-32fe-464f-ac33-b802fd32a4c1-log-httpd\") pod \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.792702 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"kube-api-access-jj97k\" (UniqueName: \"kubernetes.io/projected/82a33cc1-32fe-464f-ac33-b802fd32a4c1-kube-api-access-jj97k\") pod \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.792738 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-vencrypt-tls-certs\") pod \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.792757 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-combined-ca-bundle\") pod \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\" (UID: \"82a33cc1-32fe-464f-ac33-b802fd32a4c1\") " Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.792788 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-nova-novncproxy-tls-certs\") pod \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\" (UID: \"fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea\") " Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.797541 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82a33cc1-32fe-464f-ac33-b802fd32a4c1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "82a33cc1-32fe-464f-ac33-b802fd32a4c1" (UID: "82a33cc1-32fe-464f-ac33-b802fd32a4c1"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.806839 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82a33cc1-32fe-464f-ac33-b802fd32a4c1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "82a33cc1-32fe-464f-ac33-b802fd32a4c1" (UID: "82a33cc1-32fe-464f-ac33-b802fd32a4c1"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.807480 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82a33cc1-32fe-464f-ac33-b802fd32a4c1-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "82a33cc1-32fe-464f-ac33-b802fd32a4c1" (UID: "82a33cc1-32fe-464f-ac33-b802fd32a4c1"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.831113 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-kube-api-access-4t5z4" (OuterVolumeSpecName: "kube-api-access-4t5z4") pod "fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea" (UID: "fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea"). InnerVolumeSpecName "kube-api-access-4t5z4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.862379 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82a33cc1-32fe-464f-ac33-b802fd32a4c1-kube-api-access-jj97k" (OuterVolumeSpecName: "kube-api-access-jj97k") pod "82a33cc1-32fe-464f-ac33-b802fd32a4c1" (UID: "82a33cc1-32fe-464f-ac33-b802fd32a4c1"). InnerVolumeSpecName "kube-api-access-jj97k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.894509 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b3616559-d640-4b3b-a4b3-b9d9af1d0061-config-data-custom\") pod \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.894645 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3616559-d640-4b3b-a4b3-b9d9af1d0061-combined-ca-bundle\") pod \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.894675 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3616559-d640-4b3b-a4b3-b9d9af1d0061-logs\") pod \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.894915 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3616559-d640-4b3b-a4b3-b9d9af1d0061-config-data\") pod \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.895151 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fc7ms\" (UniqueName: \"kubernetes.io/projected/b3616559-d640-4b3b-a4b3-b9d9af1d0061-kube-api-access-fc7ms\") pod \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\" (UID: \"b3616559-d640-4b3b-a4b3-b9d9af1d0061\") " Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.895727 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3616559-d640-4b3b-a4b3-b9d9af1d0061-logs" (OuterVolumeSpecName: "logs") pod "b3616559-d640-4b3b-a4b3-b9d9af1d0061" (UID: "b3616559-d640-4b3b-a4b3-b9d9af1d0061"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.899928 4971 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/82a33cc1-32fe-464f-ac33-b802fd32a4c1-etc-swift\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.900022 4971 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/82a33cc1-32fe-464f-ac33-b802fd32a4c1-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.900057 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4t5z4\" (UniqueName: \"kubernetes.io/projected/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-kube-api-access-4t5z4\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.900070 4971 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/82a33cc1-32fe-464f-ac33-b802fd32a4c1-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.900078 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jj97k\" (UniqueName: \"kubernetes.io/projected/82a33cc1-32fe-464f-ac33-b802fd32a4c1-kube-api-access-jj97k\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.900088 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3616559-d640-4b3b-a4b3-b9d9af1d0061-logs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.928089 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3616559-d640-4b3b-a4b3-b9d9af1d0061-kube-api-access-fc7ms" (OuterVolumeSpecName: "kube-api-access-fc7ms") pod "b3616559-d640-4b3b-a4b3-b9d9af1d0061" (UID: "b3616559-d640-4b3b-a4b3-b9d9af1d0061"). InnerVolumeSpecName "kube-api-access-fc7ms". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.929266 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-config-data" (OuterVolumeSpecName: "config-data") pod "fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea" (UID: "fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.937999 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3616559-d640-4b3b-a4b3-b9d9af1d0061-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b3616559-d640-4b3b-a4b3-b9d9af1d0061" (UID: "b3616559-d640-4b3b-a4b3-b9d9af1d0061"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:52 crc kubenswrapper[4971]: I1127 07:18:52.958633 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea" (UID: "fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.003156 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.003195 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fc7ms\" (UniqueName: \"kubernetes.io/projected/b3616559-d640-4b3b-a4b3-b9d9af1d0061-kube-api-access-fc7ms\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.003209 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.003220 4971 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b3616559-d640-4b3b-a4b3-b9d9af1d0061-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: E1127 07:18:53.003313 4971 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Nov 27 07:18:53 crc kubenswrapper[4971]: E1127 07:18:53.003368 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-config-data podName:d6439a3c-ee26-467c-8e42-5abbbf390f16 nodeName:}" failed. No retries permitted until 2025-11-27 07:19:01.003347967 +0000 UTC m=+1579.195391885 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-config-data") pod "rabbitmq-server-0" (UID: "d6439a3c-ee26-467c-8e42-5abbbf390f16") : configmap "rabbitmq-config-data" not found Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.017029 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-config-data" (OuterVolumeSpecName: "config-data") pod "82a33cc1-32fe-464f-ac33-b802fd32a4c1" (UID: "82a33cc1-32fe-464f-ac33-b802fd32a4c1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.023900 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3616559-d640-4b3b-a4b3-b9d9af1d0061-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b3616559-d640-4b3b-a4b3-b9d9af1d0061" (UID: "b3616559-d640-4b3b-a4b3-b9d9af1d0061"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.048191 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "82a33cc1-32fe-464f-ac33-b802fd32a4c1" (UID: "82a33cc1-32fe-464f-ac33-b802fd32a4c1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.070596 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "82a33cc1-32fe-464f-ac33-b802fd32a4c1" (UID: "82a33cc1-32fe-464f-ac33-b802fd32a4c1"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.070932 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "82a33cc1-32fe-464f-ac33-b802fd32a4c1" (UID: "82a33cc1-32fe-464f-ac33-b802fd32a4c1"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.070941 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea" (UID: "fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.071061 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="4a0dcbed-5f66-4faf-83c2-1227bc05e9d3" containerName="galera" containerID="cri-o://c882ea5da9f0b4988139836dea25aa544de3122c3c530bac76a1e7264a8122e0" gracePeriod=30 Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.095743 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3616559-d640-4b3b-a4b3-b9d9af1d0061-config-data" (OuterVolumeSpecName: "config-data") pod "b3616559-d640-4b3b-a4b3-b9d9af1d0061" (UID: "b3616559-d640-4b3b-a4b3-b9d9af1d0061"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.096119 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea" (UID: "fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea"). InnerVolumeSpecName "nova-novncproxy-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.105810 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3616559-d640-4b3b-a4b3-b9d9af1d0061-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.105855 4971 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.105865 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.105874 4971 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.105884 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3616559-d640-4b3b-a4b3-b9d9af1d0061-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.105894 4971 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.105907 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.105915 4971 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/82a33cc1-32fe-464f-ac33-b802fd32a4c1-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.204370 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.204518 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.230200 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.234738 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-669669cf59-5rwqh"] Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.271502 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-65cf5bcbb-zf65t" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.303251 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.318389 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkcmh\" (UniqueName: \"kubernetes.io/projected/bcf58afd-21c6-4c9d-8702-09bc98859732-kube-api-access-kkcmh\") pod \"bcf58afd-21c6-4c9d-8702-09bc98859732\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.324408 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-combined-ca-bundle\") pod \"1206c914-fbe7-4e8f-8470-861b0ebf75de\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.324633 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bcf58afd-21c6-4c9d-8702-09bc98859732-kolla-config\") pod \"bcf58afd-21c6-4c9d-8702-09bc98859732\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.324752 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"bcf58afd-21c6-4c9d-8702-09bc98859732\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.324867 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-config-data\") pod \"1206c914-fbe7-4e8f-8470-861b0ebf75de\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.325029 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-config-data-custom\") pod \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.326685 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-public-tls-certs\") pod \"1206c914-fbe7-4e8f-8470-861b0ebf75de\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.326899 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbjj6\" (UniqueName: \"kubernetes.io/projected/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-kube-api-access-qbjj6\") pod \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.327049 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"1206c914-fbe7-4e8f-8470-861b0ebf75de\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.327177 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bcf58afd-21c6-4c9d-8702-09bc98859732-operator-scripts\") pod \"bcf58afd-21c6-4c9d-8702-09bc98859732\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " 
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.327276 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1206c914-fbe7-4e8f-8470-861b0ebf75de-logs\") pod \"1206c914-fbe7-4e8f-8470-861b0ebf75de\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.327366 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bcf58afd-21c6-4c9d-8702-09bc98859732-config-data-generated\") pod \"bcf58afd-21c6-4c9d-8702-09bc98859732\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.327446 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bcf58afd-21c6-4c9d-8702-09bc98859732-config-data-default\") pod \"bcf58afd-21c6-4c9d-8702-09bc98859732\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.328433 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcf58afd-21c6-4c9d-8702-09bc98859732-galera-tls-certs\") pod \"bcf58afd-21c6-4c9d-8702-09bc98859732\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.332398 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-combined-ca-bundle\") pod \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.332666 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-logs\") pod \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.332866 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcf58afd-21c6-4c9d-8702-09bc98859732-combined-ca-bundle\") pod \"bcf58afd-21c6-4c9d-8702-09bc98859732\" (UID: \"bcf58afd-21c6-4c9d-8702-09bc98859732\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.333155 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-scripts\") pod \"1206c914-fbe7-4e8f-8470-861b0ebf75de\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.333321 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-config-data\") pod \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\" (UID: \"6e067498-7bc0-4bc5-a9a6-696c8aa3cf71\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.333512 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1206c914-fbe7-4e8f-8470-861b0ebf75de-httpd-run\") pod \"1206c914-fbe7-4e8f-8470-861b0ebf75de\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 
07:18:53.333702 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58j4x\" (UniqueName: \"kubernetes.io/projected/1206c914-fbe7-4e8f-8470-861b0ebf75de-kube-api-access-58j4x\") pod \"1206c914-fbe7-4e8f-8470-861b0ebf75de\" (UID: \"1206c914-fbe7-4e8f-8470-861b0ebf75de\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.338285 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcf58afd-21c6-4c9d-8702-09bc98859732-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "bcf58afd-21c6-4c9d-8702-09bc98859732" (UID: "bcf58afd-21c6-4c9d-8702-09bc98859732"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.339073 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcf58afd-21c6-4c9d-8702-09bc98859732-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "bcf58afd-21c6-4c9d-8702-09bc98859732" (UID: "bcf58afd-21c6-4c9d-8702-09bc98859732"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.338736 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bcf58afd-21c6-4c9d-8702-09bc98859732-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "bcf58afd-21c6-4c9d-8702-09bc98859732" (UID: "bcf58afd-21c6-4c9d-8702-09bc98859732"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.340285 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1206c914-fbe7-4e8f-8470-861b0ebf75de-logs" (OuterVolumeSpecName: "logs") pod "1206c914-fbe7-4e8f-8470-861b0ebf75de" (UID: "1206c914-fbe7-4e8f-8470-861b0ebf75de"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.340647 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-logs" (OuterVolumeSpecName: "logs") pod "6e067498-7bc0-4bc5-a9a6-696c8aa3cf71" (UID: "6e067498-7bc0-4bc5-a9a6-696c8aa3cf71"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.342196 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "1206c914-fbe7-4e8f-8470-861b0ebf75de" (UID: "1206c914-fbe7-4e8f-8470-861b0ebf75de"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.342862 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1206c914-fbe7-4e8f-8470-861b0ebf75de-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "1206c914-fbe7-4e8f-8470-861b0ebf75de" (UID: "1206c914-fbe7-4e8f-8470-861b0ebf75de"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.345385 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-669669cf59-5rwqh"] Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.345890 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcf58afd-21c6-4c9d-8702-09bc98859732-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bcf58afd-21c6-4c9d-8702-09bc98859732" (UID: "bcf58afd-21c6-4c9d-8702-09bc98859732"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.351262 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcf58afd-21c6-4c9d-8702-09bc98859732-kube-api-access-kkcmh" (OuterVolumeSpecName: "kube-api-access-kkcmh") pod "bcf58afd-21c6-4c9d-8702-09bc98859732" (UID: "bcf58afd-21c6-4c9d-8702-09bc98859732"). InnerVolumeSpecName "kube-api-access-kkcmh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.356255 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-kube-api-access-qbjj6" (OuterVolumeSpecName: "kube-api-access-qbjj6") pod "6e067498-7bc0-4bc5-a9a6-696c8aa3cf71" (UID: "6e067498-7bc0-4bc5-a9a6-696c8aa3cf71"). InnerVolumeSpecName "kube-api-access-qbjj6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.359853 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.371457 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.383984 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1206c914-fbe7-4e8f-8470-861b0ebf75de-kube-api-access-58j4x" (OuterVolumeSpecName: "kube-api-access-58j4x") pod "1206c914-fbe7-4e8f-8470-861b0ebf75de" (UID: "1206c914-fbe7-4e8f-8470-861b0ebf75de"). InnerVolumeSpecName "kube-api-access-58j4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.384318 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance7577-account-delete-hmqt9" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.384583 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-scripts" (OuterVolumeSpecName: "scripts") pod "1206c914-fbe7-4e8f-8470-861b0ebf75de" (UID: "1206c914-fbe7-4e8f-8470-861b0ebf75de"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.384658 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "6e067498-7bc0-4bc5-a9a6-696c8aa3cf71" (UID: "6e067498-7bc0-4bc5-a9a6-696c8aa3cf71"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.387437 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder6f3a-account-delete-nzrmk" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.398974 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.419423 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-db2qc"] Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.423830 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1206c914-fbe7-4e8f-8470-861b0ebf75de" (UID: "1206c914-fbe7-4e8f-8470-861b0ebf75de"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.431467 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-db2qc"] Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.445811 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "mysql-db") pod "bcf58afd-21c6-4c9d-8702-09bc98859732" (UID: "bcf58afd-21c6-4c9d-8702-09bc98859732"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.446626 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-logs\") pod \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.446671 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-etc-machine-id\") pod \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.446694 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-scripts\") pod \"c88f9c00-b02f-4070-b81e-733009e44691\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.446864 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-internal-tls-certs\") pod \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.446903 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b9dqn\" (UniqueName: \"kubernetes.io/projected/c88f9c00-b02f-4070-b81e-733009e44691-kube-api-access-b9dqn\") pod \"c88f9c00-b02f-4070-b81e-733009e44691\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.446974 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-internal-tls-certs\") pod \"c88f9c00-b02f-4070-b81e-733009e44691\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.447031 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vjvxt\" (UniqueName: \"kubernetes.io/projected/6754a19e-e024-4b15-8464-49e127bd35ad-kube-api-access-vjvxt\") pod \"6754a19e-e024-4b15-8464-49e127bd35ad\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.447053 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-public-tls-certs\") pod \"c88f9c00-b02f-4070-b81e-733009e44691\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.447070 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-public-tls-certs\") pod \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.447109 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-combined-ca-bundle\") pod \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.447152 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-combined-ca-bundle\") pod \"6754a19e-e024-4b15-8464-49e127bd35ad\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.447176 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-scripts\") pod \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.447301 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-config-data\") pod \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.447339 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-config-data-custom\") pod \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.447371 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qx8kk\" (UniqueName: \"kubernetes.io/projected/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-kube-api-access-qx8kk\") pod \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\" (UID: \"2988a7d8-1d6b-46d8-b204-8e02d0be3b4d\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.447404 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-config-data\") pod \"c88f9c00-b02f-4070-b81e-733009e44691\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.447427 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-config-data-custom\") pod \"6754a19e-e024-4b15-8464-49e127bd35ad\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.447446 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-combined-ca-bundle\") pod \"c88f9c00-b02f-4070-b81e-733009e44691\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.447461 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-scripts\") pod \"6754a19e-e024-4b15-8464-49e127bd35ad\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.446886 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" (UID: "2988a7d8-1d6b-46d8-b204-8e02d0be3b4d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.447502 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-config-data\") pod \"6754a19e-e024-4b15-8464-49e127bd35ad\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.447545 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c88f9c00-b02f-4070-b81e-733009e44691-logs\") pod \"c88f9c00-b02f-4070-b81e-733009e44691\" (UID: \"c88f9c00-b02f-4070-b81e-733009e44691\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.447570 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6754a19e-e024-4b15-8464-49e127bd35ad-etc-machine-id\") pod \"6754a19e-e024-4b15-8464-49e127bd35ad\" (UID: \"6754a19e-e024-4b15-8464-49e127bd35ad\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.447613 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-logs" (OuterVolumeSpecName: "logs") pod "2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" (UID: "2988a7d8-1d6b-46d8-b204-8e02d0be3b4d"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.448121 4971 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1206c914-fbe7-4e8f-8470-861b0ebf75de-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.448144 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58j4x\" (UniqueName: \"kubernetes.io/projected/1206c914-fbe7-4e8f-8470-861b0ebf75de-kube-api-access-58j4x\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.448162 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkcmh\" (UniqueName: \"kubernetes.io/projected/bcf58afd-21c6-4c9d-8702-09bc98859732-kube-api-access-kkcmh\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.448185 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.448195 4971 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bcf58afd-21c6-4c9d-8702-09bc98859732-kolla-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.448219 4971 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.448229 4971 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.448240 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbjj6\" (UniqueName: \"kubernetes.io/projected/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-kube-api-access-qbjj6\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.448248 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-logs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.448263 4971 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.448273 4971 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.448283 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bcf58afd-21c6-4c9d-8702-09bc98859732-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.448292 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1206c914-fbe7-4e8f-8470-861b0ebf75de-logs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: 
I1127 07:18:53.448301 4971 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bcf58afd-21c6-4c9d-8702-09bc98859732-config-data-generated\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.448310 4971 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bcf58afd-21c6-4c9d-8702-09bc98859732-config-data-default\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.448319 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-logs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.448330 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.449255 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6754a19e-e024-4b15-8464-49e127bd35ad-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "6754a19e-e024-4b15-8464-49e127bd35ad" (UID: "6754a19e-e024-4b15-8464-49e127bd35ad"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.450362 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c88f9c00-b02f-4070-b81e-733009e44691-logs" (OuterVolumeSpecName: "logs") pod "c88f9c00-b02f-4070-b81e-733009e44691" (UID: "c88f9c00-b02f-4070-b81e-733009e44691"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.454381 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.468417 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-scripts" (OuterVolumeSpecName: "scripts") pod "c88f9c00-b02f-4070-b81e-733009e44691" (UID: "c88f9c00-b02f-4070-b81e-733009e44691"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.472738 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcf58afd-21c6-4c9d-8702-09bc98859732-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "bcf58afd-21c6-4c9d-8702-09bc98859732" (UID: "bcf58afd-21c6-4c9d-8702-09bc98859732"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.480279 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.496330 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "6754a19e-e024-4b15-8464-49e127bd35ad" (UID: "6754a19e-e024-4b15-8464-49e127bd35ad"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.496368 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6754a19e-e024-4b15-8464-49e127bd35ad-kube-api-access-vjvxt" (OuterVolumeSpecName: "kube-api-access-vjvxt") pod "6754a19e-e024-4b15-8464-49e127bd35ad" (UID: "6754a19e-e024-4b15-8464-49e127bd35ad"). InnerVolumeSpecName "kube-api-access-vjvxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.496398 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" (UID: "2988a7d8-1d6b-46d8-b204-8e02d0be3b4d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.496426 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c88f9c00-b02f-4070-b81e-733009e44691-kube-api-access-b9dqn" (OuterVolumeSpecName: "kube-api-access-b9dqn") pod "c88f9c00-b02f-4070-b81e-733009e44691" (UID: "c88f9c00-b02f-4070-b81e-733009e44691"). InnerVolumeSpecName "kube-api-access-b9dqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.496489 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-scripts" (OuterVolumeSpecName: "scripts") pod "2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" (UID: "2988a7d8-1d6b-46d8-b204-8e02d0be3b4d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.496731 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.497381 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-scripts" (OuterVolumeSpecName: "scripts") pod "6754a19e-e024-4b15-8464-49e127bd35ad" (UID: "6754a19e-e024-4b15-8464-49e127bd35ad"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.504369 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6e067498-7bc0-4bc5-a9a6-696c8aa3cf71" (UID: "6e067498-7bc0-4bc5-a9a6-696c8aa3cf71"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.542020 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-config-data" (OuterVolumeSpecName: "config-data") pod "6e067498-7bc0-4bc5-a9a6-696c8aa3cf71" (UID: "6e067498-7bc0-4bc5-a9a6-696c8aa3cf71"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.544057 4971 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.546555 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcf58afd-21c6-4c9d-8702-09bc98859732-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bcf58afd-21c6-4c9d-8702-09bc98859732" (UID: "bcf58afd-21c6-4c9d-8702-09bc98859732"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.549461 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6ndq\" (UniqueName: \"kubernetes.io/projected/51e69600-7930-4ccb-a8ff-0ad3377bf3a7-kube-api-access-r6ndq\") pod \"51e69600-7930-4ccb-a8ff-0ad3377bf3a7\" (UID: \"51e69600-7930-4ccb-a8ff-0ad3377bf3a7\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.549572 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/51e69600-7930-4ccb-a8ff-0ad3377bf3a7-operator-scripts\") pod \"51e69600-7930-4ccb-a8ff-0ad3377bf3a7\" (UID: \"51e69600-7930-4ccb-a8ff-0ad3377bf3a7\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.549777 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/83ad8915-ac3b-4891-ae7a-9b862747569f-operator-scripts\") pod \"83ad8915-ac3b-4891-ae7a-9b862747569f\" (UID: \"83ad8915-ac3b-4891-ae7a-9b862747569f\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.549839 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8hqt\" (UniqueName: \"kubernetes.io/projected/83ad8915-ac3b-4891-ae7a-9b862747569f-kube-api-access-w8hqt\") pod \"83ad8915-ac3b-4891-ae7a-9b862747569f\" (UID: \"83ad8915-ac3b-4891-ae7a-9b862747569f\") " Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.550420 4971 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6754a19e-e024-4b15-8464-49e127bd35ad-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.550443 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.550458 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b9dqn\" (UniqueName: \"kubernetes.io/projected/c88f9c00-b02f-4070-b81e-733009e44691-kube-api-access-b9dqn\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.550471 4971 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcf58afd-21c6-4c9d-8702-09bc98859732-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.550482 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:53 crc 
kubenswrapper[4971]: I1127 07:18:53.550494 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcf58afd-21c6-4c9d-8702-09bc98859732-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.550504 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vjvxt\" (UniqueName: \"kubernetes.io/projected/6754a19e-e024-4b15-8464-49e127bd35ad-kube-api-access-vjvxt\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.550513 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.550521 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-scripts\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.550544 4971 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-config-data-custom\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.550553 4971 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-config-data-custom\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.550561 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-scripts\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.550570 4971 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.550579 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c88f9c00-b02f-4070-b81e-733009e44691-logs\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.553699 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83ad8915-ac3b-4891-ae7a-9b862747569f-kube-api-access-w8hqt" (OuterVolumeSpecName: "kube-api-access-w8hqt") pod "83ad8915-ac3b-4891-ae7a-9b862747569f" (UID: "83ad8915-ac3b-4891-ae7a-9b862747569f"). InnerVolumeSpecName "kube-api-access-w8hqt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.557666 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51e69600-7930-4ccb-a8ff-0ad3377bf3a7-kube-api-access-r6ndq" (OuterVolumeSpecName: "kube-api-access-r6ndq") pod "51e69600-7930-4ccb-a8ff-0ad3377bf3a7" (UID: "51e69600-7930-4ccb-a8ff-0ad3377bf3a7"). InnerVolumeSpecName "kube-api-access-r6ndq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.558101 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83ad8915-ac3b-4891-ae7a-9b862747569f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "83ad8915-ac3b-4891-ae7a-9b862747569f" (UID: "83ad8915-ac3b-4891-ae7a-9b862747569f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.559167 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-kube-api-access-qx8kk" (OuterVolumeSpecName: "kube-api-access-qx8kk") pod "2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" (UID: "2988a7d8-1d6b-46d8-b204-8e02d0be3b4d"). InnerVolumeSpecName "kube-api-access-qx8kk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.564155 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi6022-account-delete-bhttv" event={"ID":"cbef532c-59fb-40ac-bde3-35b8f3616d85","Type":"ContainerStarted","Data":"934521e5549c4b670a151928313a07441d4a31af6657c1c703e07932bf31d32e"}
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.564851 4971 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/novaapi6022-account-delete-bhttv" secret="" err="secret \"galera-openstack-dockercfg-kzxgw\" not found"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.564880 4971 scope.go:117] "RemoveContainer" containerID="934521e5549c4b670a151928313a07441d4a31af6657c1c703e07932bf31d32e"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.569024 4971 generic.go:334] "Generic (PLEG): container finished" podID="a2b1d941-1738-4967-a97c-6a7b2c36531c" containerID="b06e49f36aeb9385737ef57f67601c2cecfa59d6329f3deb519661b9fff43052" exitCode=2
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.569120 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"a2b1d941-1738-4967-a97c-6a7b2c36531c","Type":"ContainerDied","Data":"b06e49f36aeb9385737ef57f67601c2cecfa59d6329f3deb519661b9fff43052"}
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.569165 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"a2b1d941-1738-4967-a97c-6a7b2c36531c","Type":"ContainerDied","Data":"2a4188f91760c23554f5eac7dfb3f6ac1589fb2bbdd197bfbdf22259d93bade4"}
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.569182 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2a4188f91760c23554f5eac7dfb3f6ac1589fb2bbdd197bfbdf22259d93bade4"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.574541 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell091e4-account-delete-8kghx" event={"ID":"19b68245-2b99-4337-892f-059f05113ad6","Type":"ContainerStarted","Data":"d248fffbf5a9fd8a392c19a71137172faa28f77aa6cf43793ddc632cbb05dd20"}
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.577543 4971 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/novacell091e4-account-delete-8kghx" secret="" err="secret \"galera-openstack-dockercfg-kzxgw\" not found"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.581109 4971 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.586018 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51e69600-7930-4ccb-a8ff-0ad3377bf3a7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "51e69600-7930-4ccb-a8ff-0ad3377bf3a7" (UID: "51e69600-7930-4ccb-a8ff-0ad3377bf3a7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.597037 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1206c914-fbe7-4e8f-8470-861b0ebf75de" (UID: "1206c914-fbe7-4e8f-8470-861b0ebf75de"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: E1127 07:18:53.606003 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b97556ba2fd93384c3dcc5c7577bad7f95a4205ce2fc105bf6f734f22b25ff2a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.606390 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-config-data" (OuterVolumeSpecName: "config-data") pod "1206c914-fbe7-4e8f-8470-861b0ebf75de" (UID: "1206c914-fbe7-4e8f-8470-861b0ebf75de"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.607470 4971 generic.go:334] "Generic (PLEG): container finished" podID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerID="246b3a277e05318dff5ef1ad5944cae22aa133678c3e078b2b0e80c043b33db8" exitCode=0
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.607501 4971 generic.go:334] "Generic (PLEG): container finished" podID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerID="0b2dec937686384d25cd26d898333c4fc81933ae416ed8605ae98f8c6d7ffe0d" exitCode=2
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.607511 4971 generic.go:334] "Generic (PLEG): container finished" podID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerID="e42e5fec5c109e2ea0c69c634a81c7905e5cc3f5aa9cfe275d500939cb600b2b" exitCode=0
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.607572 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41a3ef31-df5e-4cb9-8983-40a16f46823c","Type":"ContainerDied","Data":"246b3a277e05318dff5ef1ad5944cae22aa133678c3e078b2b0e80c043b33db8"}
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.607600 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41a3ef31-df5e-4cb9-8983-40a16f46823c","Type":"ContainerDied","Data":"0b2dec937686384d25cd26d898333c4fc81933ae416ed8605ae98f8c6d7ffe0d"}
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.607611 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41a3ef31-df5e-4cb9-8983-40a16f46823c","Type":"ContainerDied","Data":"e42e5fec5c109e2ea0c69c634a81c7905e5cc3f5aa9cfe275d500939cb600b2b"}
Nov 27 07:18:53 crc kubenswrapper[4971]: E1127 07:18:53.611921 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b97556ba2fd93384c3dcc5c7577bad7f95a4205ce2fc105bf6f734f22b25ff2a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.619110 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_abd4a589-1b2e-4559-852f-2c27c0d8c459/ovn-northd/0.log"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.619164 4971 generic.go:334] "Generic (PLEG): container finished" podID="abd4a589-1b2e-4559-852f-2c27c0d8c459" containerID="c9d172c81f9f3625bed2063b71b15aa0aaee90448e7484f3536af33829d91bf8" exitCode=139
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.619249 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"abd4a589-1b2e-4559-852f-2c27c0d8c459","Type":"ContainerDied","Data":"c9d172c81f9f3625bed2063b71b15aa0aaee90448e7484f3536af33829d91bf8"}
Nov 27 07:18:53 crc kubenswrapper[4971]: E1127 07:18:53.624358 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b97556ba2fd93384c3dcc5c7577bad7f95a4205ce2fc105bf6f734f22b25ff2a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Nov 27 07:18:53 crc kubenswrapper[4971]: E1127 07:18:53.624437 4971 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="36cda10d-7f8f-403c-82b8-fbbdf89e8ed5" containerName="nova-scheduler-scheduler"
Nov 27 07:18:53 crc kubenswrapper[4971]: E1127 07:18:53.633736 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bacf1459d637ebaf1473ffbba9c90d2886c77004f0c01d13c0668368ade96df2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.634092 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.634570 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron1af4-account-delete-48m5w" event={"ID":"91f979c6-21cc-4848-9eb5-b8bfc4abf082","Type":"ContainerDied","Data":"fd5d4c775a7fb450ac8ff1db182633c99de47c66fdf05041c3bf7ad5c72a8003"}
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.634598 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd5d4c775a7fb450ac8ff1db182633c99de47c66fdf05041c3bf7ad5c72a8003"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.634739 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-66674dfb5f-52hcq"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.634857 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.634914 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance7577-account-delete-hmqt9"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.634957 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.635007 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell16991-account-delete-ftkpc"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.635084 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5b7b448d48-2wggc"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.635138 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-65cf5bcbb-zf65t"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.635172 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.635208 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.635707 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder6f3a-account-delete-nzrmk"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.635758 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.635802 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7865dbd7d9-zp55h"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.650781 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" (UID: "2988a7d8-1d6b-46d8-b204-8e02d0be3b4d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.651595 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c2e2055e-1200-46e8-a49e-c6b490702c9b-httpd-run\") pod \"c2e2055e-1200-46e8-a49e-c6b490702c9b\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") "
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.651641 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2e2055e-1200-46e8-a49e-c6b490702c9b-logs\") pod \"c2e2055e-1200-46e8-a49e-c6b490702c9b\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") "
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.651731 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-config-data\") pod \"c2e2055e-1200-46e8-a49e-c6b490702c9b\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") "
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.651823 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"c2e2055e-1200-46e8-a49e-c6b490702c9b\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") "
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.651840 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-combined-ca-bundle\") pod \"c2e2055e-1200-46e8-a49e-c6b490702c9b\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") "
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.651885 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-internal-tls-certs\") pod \"c2e2055e-1200-46e8-a49e-c6b490702c9b\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") "
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.651924 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xpdz5\" (UniqueName: \"kubernetes.io/projected/c2e2055e-1200-46e8-a49e-c6b490702c9b-kube-api-access-xpdz5\") pod \"c2e2055e-1200-46e8-a49e-c6b490702c9b\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") "
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.651956 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-scripts\") pod \"c2e2055e-1200-46e8-a49e-c6b490702c9b\" (UID: \"c2e2055e-1200-46e8-a49e-c6b490702c9b\") "
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.652386 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qx8kk\" (UniqueName: \"kubernetes.io/projected/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-kube-api-access-qx8kk\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.652410 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.652422 4971 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1206c914-fbe7-4e8f-8470-861b0ebf75de-public-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.652434 4971 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.652444 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6ndq\" (UniqueName: \"kubernetes.io/projected/51e69600-7930-4ccb-a8ff-0ad3377bf3a7-kube-api-access-r6ndq\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.652452 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/51e69600-7930-4ccb-a8ff-0ad3377bf3a7-operator-scripts\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.652461 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.652470 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/83ad8915-ac3b-4891-ae7a-9b862747569f-operator-scripts\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.652479 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8hqt\" (UniqueName: \"kubernetes.io/projected/83ad8915-ac3b-4891-ae7a-9b862747569f-kube-api-access-w8hqt\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: E1127 07:18:53.652528 4971 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found
Nov 27 07:18:53 crc kubenswrapper[4971]: E1127 07:18:53.652582 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cbef532c-59fb-40ac-bde3-35b8f3616d85-operator-scripts podName:cbef532c-59fb-40ac-bde3-35b8f3616d85 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:54.152567154 +0000 UTC m=+1572.344611072 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/cbef532c-59fb-40ac-bde3-35b8f3616d85-operator-scripts") pod "novaapi6022-account-delete-bhttv" (UID: "cbef532c-59fb-40ac-bde3-35b8f3616d85") : configmap "openstack-scripts" not found
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.656022 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2e2055e-1200-46e8-a49e-c6b490702c9b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c2e2055e-1200-46e8-a49e-c6b490702c9b" (UID: "c2e2055e-1200-46e8-a49e-c6b490702c9b"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.656271 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2e2055e-1200-46e8-a49e-c6b490702c9b-logs" (OuterVolumeSpecName: "logs") pod "c2e2055e-1200-46e8-a49e-c6b490702c9b" (UID: "c2e2055e-1200-46e8-a49e-c6b490702c9b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: E1127 07:18:53.658067 4971 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found
Nov 27 07:18:53 crc kubenswrapper[4971]: E1127 07:18:53.658128 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/19b68245-2b99-4337-892f-059f05113ad6-operator-scripts podName:19b68245-2b99-4337-892f-059f05113ad6 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:54.158107074 +0000 UTC m=+1572.350150992 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/19b68245-2b99-4337-892f-059f05113ad6-operator-scripts") pod "novacell091e4-account-delete-8kghx" (UID: "19b68245-2b99-4337-892f-059f05113ad6") : configmap "openstack-scripts" not found
Nov 27 07:18:53 crc kubenswrapper[4971]: E1127 07:18:53.663647 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bacf1459d637ebaf1473ffbba9c90d2886c77004f0c01d13c0668368ade96df2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Nov 27 07:18:53 crc kubenswrapper[4971]: E1127 07:18:53.668937 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bacf1459d637ebaf1473ffbba9c90d2886c77004f0c01d13c0668368ade96df2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Nov 27 07:18:53 crc kubenswrapper[4971]: E1127 07:18:53.669014 4971 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="7f461fed-9df2-44a5-b99c-17f30adf0d9c" containerName="nova-cell1-conductor-conductor"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.678629 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-config-data" (OuterVolumeSpecName: "config-data") pod "2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" (UID: "2988a7d8-1d6b-46d8-b204-8e02d0be3b4d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.679564 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-scripts" (OuterVolumeSpecName: "scripts") pod "c2e2055e-1200-46e8-a49e-c6b490702c9b" (UID: "c2e2055e-1200-46e8-a49e-c6b490702c9b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.689990 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "c2e2055e-1200-46e8-a49e-c6b490702c9b" (UID: "c2e2055e-1200-46e8-a49e-c6b490702c9b"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.690168 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2e2055e-1200-46e8-a49e-c6b490702c9b-kube-api-access-xpdz5" (OuterVolumeSpecName: "kube-api-access-xpdz5") pod "c2e2055e-1200-46e8-a49e-c6b490702c9b" (UID: "c2e2055e-1200-46e8-a49e-c6b490702c9b"). InnerVolumeSpecName "kube-api-access-xpdz5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.710473 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" (UID: "2988a7d8-1d6b-46d8-b204-8e02d0be3b4d"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.730690 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c88f9c00-b02f-4070-b81e-733009e44691" (UID: "c88f9c00-b02f-4070-b81e-733009e44691"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.757232 4971 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" "
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.757276 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.757285 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xpdz5\" (UniqueName: \"kubernetes.io/projected/c2e2055e-1200-46e8-a49e-c6b490702c9b-kube-api-access-xpdz5\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.757296 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-scripts\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.757305 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.757315 4971 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c2e2055e-1200-46e8-a49e-c6b490702c9b-httpd-run\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.757333 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c2e2055e-1200-46e8-a49e-c6b490702c9b-logs\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.757345 4971 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-public-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.768035 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-config-data" (OuterVolumeSpecName: "config-data") pod "c88f9c00-b02f-4070-b81e-733009e44691" (UID: "c88f9c00-b02f-4070-b81e-733009e44691"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.777780 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6754a19e-e024-4b15-8464-49e127bd35ad" (UID: "6754a19e-e024-4b15-8464-49e127bd35ad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.781686 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" (UID: "2988a7d8-1d6b-46d8-b204-8e02d0be3b4d"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.795022 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c2e2055e-1200-46e8-a49e-c6b490702c9b" (UID: "c2e2055e-1200-46e8-a49e-c6b490702c9b"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.806067 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c88f9c00-b02f-4070-b81e-733009e44691" (UID: "c88f9c00-b02f-4070-b81e-733009e44691"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.806761 4971 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.808258 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c2e2055e-1200-46e8-a49e-c6b490702c9b" (UID: "c2e2055e-1200-46e8-a49e-c6b490702c9b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.818658 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-config-data" (OuterVolumeSpecName: "config-data") pod "c2e2055e-1200-46e8-a49e-c6b490702c9b" (UID: "c2e2055e-1200-46e8-a49e-c6b490702c9b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.825813 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c88f9c00-b02f-4070-b81e-733009e44691" (UID: "c88f9c00-b02f-4070-b81e-733009e44691"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.829406 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-config-data" (OuterVolumeSpecName: "config-data") pod "6754a19e-e024-4b15-8464-49e127bd35ad" (UID: "6754a19e-e024-4b15-8464-49e127bd35ad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.839147 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novacell091e4-account-delete-8kghx" podStartSLOduration=8.839130333 podStartE2EDuration="8.839130333s" podCreationTimestamp="2025-11-27 07:18:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:18:53.690129957 +0000 UTC m=+1571.882173865" watchObservedRunningTime="2025-11-27 07:18:53.839130333 +0000 UTC m=+1572.031174251"
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.871051 4971 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.871077 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.871088 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.871097 4971 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.871108 4971 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.871116 4971 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c88f9c00-b02f-4070-b81e-733009e44691-public-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.871125 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.871135 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6754a19e-e024-4b15-8464-49e127bd35ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.871145 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2e2055e-1200-46e8-a49e-c6b490702c9b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.871155 4971 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:53 crc kubenswrapper[4971]: I1127 07:18:53.999005 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-cell1-novncproxy-0" podUID="fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea" containerName="nova-cell1-novncproxy-novncproxy" probeResult="failure" output="Get \"https://10.217.0.196:6080/vnc_lite.html\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.143184 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.151344 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c9d172c81f9f3625bed2063b71b15aa0aaee90448e7484f3536af33829d91bf8 is running failed: container process not found" containerID="c9d172c81f9f3625bed2063b71b15aa0aaee90448e7484f3536af33829d91bf8" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.151950 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c9d172c81f9f3625bed2063b71b15aa0aaee90448e7484f3536af33829d91bf8 is running failed: container process not found" containerID="c9d172c81f9f3625bed2063b71b15aa0aaee90448e7484f3536af33829d91bf8" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.152286 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c9d172c81f9f3625bed2063b71b15aa0aaee90448e7484f3536af33829d91bf8 is running failed: container process not found" containerID="c9d172c81f9f3625bed2063b71b15aa0aaee90448e7484f3536af33829d91bf8" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.152317 4971 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c9d172c81f9f3625bed2063b71b15aa0aaee90448e7484f3536af33829d91bf8 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="abd4a589-1b2e-4559-852f-2c27c0d8c459" containerName="ovn-northd"
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.158769 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-85d46db856-nzmcq"
Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.179832 4971 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found
Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.180276 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/19b68245-2b99-4337-892f-059f05113ad6-operator-scripts podName:19b68245-2b99-4337-892f-059f05113ad6 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:55.180144494 +0000 UTC m=+1573.372188402 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/19b68245-2b99-4337-892f-059f05113ad6-operator-scripts") pod "novacell091e4-account-delete-8kghx" (UID: "19b68245-2b99-4337-892f-059f05113ad6") : configmap "openstack-scripts" not found
Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.183678 4971 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found
Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.183805 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cbef532c-59fb-40ac-bde3-35b8f3616d85-operator-scripts podName:cbef532c-59fb-40ac-bde3-35b8f3616d85 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:55.183771758 +0000 UTC m=+1573.375815676 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/cbef532c-59fb-40ac-bde3-35b8f3616d85-operator-scripts") pod "novaapi6022-account-delete-bhttv" (UID: "cbef532c-59fb-40ac-bde3-35b8f3616d85") : configmap "openstack-scripts" not found
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.214366 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.225205 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.234623 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron1af4-account-delete-48m5w"
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.249697 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.258561 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell16991-account-delete-ftkpc"
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.279004 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_abd4a589-1b2e-4559-852f-2c27c0d8c459/ovn-northd/0.log"
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.279097 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.282656 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.287601 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35b90587-df5b-4f15-8c34-f1b0a8506d85-logs\") pod \"35b90587-df5b-4f15-8c34-f1b0a8506d85\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.287653 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-combined-ca-bundle\") pod \"fc889790-089f-4007-876f-874880dad975\" (UID: \"fc889790-089f-4007-876f-874880dad975\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.287683 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-config-data\") pod \"fc889790-089f-4007-876f-874880dad975\" (UID: \"fc889790-089f-4007-876f-874880dad975\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.287740 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-internal-tls-certs\") pod \"fc889790-089f-4007-876f-874880dad975\" (UID: \"fc889790-089f-4007-876f-874880dad975\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.287789 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-public-tls-certs\") pod \"fc889790-089f-4007-876f-874880dad975\" (UID: \"fc889790-089f-4007-876f-874880dad975\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.287823 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wrt9w\" (UniqueName: \"kubernetes.io/projected/35b90587-df5b-4f15-8c34-f1b0a8506d85-kube-api-access-wrt9w\") pod \"35b90587-df5b-4f15-8c34-f1b0a8506d85\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.287875 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqvtl\" (UniqueName: \"kubernetes.io/projected/fc889790-089f-4007-876f-874880dad975-kube-api-access-rqvtl\") pod \"fc889790-089f-4007-876f-874880dad975\" (UID: \"fc889790-089f-4007-876f-874880dad975\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.287926 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-config-data-custom\") pod \"fc889790-089f-4007-876f-874880dad975\" (UID: \"fc889790-089f-4007-876f-874880dad975\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.287971 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/35b90587-df5b-4f15-8c34-f1b0a8506d85-nova-metadata-tls-certs\") pod \"35b90587-df5b-4f15-8c34-f1b0a8506d85\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.288014 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35b90587-df5b-4f15-8c34-f1b0a8506d85-config-data\") pod \"35b90587-df5b-4f15-8c34-f1b0a8506d85\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.288040 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35b90587-df5b-4f15-8c34-f1b0a8506d85-combined-ca-bundle\") pod \"35b90587-df5b-4f15-8c34-f1b0a8506d85\" (UID: \"35b90587-df5b-4f15-8c34-f1b0a8506d85\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.288099 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc889790-089f-4007-876f-874880dad975-logs\") pod \"fc889790-089f-4007-876f-874880dad975\" (UID: \"fc889790-089f-4007-876f-874880dad975\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.290915 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc889790-089f-4007-876f-874880dad975-logs" (OuterVolumeSpecName: "logs") pod "fc889790-089f-4007-876f-874880dad975" (UID: "fc889790-089f-4007-876f-874880dad975"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.293073 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35b90587-df5b-4f15-8c34-f1b0a8506d85-logs" (OuterVolumeSpecName: "logs") pod "35b90587-df5b-4f15-8c34-f1b0a8506d85" (UID: "35b90587-df5b-4f15-8c34-f1b0a8506d85"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.297976 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35b90587-df5b-4f15-8c34-f1b0a8506d85-kube-api-access-wrt9w" (OuterVolumeSpecName: "kube-api-access-wrt9w") pod "35b90587-df5b-4f15-8c34-f1b0a8506d85" (UID: "35b90587-df5b-4f15-8c34-f1b0a8506d85"). InnerVolumeSpecName "kube-api-access-wrt9w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.302609 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.319973 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc889790-089f-4007-876f-874880dad975-kube-api-access-rqvtl" (OuterVolumeSpecName: "kube-api-access-rqvtl") pod "fc889790-089f-4007-876f-874880dad975" (UID: "fc889790-089f-4007-876f-874880dad975"). InnerVolumeSpecName "kube-api-access-rqvtl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.320246 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "fc889790-089f-4007-876f-874880dad975" (UID: "fc889790-089f-4007-876f-874880dad975"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.328202 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35b90587-df5b-4f15-8c34-f1b0a8506d85-config-data" (OuterVolumeSpecName: "config-data") pod "35b90587-df5b-4f15-8c34-f1b0a8506d85" (UID: "35b90587-df5b-4f15-8c34-f1b0a8506d85"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.332867 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.346021 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.361180 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.369067 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fc889790-089f-4007-876f-874880dad975" (UID: "fc889790-089f-4007-876f-874880dad975"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.375132 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"]
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389186 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2b1d941-1738-4967-a97c-6a7b2c36531c-combined-ca-bundle\") pod \"a2b1d941-1738-4967-a97c-6a7b2c36531c\" (UID: \"a2b1d941-1738-4967-a97c-6a7b2c36531c\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389213 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-combined-ca-bundle\") pod \"69d47892-79da-4e4e-8de2-a84801d4d6b9\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389240 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-internal-tls-certs\") pod \"69d47892-79da-4e4e-8de2-a84801d4d6b9\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389258 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91f979c6-21cc-4848-9eb5-b8bfc4abf082-operator-scripts\") pod \"91f979c6-21cc-4848-9eb5-b8bfc4abf082\" (UID: \"91f979c6-21cc-4848-9eb5-b8bfc4abf082\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389290 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abd4a589-1b2e-4559-852f-2c27c0d8c459-combined-ca-bundle\") pod \"abd4a589-1b2e-4559-852f-2c27c0d8c459\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389322 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69d47892-79da-4e4e-8de2-a84801d4d6b9-logs\") pod \"69d47892-79da-4e4e-8de2-a84801d4d6b9\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389343 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6b9n7\" (UniqueName: \"kubernetes.io/projected/bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad-kube-api-access-6b9n7\") pod \"bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad\" (UID: \"bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389367 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tllm6\" (UniqueName: \"kubernetes.io/projected/91f979c6-21cc-4848-9eb5-b8bfc4abf082-kube-api-access-tllm6\") pod \"91f979c6-21cc-4848-9eb5-b8bfc4abf082\" (UID: \"91f979c6-21cc-4848-9eb5-b8bfc4abf082\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389420 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad-config-data\") pod \"bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad\" (UID: \"bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389440 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dzdm\" (UniqueName: \"kubernetes.io/projected/abd4a589-1b2e-4559-852f-2c27c0d8c459-kube-api-access-4dzdm\") pod \"abd4a589-1b2e-4559-852f-2c27c0d8c459\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389469 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/a2b1d941-1738-4967-a97c-6a7b2c36531c-kube-state-metrics-tls-config\") pod \"a2b1d941-1738-4967-a97c-6a7b2c36531c\" (UID: \"a2b1d941-1738-4967-a97c-6a7b2c36531c\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389485 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-config-data\") pod \"69d47892-79da-4e4e-8de2-a84801d4d6b9\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389506 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-blnzp\" (UniqueName: \"kubernetes.io/projected/69d47892-79da-4e4e-8de2-a84801d4d6b9-kube-api-access-blnzp\") pod \"69d47892-79da-4e4e-8de2-a84801d4d6b9\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389603 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2b1d941-1738-4967-a97c-6a7b2c36531c-kube-state-metrics-tls-certs\") pod \"a2b1d941-1738-4967-a97c-6a7b2c36531c\" (UID: \"a2b1d941-1738-4967-a97c-6a7b2c36531c\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389706 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/abd4a589-1b2e-4559-852f-2c27c0d8c459-ovn-northd-tls-certs\") pod \"abd4a589-1b2e-4559-852f-2c27c0d8c459\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389735 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abd4a589-1b2e-4559-852f-2c27c0d8c459-scripts\") pod \"abd4a589-1b2e-4559-852f-2c27c0d8c459\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389753 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-public-tls-certs\") pod \"69d47892-79da-4e4e-8de2-a84801d4d6b9\" (UID: \"69d47892-79da-4e4e-8de2-a84801d4d6b9\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389866 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad-combined-ca-bundle\") pod \"bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad\" (UID: \"bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389916 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abd4a589-1b2e-4559-852f-2c27c0d8c459-config\") pod \"abd4a589-1b2e-4559-852f-2c27c0d8c459\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389938 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/abd4a589-1b2e-4559-852f-2c27c0d8c459-metrics-certs-tls-certs\") pod \"abd4a589-1b2e-4559-852f-2c27c0d8c459\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.389975 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/abd4a589-1b2e-4559-852f-2c27c0d8c459-ovn-rundir\") pod \"abd4a589-1b2e-4559-852f-2c27c0d8c459\" (UID: \"abd4a589-1b2e-4559-852f-2c27c0d8c459\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.390027 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mcr6v\" (UniqueName: \"kubernetes.io/projected/a2b1d941-1738-4967-a97c-6a7b2c36531c-kube-api-access-mcr6v\") pod \"a2b1d941-1738-4967-a97c-6a7b2c36531c\" (UID: \"a2b1d941-1738-4967-a97c-6a7b2c36531c\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.390639 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc889790-089f-4007-876f-874880dad975-logs\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.390668 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/35b90587-df5b-4f15-8c34-f1b0a8506d85-logs\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.390682 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.390696 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wrt9w\" (UniqueName: \"kubernetes.io/projected/35b90587-df5b-4f15-8c34-f1b0a8506d85-kube-api-access-wrt9w\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.390708 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqvtl\" (UniqueName: \"kubernetes.io/projected/fc889790-089f-4007-876f-874880dad975-kube-api-access-rqvtl\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.390723 4971 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-config-data-custom\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.390734 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35b90587-df5b-4f15-8c34-f1b0a8506d85-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.391156 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91f979c6-21cc-4848-9eb5-b8bfc4abf082-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "91f979c6-21cc-4848-9eb5-b8bfc4abf082" (UID: "91f979c6-21cc-4848-9eb5-b8bfc4abf082"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.402575 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69d47892-79da-4e4e-8de2-a84801d4d6b9-logs" (OuterVolumeSpecName: "logs") pod "69d47892-79da-4e4e-8de2-a84801d4d6b9" (UID: "69d47892-79da-4e4e-8de2-a84801d4d6b9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.406555 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abd4a589-1b2e-4559-852f-2c27c0d8c459-scripts" (OuterVolumeSpecName: "scripts") pod "abd4a589-1b2e-4559-852f-2c27c0d8c459" (UID: "abd4a589-1b2e-4559-852f-2c27c0d8c459"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.408790 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abd4a589-1b2e-4559-852f-2c27c0d8c459-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "abd4a589-1b2e-4559-852f-2c27c0d8c459" (UID: "abd4a589-1b2e-4559-852f-2c27c0d8c459"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.415268 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69d47892-79da-4e4e-8de2-a84801d4d6b9-kube-api-access-blnzp" (OuterVolumeSpecName: "kube-api-access-blnzp") pod "69d47892-79da-4e4e-8de2-a84801d4d6b9" (UID: "69d47892-79da-4e4e-8de2-a84801d4d6b9"). InnerVolumeSpecName "kube-api-access-blnzp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.417012 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abd4a589-1b2e-4559-852f-2c27c0d8c459-config" (OuterVolumeSpecName: "config") pod "abd4a589-1b2e-4559-852f-2c27c0d8c459" (UID: "abd4a589-1b2e-4559-852f-2c27c0d8c459"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.423622 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement0147-account-delete-w7x5l"
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.424671 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2b1d941-1738-4967-a97c-6a7b2c36531c-kube-api-access-mcr6v" (OuterVolumeSpecName: "kube-api-access-mcr6v") pod "a2b1d941-1738-4967-a97c-6a7b2c36531c" (UID: "a2b1d941-1738-4967-a97c-6a7b2c36531c"). InnerVolumeSpecName "kube-api-access-mcr6v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.428831 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-66674dfb5f-52hcq"]
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.434922 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.435804 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91f979c6-21cc-4848-9eb5-b8bfc4abf082-kube-api-access-tllm6" (OuterVolumeSpecName: "kube-api-access-tllm6") pod "91f979c6-21cc-4848-9eb5-b8bfc4abf082" (UID: "91f979c6-21cc-4848-9eb5-b8bfc4abf082"). InnerVolumeSpecName "kube-api-access-tllm6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.444936 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-66674dfb5f-52hcq"]
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.456900 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad-kube-api-access-6b9n7" (OuterVolumeSpecName: "kube-api-access-6b9n7") pod "bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad" (UID: "bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad"). InnerVolumeSpecName "kube-api-access-6b9n7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.457007 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abd4a589-1b2e-4559-852f-2c27c0d8c459-kube-api-access-4dzdm" (OuterVolumeSpecName: "kube-api-access-4dzdm") pod "abd4a589-1b2e-4559-852f-2c27c0d8c459" (UID: "abd4a589-1b2e-4559-852f-2c27c0d8c459"). InnerVolumeSpecName "kube-api-access-4dzdm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.458215 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.468869 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"]
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.486144 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.492394 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfwrd\" (UniqueName: \"kubernetes.io/projected/c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d-kube-api-access-nfwrd\") pod \"c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d\" (UID: \"c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.492457 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6833730-f034-4b5f-954a-19e993167f04-memcached-tls-certs\") pod \"d6833730-f034-4b5f-954a-19e993167f04\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.492480 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvd4d\" (UniqueName: \"kubernetes.io/projected/d6833730-f034-4b5f-954a-19e993167f04-kube-api-access-tvd4d\") pod \"d6833730-f034-4b5f-954a-19e993167f04\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.492503 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d6833730-f034-4b5f-954a-19e993167f04-kolla-config\") pod \"d6833730-f034-4b5f-954a-19e993167f04\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.496269 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6833730-f034-4b5f-954a-19e993167f04-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "d6833730-f034-4b5f-954a-19e993167f04" (UID: "d6833730-f034-4b5f-954a-19e993167f04"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.509091 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.510596 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6833730-f034-4b5f-954a-19e993167f04-config-data\") pod \"d6833730-f034-4b5f-954a-19e993167f04\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.510703 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6833730-f034-4b5f-954a-19e993167f04-combined-ca-bundle\") pod \"d6833730-f034-4b5f-954a-19e993167f04\" (UID: \"d6833730-f034-4b5f-954a-19e993167f04\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.510844 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d-operator-scripts\") pod \"c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d\" (UID: \"c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d\") "
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.511389 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzqfv\" (UniqueName: \"kubernetes.io/projected/9411d1e4-0384-4caf-b95e-e891b811b402-kube-api-access-vzqfv\") pod \"novacell16991-account-delete-ftkpc\" (UID: \"9411d1e4-0384-4caf-b95e-e891b811b402\") " pod="openstack/novacell16991-account-delete-ftkpc"
Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.511461 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d" (UID: "c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d"). InnerVolumeSpecName "operator-scripts".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.511617 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9411d1e4-0384-4caf-b95e-e891b811b402-operator-scripts\") pod \"novacell16991-account-delete-ftkpc\" (UID: \"9411d1e4-0384-4caf-b95e-e891b811b402\") " pod="openstack/novacell16991-account-delete-ftkpc" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.511692 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dzdm\" (UniqueName: \"kubernetes.io/projected/abd4a589-1b2e-4559-852f-2c27c0d8c459-kube-api-access-4dzdm\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.511709 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-blnzp\" (UniqueName: \"kubernetes.io/projected/69d47892-79da-4e4e-8de2-a84801d4d6b9-kube-api-access-blnzp\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.511720 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/abd4a589-1b2e-4559-852f-2c27c0d8c459-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.511729 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.511740 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abd4a589-1b2e-4559-852f-2c27c0d8c459-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.511749 4971 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/abd4a589-1b2e-4559-852f-2c27c0d8c459-ovn-rundir\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.511760 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mcr6v\" (UniqueName: \"kubernetes.io/projected/a2b1d941-1738-4967-a97c-6a7b2c36531c-kube-api-access-mcr6v\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.511770 4971 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d6833730-f034-4b5f-954a-19e993167f04-kolla-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.511780 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91f979c6-21cc-4848-9eb5-b8bfc4abf082-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.511788 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69d47892-79da-4e4e-8de2-a84801d4d6b9-logs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.511797 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6b9n7\" (UniqueName: \"kubernetes.io/projected/bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad-kube-api-access-6b9n7\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.511805 4971 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-tllm6\" (UniqueName: \"kubernetes.io/projected/91f979c6-21cc-4848-9eb5-b8bfc4abf082-kube-api-access-tllm6\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.511862 4971 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.511902 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9411d1e4-0384-4caf-b95e-e891b811b402-operator-scripts podName:9411d1e4-0384-4caf-b95e-e891b811b402 nodeName:}" failed. No retries permitted until 2025-11-27 07:19:02.511887848 +0000 UTC m=+1580.703931766 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/9411d1e4-0384-4caf-b95e-e891b811b402-operator-scripts") pod "novacell16991-account-delete-ftkpc" (UID: "9411d1e4-0384-4caf-b95e-e891b811b402") : configmap "openstack-cell1-scripts" not found Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.513572 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6833730-f034-4b5f-954a-19e993167f04-config-data" (OuterVolumeSpecName: "config-data") pod "d6833730-f034-4b5f-954a-19e993167f04" (UID: "d6833730-f034-4b5f-954a-19e993167f04"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.514988 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-7865dbd7d9-zp55h"] Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.516620 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d-kube-api-access-nfwrd" (OuterVolumeSpecName: "kube-api-access-nfwrd") pod "c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d" (UID: "c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d"). InnerVolumeSpecName "kube-api-access-nfwrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.516719 4971 projected.go:194] Error preparing data for projected volume kube-api-access-vzqfv for pod openstack/novacell16991-account-delete-ftkpc: failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.516807 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9411d1e4-0384-4caf-b95e-e891b811b402-kube-api-access-vzqfv podName:9411d1e4-0384-4caf-b95e-e891b811b402 nodeName:}" failed. No retries permitted until 2025-11-27 07:19:02.516767349 +0000 UTC m=+1580.708811267 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-vzqfv" (UniqueName: "kubernetes.io/projected/9411d1e4-0384-4caf-b95e-e891b811b402-kube-api-access-vzqfv") pod "novacell16991-account-delete-ftkpc" (UID: "9411d1e4-0384-4caf-b95e-e891b811b402") : failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.541179 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6833730-f034-4b5f-954a-19e993167f04-kube-api-access-tvd4d" (OuterVolumeSpecName: "kube-api-access-tvd4d") pod "d6833730-f034-4b5f-954a-19e993167f04" (UID: "d6833730-f034-4b5f-954a-19e993167f04"). InnerVolumeSpecName "kube-api-access-tvd4d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.577524 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0aa6e1b6-c18b-4a02-a396-880350cde407" path="/var/lib/kubelet/pods/0aa6e1b6-c18b-4a02-a396-880350cde407/volumes" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.578278 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10d9f99e-353e-4988-a85f-0173312c0a24" path="/var/lib/kubelet/pods/10d9f99e-353e-4988-a85f-0173312c0a24/volumes" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.608025 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1206c914-fbe7-4e8f-8470-861b0ebf75de" path="/var/lib/kubelet/pods/1206c914-fbe7-4e8f-8470-861b0ebf75de/volumes" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.609525 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="201f9996-de6c-49c4-916c-78946e7d1144" path="/var/lib/kubelet/pods/201f9996-de6c-49c4-916c-78946e7d1144/volumes" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.611290 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" path="/var/lib/kubelet/pods/2988a7d8-1d6b-46d8-b204-8e02d0be3b4d/volumes" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.614092 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c01d5be-466c-4eec-a5b3-261fe0f47d9c" path="/var/lib/kubelet/pods/2c01d5be-466c-4eec-a5b3-261fe0f47d9c/volumes" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.615076 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="418a63d5-afc7-450f-a99a-3c67fedbbabd" path="/var/lib/kubelet/pods/418a63d5-afc7-450f-a99a-3c67fedbbabd/volumes" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.615615 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41a0569f-4523-4dc9-a40f-bc22d113d523" path="/var/lib/kubelet/pods/41a0569f-4523-4dc9-a40f-bc22d113d523/volumes" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.616450 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66f97b8c-cee1-4869-b034-c66f2830b14e" path="/var/lib/kubelet/pods/66f97b8c-cee1-4869-b034-c66f2830b14e/volumes" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.618585 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6754a19e-e024-4b15-8464-49e127bd35ad" path="/var/lib/kubelet/pods/6754a19e-e024-4b15-8464-49e127bd35ad/volumes" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.619428 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e13a581-61d0-4a1f-ad42-5f2783417c70" path="/var/lib/kubelet/pods/6e13a581-61d0-4a1f-ad42-5f2783417c70/volumes" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.621631 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82a33cc1-32fe-464f-ac33-b802fd32a4c1" path="/var/lib/kubelet/pods/82a33cc1-32fe-464f-ac33-b802fd32a4c1/volumes" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.622781 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3fa1872-f7d9-4531-bc33-619419f530a5" path="/var/lib/kubelet/pods/b3fa1872-f7d9-4531-bc33-619419f530a5/volumes" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.626285 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcf58afd-21c6-4c9d-8702-09bc98859732" 
path="/var/lib/kubelet/pods/bcf58afd-21c6-4c9d-8702-09bc98859732/volumes" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.626788 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfwrd\" (UniqueName: \"kubernetes.io/projected/c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d-kube-api-access-nfwrd\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.627155 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvd4d\" (UniqueName: \"kubernetes.io/projected/d6833730-f034-4b5f-954a-19e993167f04-kube-api-access-tvd4d\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.627492 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" path="/var/lib/kubelet/pods/e060bc1f-a47d-45ef-88bd-cfd0645f9ce2/volumes" Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.627882 4971 projected.go:288] Couldn't get configMap openstack/swift-storage-config-data: configmap "swift-storage-config-data" not found Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.627909 4971 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.627923 4971 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.627935 4971 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.627950 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6833730-f034-4b5f-954a-19e993167f04-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.628051 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift podName:b9ccc9bd-d955-4853-986f-95597f2c70e6 nodeName:}" failed. No retries permitted until 2025-11-27 07:19:02.628030916 +0000 UTC m=+1580.820074834 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift") pod "swift-storage-0" (UID: "b9ccc9bd-d955-4853-986f-95597f2c70e6") : [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.628890 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea" path="/var/lib/kubelet/pods/fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea/volumes" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.652637 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement0147-account-delete-w7x5l" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.654641 4971 generic.go:334] "Generic (PLEG): container finished" podID="d6439a3c-ee26-467c-8e42-5abbbf390f16" containerID="77882cbee517c91fc3e1f3a6a066be0183a0d5dff26c18bb82c636aa29cd5b4c" exitCode=0 Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.662125 4971 generic.go:334] "Generic (PLEG): container finished" podID="19b68245-2b99-4337-892f-059f05113ad6" containerID="d248fffbf5a9fd8a392c19a71137172faa28f77aa6cf43793ddc632cbb05dd20" exitCode=1 Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.662865 4971 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/novacell091e4-account-delete-8kghx" secret="" err="secret \"galera-openstack-dockercfg-kzxgw\" not found" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.662895 4971 scope.go:117] "RemoveContainer" containerID="d248fffbf5a9fd8a392c19a71137172faa28f77aa6cf43793ddc632cbb05dd20" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.669179 4971 generic.go:334] "Generic (PLEG): container finished" podID="640c3829-d2e9-49e1-82e3-bd213aa992dd" containerID="f5b92bc920247db85a57df75b23c5a3d6d3ba39c4b4024014d369972c4b8162a" exitCode=0 Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.679229 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2b1d941-1738-4967-a97c-6a7b2c36531c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a2b1d941-1738-4967-a97c-6a7b2c36531c" (UID: "a2b1d941-1738-4967-a97c-6a7b2c36531c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.686510 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_abd4a589-1b2e-4559-852f-2c27c0d8c459/ovn-northd/0.log" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.686721 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.694605 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35b90587-df5b-4f15-8c34-f1b0a8506d85-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "35b90587-df5b-4f15-8c34-f1b0a8506d85" (UID: "35b90587-df5b-4f15-8c34-f1b0a8506d85"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.733264 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2b1d941-1738-4967-a97c-6a7b2c36531c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.733292 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35b90587-df5b-4f15-8c34-f1b0a8506d85-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.750842 4971 generic.go:334] "Generic (PLEG): container finished" podID="d6833730-f034-4b5f-954a-19e993167f04" containerID="de2ace92e2287e0935ebddfec859f7a90b7eade8440fecf2f0f85b2026bacd9f" exitCode=0 Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.753425 4971 generic.go:334] "Generic (PLEG): container finished" podID="cbef532c-59fb-40ac-bde3-35b8f3616d85" containerID="934521e5549c4b670a151928313a07441d4a31af6657c1c703e07932bf31d32e" exitCode=1 Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.753519 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron1af4-account-delete-48m5w" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.754183 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.755033 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-85d46db856-nzmcq" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.755073 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.755099 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.755117 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell16991-account-delete-ftkpc" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.755146 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.755176 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.756486 4971 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/novaapi6022-account-delete-bhttv" secret="" err="secret \"galera-openstack-dockercfg-kzxgw\" not found" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.756545 4971 scope.go:117] "RemoveContainer" containerID="548307ed473102352f01c5222cbdbd6761e2d348135638d2e5a8fab5b038e452" Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.756786 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=novaapi6022-account-delete-bhttv_openstack(cbef532c-59fb-40ac-bde3-35b8f3616d85)\"" pod="openstack/novaapi6022-account-delete-bhttv" podUID="cbef532c-59fb-40ac-bde3-35b8f3616d85" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.832784 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35b90587-df5b-4f15-8c34-f1b0a8506d85-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "35b90587-df5b-4f15-8c34-f1b0a8506d85" (UID: "35b90587-df5b-4f15-8c34-f1b0a8506d85"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.836674 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2b1d941-1738-4967-a97c-6a7b2c36531c-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "a2b1d941-1738-4967-a97c-6a7b2c36531c" (UID: "a2b1d941-1738-4967-a97c-6a7b2c36531c"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.836887 4971 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.836892 4971 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/a2b1d941-1738-4967-a97c-6a7b2c36531c-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.836941 4971 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/35b90587-df5b-4f15-8c34-f1b0a8506d85-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: E1127 07:18:54.836955 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-config-data podName:640c3829-d2e9-49e1-82e3-bd213aa992dd nodeName:}" failed. No retries permitted until 2025-11-27 07:19:02.836934689 +0000 UTC m=+1581.028978607 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-config-data") pod "rabbitmq-cell1-server-0" (UID: "640c3829-d2e9-49e1-82e3-bd213aa992dd") : configmap "rabbitmq-cell1-config-data" not found Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.851389 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-config-data" (OuterVolumeSpecName: "config-data") pod "69d47892-79da-4e4e-8de2-a84801d4d6b9" (UID: "69d47892-79da-4e4e-8de2-a84801d4d6b9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.860037 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abd4a589-1b2e-4559-852f-2c27c0d8c459-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "abd4a589-1b2e-4559-852f-2c27c0d8c459" (UID: "abd4a589-1b2e-4559-852f-2c27c0d8c459"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.865813 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6833730-f034-4b5f-954a-19e993167f04-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d6833730-f034-4b5f-954a-19e993167f04" (UID: "d6833730-f034-4b5f-954a-19e993167f04"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.883237 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "69d47892-79da-4e4e-8de2-a84801d4d6b9" (UID: "69d47892-79da-4e4e-8de2-a84801d4d6b9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.891733 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad" (UID: "bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.895112 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "69d47892-79da-4e4e-8de2-a84801d4d6b9" (UID: "69d47892-79da-4e4e-8de2-a84801d4d6b9"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.904454 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6833730-f034-4b5f-954a-19e993167f04-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "d6833730-f034-4b5f-954a-19e993167f04" (UID: "d6833730-f034-4b5f-954a-19e993167f04"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.920169 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "fc889790-089f-4007-876f-874880dad975" (UID: "fc889790-089f-4007-876f-874880dad975"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.923917 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "fc889790-089f-4007-876f-874880dad975" (UID: "fc889790-089f-4007-876f-874880dad975"). 
InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.929689 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "69d47892-79da-4e4e-8de2-a84801d4d6b9" (UID: "69d47892-79da-4e4e-8de2-a84801d4d6b9"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.931020 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-config-data" (OuterVolumeSpecName: "config-data") pod "fc889790-089f-4007-876f-874880dad975" (UID: "fc889790-089f-4007-876f-874880dad975"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.934042 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2b1d941-1738-4967-a97c-6a7b2c36531c-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "a2b1d941-1738-4967-a97c-6a7b2c36531c" (UID: "a2b1d941-1738-4967-a97c-6a7b2c36531c"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.940220 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abd4a589-1b2e-4559-852f-2c27c0d8c459-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.940252 4971 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.940261 4971 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.940273 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6833730-f034-4b5f-954a-19e993167f04-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.940284 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.940292 4971 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2b1d941-1738-4967-a97c-6a7b2c36531c-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.940303 4971 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.940311 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.940320 4971 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6833730-f034-4b5f-954a-19e993167f04-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.940328 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc889790-089f-4007-876f-874880dad975-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.940336 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.940346 4971 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/69d47892-79da-4e4e-8de2-a84801d4d6b9-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.949626 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad-config-data" (OuterVolumeSpecName: "config-data") pod "bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad" (UID: "bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.976957 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-66674dfb5f-52hcq" podUID="82a33cc1-32fe-464f-ac33-b802fd32a4c1" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.168:8080/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 27 07:18:54 crc kubenswrapper[4971]: I1127 07:18:54.977237 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-66674dfb5f-52hcq" podUID="82a33cc1-32fe-464f-ac33-b802fd32a4c1" containerName="proxy-server" probeResult="failure" output="Get \"https://10.217.0.168:8080/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.027137 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abd4a589-1b2e-4559-852f-2c27c0d8c459-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "abd4a589-1b2e-4559-852f-2c27c0d8c459" (UID: "abd4a589-1b2e-4559-852f-2c27c0d8c459"). InnerVolumeSpecName "ovn-northd-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.041864 4971 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/abd4a589-1b2e-4559-852f-2c27c0d8c459-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.041902 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048442 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement0147-account-delete-w7x5l" event={"ID":"c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d","Type":"ContainerDied","Data":"db9036a8968208018dfc91ff69b123ee88b9ad6478e7efc9d25548f5de33cf10"} Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048494 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db9036a8968208018dfc91ff69b123ee88b9ad6478e7efc9d25548f5de33cf10" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048507 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d6439a3c-ee26-467c-8e42-5abbbf390f16","Type":"ContainerDied","Data":"77882cbee517c91fc3e1f3a6a066be0183a0d5dff26c18bb82c636aa29cd5b4c"} Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048542 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-7865dbd7d9-zp55h"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048628 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d6439a3c-ee26-467c-8e42-5abbbf390f16","Type":"ContainerDied","Data":"b68c9a2e72731ae4a0d02eeb428d0b5c5cc249a3fba7835d0886b885cd7b4645"} Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048641 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b68c9a2e72731ae4a0d02eeb428d0b5c5cc249a3fba7835d0886b885cd7b4645" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048651 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-5b7b448d48-2wggc"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048664 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-5b7b448d48-2wggc"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048676 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048686 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell091e4-account-delete-8kghx" event={"ID":"19b68245-2b99-4337-892f-059f05113ad6","Type":"ContainerDied","Data":"d248fffbf5a9fd8a392c19a71137172faa28f77aa6cf43793ddc632cbb05dd20"} Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048700 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"640c3829-d2e9-49e1-82e3-bd213aa992dd","Type":"ContainerDied","Data":"f5b92bc920247db85a57df75b23c5a3d6d3ba39c4b4024014d369972c4b8162a"} Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048713 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048728 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-northd-0" event={"ID":"abd4a589-1b2e-4559-852f-2c27c0d8c459","Type":"ContainerDied","Data":"57235c1b5dd826d09a3cbfc27f2c1cec772a45124168b6fae949112fe2038c59"} Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048751 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican2596-account-delete-pttgn" event={"ID":"aba3aa08-61e1-48d3-bf4b-cb45e0b71561","Type":"ContainerDied","Data":"9544c98a91892292ec750565e653396ca5b32ab1848e93c8e927a3485832c427"} Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048766 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9544c98a91892292ec750565e653396ca5b32ab1848e93c8e927a3485832c427" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048776 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"d6833730-f034-4b5f-954a-19e993167f04","Type":"ContainerDied","Data":"de2ace92e2287e0935ebddfec859f7a90b7eade8440fecf2f0f85b2026bacd9f"} Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048789 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"d6833730-f034-4b5f-954a-19e993167f04","Type":"ContainerDied","Data":"dc8739d7e1380833df5ffbb2fd90e957d06c03cf2b39c2491034ec42e003dac6"} Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048800 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi6022-account-delete-bhttv" event={"ID":"cbef532c-59fb-40ac-bde3-35b8f3616d85","Type":"ContainerDied","Data":"934521e5549c4b670a151928313a07441d4a31af6657c1c703e07932bf31d32e"} Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048818 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-65cf5bcbb-zf65t"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048835 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi6022-account-delete-bhttv" event={"ID":"cbef532c-59fb-40ac-bde3-35b8f3616d85","Type":"ContainerStarted","Data":"548307ed473102352f01c5222cbdbd6761e2d348135638d2e5a8fab5b038e452"} Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048848 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-65cf5bcbb-zf65t"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048877 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-78rv4"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048892 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-78rv4"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048904 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-6f3a-account-create-update-4dqdw"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048916 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder6f3a-account-delete-nzrmk"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048927 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder6f3a-account-delete-nzrmk"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048936 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-6f3a-account-create-update-4dqdw"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048952 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-8769s"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048962 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/glance-db-create-8769s"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.048991 4971 scope.go:117] "RemoveContainer" containerID="fe9a50947e8810fe514a39d8fd3842aa695ffb13f22c248d0a3f4b1749faa50c" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.054768 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abd4a589-1b2e-4559-852f-2c27c0d8c459-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "abd4a589-1b2e-4559-852f-2c27c0d8c459" (UID: "abd4a589-1b2e-4559-852f-2c27c0d8c459"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.074521 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance7577-account-delete-hmqt9"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.091604 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-7577-account-create-update-v4drs"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.097798 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance7577-account-delete-hmqt9"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.100800 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-669669cf59-5rwqh" podUID="b3fa1872-f7d9-4531-bc33-619419f530a5" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.198:5353: i/o timeout" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.116691 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-7577-account-create-update-v4drs"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.123877 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-cxhr4"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.132636 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-cxhr4"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.143956 4971 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/abd4a589-1b2e-4559-852f-2c27c0d8c459-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.148517 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-0147-account-create-update-gtcn7"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.157130 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement0147-account-delete-w7x5l"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.167912 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-0147-account-create-update-gtcn7"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.205646 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement0147-account-delete-w7x5l"] Nov 27 07:18:55 crc kubenswrapper[4971]: E1127 07:18:55.245909 4971 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 27 07:18:55 crc kubenswrapper[4971]: E1127 07:18:55.245984 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cbef532c-59fb-40ac-bde3-35b8f3616d85-operator-scripts podName:cbef532c-59fb-40ac-bde3-35b8f3616d85 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:57.245962631 +0000 UTC m=+1575.438006539 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/cbef532c-59fb-40ac-bde3-35b8f3616d85-operator-scripts") pod "novaapi6022-account-delete-bhttv" (UID: "cbef532c-59fb-40ac-bde3-35b8f3616d85") : configmap "openstack-scripts" not found Nov 27 07:18:55 crc kubenswrapper[4971]: E1127 07:18:55.246012 4971 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 27 07:18:55 crc kubenswrapper[4971]: E1127 07:18:55.246072 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/19b68245-2b99-4337-892f-059f05113ad6-operator-scripts podName:19b68245-2b99-4337-892f-059f05113ad6 nodeName:}" failed. No retries permitted until 2025-11-27 07:18:57.246054094 +0000 UTC m=+1575.438098012 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/19b68245-2b99-4337-892f-059f05113ad6-operator-scripts") pod "novacell091e4-account-delete-8kghx" (UID: "19b68245-2b99-4337-892f-059f05113ad6") : configmap "openstack-scripts" not found Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.329449 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican2596-account-delete-pttgn" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.360644 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-287xw"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.387043 4971 scope.go:117] "RemoveContainer" containerID="c9d172c81f9f3625bed2063b71b15aa0aaee90448e7484f3536af33829d91bf8" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.417510 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-287xw"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.438887 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-2596-account-create-update-krr9x"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.453656 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aba3aa08-61e1-48d3-bf4b-cb45e0b71561-operator-scripts\") pod \"aba3aa08-61e1-48d3-bf4b-cb45e0b71561\" (UID: \"aba3aa08-61e1-48d3-bf4b-cb45e0b71561\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.453833 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7h9l\" (UniqueName: \"kubernetes.io/projected/aba3aa08-61e1-48d3-bf4b-cb45e0b71561-kube-api-access-k7h9l\") pod \"aba3aa08-61e1-48d3-bf4b-cb45e0b71561\" (UID: \"aba3aa08-61e1-48d3-bf4b-cb45e0b71561\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.455270 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aba3aa08-61e1-48d3-bf4b-cb45e0b71561-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "aba3aa08-61e1-48d3-bf4b-cb45e0b71561" (UID: "aba3aa08-61e1-48d3-bf4b-cb45e0b71561"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.462017 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aba3aa08-61e1-48d3-bf4b-cb45e0b71561-kube-api-access-k7h9l" (OuterVolumeSpecName: "kube-api-access-k7h9l") pod "aba3aa08-61e1-48d3-bf4b-cb45e0b71561" (UID: "aba3aa08-61e1-48d3-bf4b-cb45e0b71561"). 
InnerVolumeSpecName "kube-api-access-k7h9l". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.473167 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.497313 4971 scope.go:117] "RemoveContainer" containerID="de2ace92e2287e0935ebddfec859f7a90b7eade8440fecf2f0f85b2026bacd9f" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.497703 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-2596-account-create-update-krr9x"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.530881 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican2596-account-delete-pttgn"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.549907 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican2596-account-delete-pttgn"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.550784 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04" Nov 27 07:18:55 crc kubenswrapper[4971]: E1127 07:18:55.551005 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.554598 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"d6439a3c-ee26-467c-8e42-5abbbf390f16\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.554705 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-erlang-cookie\") pod \"d6439a3c-ee26-467c-8e42-5abbbf390f16\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.554737 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-config-data\") pod \"d6439a3c-ee26-467c-8e42-5abbbf390f16\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.554791 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-confd\") pod \"d6439a3c-ee26-467c-8e42-5abbbf390f16\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.554832 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-server-conf\") pod \"d6439a3c-ee26-467c-8e42-5abbbf390f16\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.554852 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" 
(UniqueName: \"kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-plugins-conf\") pod \"d6439a3c-ee26-467c-8e42-5abbbf390f16\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.554881 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kmnnq\" (UniqueName: \"kubernetes.io/projected/d6439a3c-ee26-467c-8e42-5abbbf390f16-kube-api-access-kmnnq\") pod \"d6439a3c-ee26-467c-8e42-5abbbf390f16\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.554903 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-tls\") pod \"d6439a3c-ee26-467c-8e42-5abbbf390f16\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.554922 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d6439a3c-ee26-467c-8e42-5abbbf390f16-pod-info\") pod \"d6439a3c-ee26-467c-8e42-5abbbf390f16\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.554964 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-plugins\") pod \"d6439a3c-ee26-467c-8e42-5abbbf390f16\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.554983 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d6439a3c-ee26-467c-8e42-5abbbf390f16-erlang-cookie-secret\") pod \"d6439a3c-ee26-467c-8e42-5abbbf390f16\" (UID: \"d6439a3c-ee26-467c-8e42-5abbbf390f16\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.555384 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "d6439a3c-ee26-467c-8e42-5abbbf390f16" (UID: "d6439a3c-ee26-467c-8e42-5abbbf390f16"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.555395 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7h9l\" (UniqueName: \"kubernetes.io/projected/aba3aa08-61e1-48d3-bf4b-cb45e0b71561-kube-api-access-k7h9l\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.555552 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aba3aa08-61e1-48d3-bf4b-cb45e0b71561-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.557187 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "d6439a3c-ee26-467c-8e42-5abbbf390f16" (UID: "d6439a3c-ee26-467c-8e42-5abbbf390f16"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.563923 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "d6439a3c-ee26-467c-8e42-5abbbf390f16" (UID: "d6439a3c-ee26-467c-8e42-5abbbf390f16"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.565973 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6439a3c-ee26-467c-8e42-5abbbf390f16-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "d6439a3c-ee26-467c-8e42-5abbbf390f16" (UID: "d6439a3c-ee26-467c-8e42-5abbbf390f16"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.580807 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "d6439a3c-ee26-467c-8e42-5abbbf390f16" (UID: "d6439a3c-ee26-467c-8e42-5abbbf390f16"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.581576 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6439a3c-ee26-467c-8e42-5abbbf390f16-kube-api-access-kmnnq" (OuterVolumeSpecName: "kube-api-access-kmnnq") pod "d6439a3c-ee26-467c-8e42-5abbbf390f16" (UID: "d6439a3c-ee26-467c-8e42-5abbbf390f16"). InnerVolumeSpecName "kube-api-access-kmnnq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.586966 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell16991-account-delete-ftkpc"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.588720 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/d6439a3c-ee26-467c-8e42-5abbbf390f16-pod-info" (OuterVolumeSpecName: "pod-info") pod "d6439a3c-ee26-467c-8e42-5abbbf390f16" (UID: "d6439a3c-ee26-467c-8e42-5abbbf390f16"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.594286 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "persistence") pod "d6439a3c-ee26-467c-8e42-5abbbf390f16" (UID: "d6439a3c-ee26-467c-8e42-5abbbf390f16"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.608970 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novacell16991-account-delete-ftkpc"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.635180 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-config-data" (OuterVolumeSpecName: "config-data") pod "d6439a3c-ee26-467c-8e42-5abbbf390f16" (UID: "d6439a3c-ee26-467c-8e42-5abbbf390f16"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.658986 4971 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.659023 4971 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d6439a3c-ee26-467c-8e42-5abbbf390f16-pod-info\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.659032 4971 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.659041 4971 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d6439a3c-ee26-467c-8e42-5abbbf390f16-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.659061 4971 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.659070 4971 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.659080 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.659091 4971 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-plugins-conf\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.659100 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kmnnq\" (UniqueName: \"kubernetes.io/projected/d6439a3c-ee26-467c-8e42-5abbbf390f16-kube-api-access-kmnnq\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.716958 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-server-conf" (OuterVolumeSpecName: "server-conf") pod "d6439a3c-ee26-467c-8e42-5abbbf390f16" (UID: "d6439a3c-ee26-467c-8e42-5abbbf390f16"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.738085 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-t2rfb"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.763090 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9411d1e4-0384-4caf-b95e-e891b811b402-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.764406 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vzqfv\" (UniqueName: \"kubernetes.io/projected/9411d1e4-0384-4caf-b95e-e891b811b402-kube-api-access-vzqfv\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.764427 4971 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d6439a3c-ee26-467c-8e42-5abbbf390f16-server-conf\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.769967 4971 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.798582 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-t2rfb"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.815811 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron1af4-account-delete-48m5w"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.840215 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-1af4-account-create-update-fz7bt"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.859212 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron1af4-account-delete-48m5w"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.870803 4971 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.870861 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-1af4-account-create-update-fz7bt"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.873990 4971 scope.go:117] "RemoveContainer" containerID="de2ace92e2287e0935ebddfec859f7a90b7eade8440fecf2f0f85b2026bacd9f" Nov 27 07:18:55 crc kubenswrapper[4971]: E1127 07:18:55.875910 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de2ace92e2287e0935ebddfec859f7a90b7eade8440fecf2f0f85b2026bacd9f\": container with ID starting with de2ace92e2287e0935ebddfec859f7a90b7eade8440fecf2f0f85b2026bacd9f not found: ID does not exist" containerID="de2ace92e2287e0935ebddfec859f7a90b7eade8440fecf2f0f85b2026bacd9f" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.875967 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de2ace92e2287e0935ebddfec859f7a90b7eade8440fecf2f0f85b2026bacd9f"} err="failed to get container status \"de2ace92e2287e0935ebddfec859f7a90b7eade8440fecf2f0f85b2026bacd9f\": rpc error: code = NotFound desc = could not find container \"de2ace92e2287e0935ebddfec859f7a90b7eade8440fecf2f0f85b2026bacd9f\": container with ID starting with 
de2ace92e2287e0935ebddfec859f7a90b7eade8440fecf2f0f85b2026bacd9f not found: ID does not exist" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.876004 4971 scope.go:117] "RemoveContainer" containerID="934521e5549c4b670a151928313a07441d4a31af6657c1c703e07932bf31d32e" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.876041 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.876508 4971 generic.go:334] "Generic (PLEG): container finished" podID="cbef532c-59fb-40ac-bde3-35b8f3616d85" containerID="548307ed473102352f01c5222cbdbd6761e2d348135638d2e5a8fab5b038e452" exitCode=1 Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.876590 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi6022-account-delete-bhttv" event={"ID":"cbef532c-59fb-40ac-bde3-35b8f3616d85","Type":"ContainerDied","Data":"548307ed473102352f01c5222cbdbd6761e2d348135638d2e5a8fab5b038e452"} Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.877265 4971 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/novaapi6022-account-delete-bhttv" secret="" err="secret \"galera-openstack-dockercfg-kzxgw\" not found" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.877309 4971 scope.go:117] "RemoveContainer" containerID="548307ed473102352f01c5222cbdbd6761e2d348135638d2e5a8fab5b038e452" Nov 27 07:18:55 crc kubenswrapper[4971]: E1127 07:18:55.877542 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=novaapi6022-account-delete-bhttv_openstack(cbef532c-59fb-40ac-bde3-35b8f3616d85)\"" pod="openstack/novaapi6022-account-delete-bhttv" podUID="cbef532c-59fb-40ac-bde3-35b8f3616d85" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.883924 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "d6439a3c-ee26-467c-8e42-5abbbf390f16" (UID: "d6439a3c-ee26-467c-8e42-5abbbf390f16"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.895258 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell091e4-account-delete-8kghx" event={"ID":"19b68245-2b99-4337-892f-059f05113ad6","Type":"ContainerStarted","Data":"f4593d28b30f070f2c552a0321ee906b7eb0e049e6bb65ff5d226316e092db81"} Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.896389 4971 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/novacell091e4-account-delete-8kghx" secret="" err="secret \"galera-openstack-dockercfg-kzxgw\" not found" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.903693 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-b4zt8"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.913550 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-b4zt8"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.914376 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"640c3829-d2e9-49e1-82e3-bd213aa992dd","Type":"ContainerDied","Data":"852d5810ef0f5e31c0974f4306fef32f05d2ec8ed8b6e2ceb9309c3647318ba8"} Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.914456 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.929955 4971 generic.go:334] "Generic (PLEG): container finished" podID="4a0dcbed-5f66-4faf-83c2-1227bc05e9d3" containerID="c882ea5da9f0b4988139836dea25aa544de3122c3c530bac76a1e7264a8122e0" exitCode=0 Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.930018 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3","Type":"ContainerDied","Data":"c882ea5da9f0b4988139836dea25aa544de3122c3c530bac76a1e7264a8122e0"} Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.935931 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican2596-account-delete-pttgn" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.935961 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.938632 4971 scope.go:117] "RemoveContainer" containerID="934521e5549c4b670a151928313a07441d4a31af6657c1c703e07932bf31d32e" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.939594 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-6022-account-create-update-hwnvl"] Nov 27 07:18:55 crc kubenswrapper[4971]: E1127 07:18:55.943752 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"934521e5549c4b670a151928313a07441d4a31af6657c1c703e07932bf31d32e\": container with ID starting with 934521e5549c4b670a151928313a07441d4a31af6657c1c703e07932bf31d32e not found: ID does not exist" containerID="934521e5549c4b670a151928313a07441d4a31af6657c1c703e07932bf31d32e" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.943798 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"934521e5549c4b670a151928313a07441d4a31af6657c1c703e07932bf31d32e"} err="failed to get container status \"934521e5549c4b670a151928313a07441d4a31af6657c1c703e07932bf31d32e\": rpc error: code = NotFound desc = could not find container \"934521e5549c4b670a151928313a07441d4a31af6657c1c703e07932bf31d32e\": container with ID starting with 934521e5549c4b670a151928313a07441d4a31af6657c1c703e07932bf31d32e not found: ID does not exist" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.943826 4971 scope.go:117] "RemoveContainer" containerID="f5b92bc920247db85a57df75b23c5a3d6d3ba39c4b4024014d369972c4b8162a" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.948395 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-6022-account-create-update-hwnvl"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.960386 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi6022-account-delete-bhttv"] Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.971829 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-confd\") pod \"640c3829-d2e9-49e1-82e3-bd213aa992dd\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.971909 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-config-data\") pod \"640c3829-d2e9-49e1-82e3-bd213aa992dd\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.971956 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"640c3829-d2e9-49e1-82e3-bd213aa992dd\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.972007 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/640c3829-d2e9-49e1-82e3-bd213aa992dd-erlang-cookie-secret\") pod \"640c3829-d2e9-49e1-82e3-bd213aa992dd\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.972050 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" 
(UniqueName: \"kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-server-conf\") pod \"640c3829-d2e9-49e1-82e3-bd213aa992dd\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.972094 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-erlang-cookie\") pod \"640c3829-d2e9-49e1-82e3-bd213aa992dd\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.972118 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-plugins\") pod \"640c3829-d2e9-49e1-82e3-bd213aa992dd\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.972138 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-plugins-conf\") pod \"640c3829-d2e9-49e1-82e3-bd213aa992dd\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.972163 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-tls\") pod \"640c3829-d2e9-49e1-82e3-bd213aa992dd\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.972236 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/640c3829-d2e9-49e1-82e3-bd213aa992dd-pod-info\") pod \"640c3829-d2e9-49e1-82e3-bd213aa992dd\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.972266 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzvhk\" (UniqueName: \"kubernetes.io/projected/640c3829-d2e9-49e1-82e3-bd213aa992dd-kube-api-access-xzvhk\") pod \"640c3829-d2e9-49e1-82e3-bd213aa992dd\" (UID: \"640c3829-d2e9-49e1-82e3-bd213aa992dd\") " Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.972854 4971 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d6439a3c-ee26-467c-8e42-5abbbf390f16-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.975485 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "640c3829-d2e9-49e1-82e3-bd213aa992dd" (UID: "640c3829-d2e9-49e1-82e3-bd213aa992dd"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.976460 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "640c3829-d2e9-49e1-82e3-bd213aa992dd" (UID: "640c3829-d2e9-49e1-82e3-bd213aa992dd"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.979807 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "640c3829-d2e9-49e1-82e3-bd213aa992dd" (UID: "640c3829-d2e9-49e1-82e3-bd213aa992dd"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:55 crc kubenswrapper[4971]: I1127 07:18:55.989459 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:55.998093 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "640c3829-d2e9-49e1-82e3-bd213aa992dd" (UID: "640c3829-d2e9-49e1-82e3-bd213aa992dd"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.002041 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "persistence") pod "640c3829-d2e9-49e1-82e3-bd213aa992dd" (UID: "640c3829-d2e9-49e1-82e3-bd213aa992dd"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.015043 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/640c3829-d2e9-49e1-82e3-bd213aa992dd-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "640c3829-d2e9-49e1-82e3-bd213aa992dd" (UID: "640c3829-d2e9-49e1-82e3-bd213aa992dd"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.030733 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/640c3829-d2e9-49e1-82e3-bd213aa992dd-pod-info" (OuterVolumeSpecName: "pod-info") pod "640c3829-d2e9-49e1-82e3-bd213aa992dd" (UID: "640c3829-d2e9-49e1-82e3-bd213aa992dd"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.041249 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.051891 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-server-conf" (OuterVolumeSpecName: "server-conf") pod "640c3829-d2e9-49e1-82e3-bd213aa992dd" (UID: "640c3829-d2e9-49e1-82e3-bd213aa992dd"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.062773 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/640c3829-d2e9-49e1-82e3-bd213aa992dd-kube-api-access-xzvhk" (OuterVolumeSpecName: "kube-api-access-xzvhk") pod "640c3829-d2e9-49e1-82e3-bd213aa992dd" (UID: "640c3829-d2e9-49e1-82e3-bd213aa992dd"). InnerVolumeSpecName "kube-api-access-xzvhk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.074305 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzvhk\" (UniqueName: \"kubernetes.io/projected/640c3829-d2e9-49e1-82e3-bd213aa992dd-kube-api-access-xzvhk\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.074350 4971 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.074362 4971 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/640c3829-d2e9-49e1-82e3-bd213aa992dd-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.074371 4971 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-server-conf\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.074383 4971 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.074394 4971 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.074402 4971 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-plugins-conf\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.074412 4971 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.074420 4971 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/640c3829-d2e9-49e1-82e3-bd213aa992dd-pod-info\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.091564 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.102283 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.103277 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-config-data" (OuterVolumeSpecName: "config-data") pod "640c3829-d2e9-49e1-82e3-bd213aa992dd" (UID: "640c3829-d2e9-49e1-82e3-bd213aa992dd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.127623 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-vpqrm"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.131520 4971 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.144603 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-vpqrm"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.144703 4971 scope.go:117] "RemoveContainer" containerID="ddb554636617ff38027bc6df7fed10cfc5e39e121a5847f244d29b6eed3b5a39" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.157500 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.172349 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.176315 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/640c3829-d2e9-49e1-82e3-bd213aa992dd-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.176345 4971 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.190836 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-91e4-account-create-update-qxzp8"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.196852 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-91e4-account-create-update-qxzp8"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.199736 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "640c3829-d2e9-49e1-82e3-bd213aa992dd" (UID: "640c3829-d2e9-49e1-82e3-bd213aa992dd"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.208169 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell091e4-account-delete-8kghx"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.223933 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.237189 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.252053 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.265180 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.278067 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-85d46db856-nzmcq"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.279350 4971 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/640c3829-d2e9-49e1-82e3-bd213aa992dd-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.283344 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-85d46db856-nzmcq"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.293445 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.302769 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.312843 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.320068 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.337999 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.356743 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.419341 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.484273 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2dt8\" (UniqueName: \"kubernetes.io/projected/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-kube-api-access-v2dt8\") pod \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.484386 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-config-data-generated\") pod \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.484438 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-galera-tls-certs\") pod \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.484571 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-kolla-config\") pod \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.484620 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-config-data-default\") pod \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.484682 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-operator-scripts\") pod \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.484732 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.484763 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-combined-ca-bundle\") pod \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\" (UID: \"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.489062 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "4a0dcbed-5f66-4faf-83c2-1227bc05e9d3" (UID: "4a0dcbed-5f66-4faf-83c2-1227bc05e9d3"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.489646 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "4a0dcbed-5f66-4faf-83c2-1227bc05e9d3" (UID: "4a0dcbed-5f66-4faf-83c2-1227bc05e9d3"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.492722 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "4a0dcbed-5f66-4faf-83c2-1227bc05e9d3" (UID: "4a0dcbed-5f66-4faf-83c2-1227bc05e9d3"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.493629 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4a0dcbed-5f66-4faf-83c2-1227bc05e9d3" (UID: "4a0dcbed-5f66-4faf-83c2-1227bc05e9d3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.520017 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-kube-api-access-v2dt8" (OuterVolumeSpecName: "kube-api-access-v2dt8") pod "4a0dcbed-5f66-4faf-83c2-1227bc05e9d3" (UID: "4a0dcbed-5f66-4faf-83c2-1227bc05e9d3"). InnerVolumeSpecName "kube-api-access-v2dt8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.572769 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "mysql-db") pod "4a0dcbed-5f66-4faf-83c2-1227bc05e9d3" (UID: "4a0dcbed-5f66-4faf-83c2-1227bc05e9d3"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.585021 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "4a0dcbed-5f66-4faf-83c2-1227bc05e9d3" (UID: "4a0dcbed-5f66-4faf-83c2-1227bc05e9d3"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.591586 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4a0dcbed-5f66-4faf-83c2-1227bc05e9d3" (UID: "4a0dcbed-5f66-4faf-83c2-1227bc05e9d3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.598519 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1349dfd7-4e8c-458d-a932-c6a77029f735" path="/var/lib/kubelet/pods/1349dfd7-4e8c-458d-a932-c6a77029f735/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.599662 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2be44422-3e3d-4564-9d1b-0a3489a93dac" path="/var/lib/kubelet/pods/2be44422-3e3d-4564-9d1b-0a3489a93dac/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.600178 4971 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-kolla-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.600207 4971 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-config-data-default\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.600219 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.600245 4971 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.600257 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.600393 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2dt8\" (UniqueName: \"kubernetes.io/projected/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-kube-api-access-v2dt8\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.600407 4971 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-config-data-generated\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.600419 4971 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.610926 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35b90587-df5b-4f15-8c34-f1b0a8506d85" path="/var/lib/kubelet/pods/35b90587-df5b-4f15-8c34-f1b0a8506d85/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.613526 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38e2d59c-6699-4df0-aa89-e837dd02993a" path="/var/lib/kubelet/pods/38e2d59c-6699-4df0-aa89-e837dd02993a/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.616813 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49b88fe9-5c45-423c-ab21-fbd5e2bb14c8" path="/var/lib/kubelet/pods/49b88fe9-5c45-423c-ab21-fbd5e2bb14c8/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.617305 4971 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51e69600-7930-4ccb-a8ff-0ad3377bf3a7" path="/var/lib/kubelet/pods/51e69600-7930-4ccb-a8ff-0ad3377bf3a7/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.618320 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5dedf934-ea25-4736-9220-80efdbd3756e" path="/var/lib/kubelet/pods/5dedf934-ea25-4736-9220-80efdbd3756e/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.622040 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="640c3829-d2e9-49e1-82e3-bd213aa992dd" path="/var/lib/kubelet/pods/640c3829-d2e9-49e1-82e3-bd213aa992dd/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.622651 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69d47892-79da-4e4e-8de2-a84801d4d6b9" path="/var/lib/kubelet/pods/69d47892-79da-4e4e-8de2-a84801d4d6b9/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.625442 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e067498-7bc0-4bc5-a9a6-696c8aa3cf71" path="/var/lib/kubelet/pods/6e067498-7bc0-4bc5-a9a6-696c8aa3cf71/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.628300 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83ad8915-ac3b-4891-ae7a-9b862747569f" path="/var/lib/kubelet/pods/83ad8915-ac3b-4891-ae7a-9b862747569f/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.630085 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91f979c6-21cc-4848-9eb5-b8bfc4abf082" path="/var/lib/kubelet/pods/91f979c6-21cc-4848-9eb5-b8bfc4abf082/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.630444 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9411d1e4-0384-4caf-b95e-e891b811b402" path="/var/lib/kubelet/pods/9411d1e4-0384-4caf-b95e-e891b811b402/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.633644 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96f81740-91f7-431a-be63-94b4fd8fc096" path="/var/lib/kubelet/pods/96f81740-91f7-431a-be63-94b4fd8fc096/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.634344 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2b1d941-1738-4967-a97c-6a7b2c36531c" path="/var/lib/kubelet/pods/a2b1d941-1738-4967-a97c-6a7b2c36531c/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.634850 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aba3aa08-61e1-48d3-bf4b-cb45e0b71561" path="/var/lib/kubelet/pods/aba3aa08-61e1-48d3-bf4b-cb45e0b71561/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.635383 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abd4a589-1b2e-4559-852f-2c27c0d8c459" path="/var/lib/kubelet/pods/abd4a589-1b2e-4559-852f-2c27c0d8c459/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.638078 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad8e41a8-cb48-4f48-82d3-2293e640bf1e" path="/var/lib/kubelet/pods/ad8e41a8-cb48-4f48-82d3-2293e640bf1e/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.638721 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3616559-d640-4b3b-a4b3-b9d9af1d0061" path="/var/lib/kubelet/pods/b3616559-d640-4b3b-a4b3-b9d9af1d0061/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.640632 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="bf3a9110-6490-49f0-9bcc-bbd3bd595d58" path="/var/lib/kubelet/pods/bf3a9110-6490-49f0-9bcc-bbd3bd595d58/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.641222 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad" path="/var/lib/kubelet/pods/bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.641951 4971 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.642076 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2e2055e-1200-46e8-a49e-c6b490702c9b" path="/var/lib/kubelet/pods/c2e2055e-1200-46e8-a49e-c6b490702c9b/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.643128 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d" path="/var/lib/kubelet/pods/c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.643678 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c88f9c00-b02f-4070-b81e-733009e44691" path="/var/lib/kubelet/pods/c88f9c00-b02f-4070-b81e-733009e44691/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.644301 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d30c76d9-61bb-4891-bb73-c2e41fcdf0b5" path="/var/lib/kubelet/pods/d30c76d9-61bb-4891-bb73-c2e41fcdf0b5/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.645448 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6439a3c-ee26-467c-8e42-5abbbf390f16" path="/var/lib/kubelet/pods/d6439a3c-ee26-467c-8e42-5abbbf390f16/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.646324 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6833730-f034-4b5f-954a-19e993167f04" path="/var/lib/kubelet/pods/d6833730-f034-4b5f-954a-19e993167f04/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.647060 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb3b1410-ced5-4047-8aee-eb6e28fd5fb5" path="/var/lib/kubelet/pods/eb3b1410-ced5-4047-8aee-eb6e28fd5fb5/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.648212 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f379bced-5e57-4106-86b4-437cf8bce9b4" path="/var/lib/kubelet/pods/f379bced-5e57-4106-86b4-437cf8bce9b4/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.648826 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc08a45a-a721-4947-97d4-1592a8547091" path="/var/lib/kubelet/pods/fc08a45a-a721-4947-97d4-1592a8547091/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.649411 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc889790-089f-4007-876f-874880dad975" path="/var/lib/kubelet/pods/fc889790-089f-4007-876f-874880dad975/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.650626 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe01235a-8eb5-49d8-afeb-32fc11bde3cc" path="/var/lib/kubelet/pods/fe01235a-8eb5-49d8-afeb-32fc11bde3cc/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.651286 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffa0ba8c-c0c5-4283-84eb-427a9def8359" 
path="/var/lib/kubelet/pods/ffa0ba8c-c0c5-4283-84eb-427a9def8359/volumes" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.685593 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.703652 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-55bd67bdbd-llwzk" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.708180 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-config-data\") pod \"b2ac1dd8-824c-482d-8c0e-47573535f172\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.708251 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-fernet-keys\") pod \"b2ac1dd8-824c-482d-8c0e-47573535f172\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.708337 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-combined-ca-bundle\") pod \"b2ac1dd8-824c-482d-8c0e-47573535f172\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.708366 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-scripts\") pod \"b2ac1dd8-824c-482d-8c0e-47573535f172\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.708400 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v95vq\" (UniqueName: \"kubernetes.io/projected/b2ac1dd8-824c-482d-8c0e-47573535f172-kube-api-access-v95vq\") pod \"b2ac1dd8-824c-482d-8c0e-47573535f172\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.708427 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-public-tls-certs\") pod \"b2ac1dd8-824c-482d-8c0e-47573535f172\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.708476 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f461fed-9df2-44a5-b99c-17f30adf0d9c-combined-ca-bundle\") pod \"7f461fed-9df2-44a5-b99c-17f30adf0d9c\" (UID: \"7f461fed-9df2-44a5-b99c-17f30adf0d9c\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.708498 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4nfw\" (UniqueName: \"kubernetes.io/projected/7f461fed-9df2-44a5-b99c-17f30adf0d9c-kube-api-access-b4nfw\") pod \"7f461fed-9df2-44a5-b99c-17f30adf0d9c\" (UID: \"7f461fed-9df2-44a5-b99c-17f30adf0d9c\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.708575 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-credential-keys\") pod 
\"b2ac1dd8-824c-482d-8c0e-47573535f172\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.708629 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f461fed-9df2-44a5-b99c-17f30adf0d9c-config-data\") pod \"7f461fed-9df2-44a5-b99c-17f30adf0d9c\" (UID: \"7f461fed-9df2-44a5-b99c-17f30adf0d9c\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.708730 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-internal-tls-certs\") pod \"b2ac1dd8-824c-482d-8c0e-47573535f172\" (UID: \"b2ac1dd8-824c-482d-8c0e-47573535f172\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.709207 4971 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.715762 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "b2ac1dd8-824c-482d-8c0e-47573535f172" (UID: "b2ac1dd8-824c-482d-8c0e-47573535f172"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.716127 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2ac1dd8-824c-482d-8c0e-47573535f172-kube-api-access-v95vq" (OuterVolumeSpecName: "kube-api-access-v95vq") pod "b2ac1dd8-824c-482d-8c0e-47573535f172" (UID: "b2ac1dd8-824c-482d-8c0e-47573535f172"). InnerVolumeSpecName "kube-api-access-v95vq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.732794 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "b2ac1dd8-824c-482d-8c0e-47573535f172" (UID: "b2ac1dd8-824c-482d-8c0e-47573535f172"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.738904 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-scripts" (OuterVolumeSpecName: "scripts") pod "b2ac1dd8-824c-482d-8c0e-47573535f172" (UID: "b2ac1dd8-824c-482d-8c0e-47573535f172"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.740082 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f461fed-9df2-44a5-b99c-17f30adf0d9c-kube-api-access-b4nfw" (OuterVolumeSpecName: "kube-api-access-b4nfw") pod "7f461fed-9df2-44a5-b99c-17f30adf0d9c" (UID: "7f461fed-9df2-44a5-b99c-17f30adf0d9c"). InnerVolumeSpecName "kube-api-access-b4nfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.740418 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.809961 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36cda10d-7f8f-403c-82b8-fbbdf89e8ed5-combined-ca-bundle\") pod \"36cda10d-7f8f-403c-82b8-fbbdf89e8ed5\" (UID: \"36cda10d-7f8f-403c-82b8-fbbdf89e8ed5\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.810079 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdtkd\" (UniqueName: \"kubernetes.io/projected/36cda10d-7f8f-403c-82b8-fbbdf89e8ed5-kube-api-access-vdtkd\") pod \"36cda10d-7f8f-403c-82b8-fbbdf89e8ed5\" (UID: \"36cda10d-7f8f-403c-82b8-fbbdf89e8ed5\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.810106 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36cda10d-7f8f-403c-82b8-fbbdf89e8ed5-config-data\") pod \"36cda10d-7f8f-403c-82b8-fbbdf89e8ed5\" (UID: \"36cda10d-7f8f-403c-82b8-fbbdf89e8ed5\") " Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.810642 4971 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.810666 4971 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.810679 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.810692 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v95vq\" (UniqueName: \"kubernetes.io/projected/b2ac1dd8-824c-482d-8c0e-47573535f172-kube-api-access-v95vq\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.810703 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4nfw\" (UniqueName: \"kubernetes.io/projected/7f461fed-9df2-44a5-b99c-17f30adf0d9c-kube-api-access-b4nfw\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.824808 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36cda10d-7f8f-403c-82b8-fbbdf89e8ed5-kube-api-access-vdtkd" (OuterVolumeSpecName: "kube-api-access-vdtkd") pod "36cda10d-7f8f-403c-82b8-fbbdf89e8ed5" (UID: "36cda10d-7f8f-403c-82b8-fbbdf89e8ed5"). InnerVolumeSpecName "kube-api-access-vdtkd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:56 crc kubenswrapper[4971]: E1127 07:18:56.826519 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7f461fed-9df2-44a5-b99c-17f30adf0d9c-config-data podName:7f461fed-9df2-44a5-b99c-17f30adf0d9c nodeName:}" failed. No retries permitted until 2025-11-27 07:18:57.326486727 +0000 UTC m=+1575.518530645 (durationBeforeRetry 500ms). 
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.828320 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-config-data" (OuterVolumeSpecName: "config-data") pod "b2ac1dd8-824c-482d-8c0e-47573535f172" (UID: "b2ac1dd8-824c-482d-8c0e-47573535f172"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.830190 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b2ac1dd8-824c-482d-8c0e-47573535f172" (UID: "b2ac1dd8-824c-482d-8c0e-47573535f172"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.834799 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f461fed-9df2-44a5-b99c-17f30adf0d9c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7f461fed-9df2-44a5-b99c-17f30adf0d9c" (UID: "7f461fed-9df2-44a5-b99c-17f30adf0d9c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.835825 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36cda10d-7f8f-403c-82b8-fbbdf89e8ed5-config-data" (OuterVolumeSpecName: "config-data") pod "36cda10d-7f8f-403c-82b8-fbbdf89e8ed5" (UID: "36cda10d-7f8f-403c-82b8-fbbdf89e8ed5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.836667 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36cda10d-7f8f-403c-82b8-fbbdf89e8ed5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36cda10d-7f8f-403c-82b8-fbbdf89e8ed5" (UID: "36cda10d-7f8f-403c-82b8-fbbdf89e8ed5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.849747 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.856688 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b2ac1dd8-824c-482d-8c0e-47573535f172" (UID: "b2ac1dd8-824c-482d-8c0e-47573535f172"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.859391 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b2ac1dd8-824c-482d-8c0e-47573535f172" (UID: "b2ac1dd8-824c-482d-8c0e-47573535f172"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.912296 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36cda10d-7f8f-403c-82b8-fbbdf89e8ed5-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.912334 4971 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.912349 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.912361 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdtkd\" (UniqueName: \"kubernetes.io/projected/36cda10d-7f8f-403c-82b8-fbbdf89e8ed5-kube-api-access-vdtkd\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.912375 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36cda10d-7f8f-403c-82b8-fbbdf89e8ed5-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.912385 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.912395 4971 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ac1dd8-824c-482d-8c0e-47573535f172-public-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.912407 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f461fed-9df2-44a5-b99c-17f30adf0d9c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.951736 4971 generic.go:334] "Generic (PLEG): container finished" podID="19b68245-2b99-4337-892f-059f05113ad6" containerID="f4593d28b30f070f2c552a0321ee906b7eb0e049e6bb65ff5d226316e092db81" exitCode=1
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.951811 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell091e4-account-delete-8kghx" event={"ID":"19b68245-2b99-4337-892f-059f05113ad6","Type":"ContainerDied","Data":"f4593d28b30f070f2c552a0321ee906b7eb0e049e6bb65ff5d226316e092db81"}
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.952271 4971 scope.go:117] "RemoveContainer" containerID="d248fffbf5a9fd8a392c19a71137172faa28f77aa6cf43793ddc632cbb05dd20"
Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.952143 4971 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/novacell091e4-account-delete-8kghx" secret="" err="secret \"galera-openstack-dockercfg-kzxgw\" not found"
succeed." pod="openstack/novacell091e4-account-delete-8kghx" secret="" err="secret \"galera-openstack-dockercfg-kzxgw\" not found" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.952585 4971 scope.go:117] "RemoveContainer" containerID="f4593d28b30f070f2c552a0321ee906b7eb0e049e6bb65ff5d226316e092db81" Nov 27 07:18:56 crc kubenswrapper[4971]: E1127 07:18:56.953063 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=novacell091e4-account-delete-8kghx_openstack(19b68245-2b99-4337-892f-059f05113ad6)\"" pod="openstack/novacell091e4-account-delete-8kghx" podUID="19b68245-2b99-4337-892f-059f05113ad6" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.959318 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4a0dcbed-5f66-4faf-83c2-1227bc05e9d3","Type":"ContainerDied","Data":"d4ba45740bd639b78d618eee5a883a210b84172a9805a27979d54b1226fa1e2f"} Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.959332 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.969587 4971 generic.go:334] "Generic (PLEG): container finished" podID="7f461fed-9df2-44a5-b99c-17f30adf0d9c" containerID="bacf1459d637ebaf1473ffbba9c90d2886c77004f0c01d13c0668368ade96df2" exitCode=0 Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.969686 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.969705 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7f461fed-9df2-44a5-b99c-17f30adf0d9c","Type":"ContainerDied","Data":"bacf1459d637ebaf1473ffbba9c90d2886c77004f0c01d13c0668368ade96df2"} Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.969835 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7f461fed-9df2-44a5-b99c-17f30adf0d9c","Type":"ContainerDied","Data":"088e9ad3206eafb668d6b41c19fb219140daba1f69cf0186fd2f271c5c6aa17f"} Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.989878 4971 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/novaapi6022-account-delete-bhttv" secret="" err="secret \"galera-openstack-dockercfg-kzxgw\" not found" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.989938 4971 scope.go:117] "RemoveContainer" containerID="548307ed473102352f01c5222cbdbd6761e2d348135638d2e5a8fab5b038e452" Nov 27 07:18:56 crc kubenswrapper[4971]: E1127 07:18:56.991091 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=novaapi6022-account-delete-bhttv_openstack(cbef532c-59fb-40ac-bde3-35b8f3616d85)\"" pod="openstack/novaapi6022-account-delete-bhttv" podUID="cbef532c-59fb-40ac-bde3-35b8f3616d85" Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.994883 4971 generic.go:334] "Generic (PLEG): container finished" podID="36cda10d-7f8f-403c-82b8-fbbdf89e8ed5" containerID="b97556ba2fd93384c3dcc5c7577bad7f95a4205ce2fc105bf6f734f22b25ff2a" exitCode=0 Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.995107 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"36cda10d-7f8f-403c-82b8-fbbdf89e8ed5","Type":"ContainerDied","Data":"b97556ba2fd93384c3dcc5c7577bad7f95a4205ce2fc105bf6f734f22b25ff2a"} Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.995203 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"36cda10d-7f8f-403c-82b8-fbbdf89e8ed5","Type":"ContainerDied","Data":"0ee8e4cedf669da3fc4b843f49bd5320748a375994ea141b5f0843e3240ccf4b"} Nov 27 07:18:56 crc kubenswrapper[4971]: I1127 07:18:56.995355 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.002595 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.006026 4971 generic.go:334] "Generic (PLEG): container finished" podID="b2ac1dd8-824c-482d-8c0e-47573535f172" containerID="e21d5793819283c5f8059801c98537ef24923751613488a4fef7ec0596a380e9" exitCode=0 Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.006113 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-55bd67bdbd-llwzk" event={"ID":"b2ac1dd8-824c-482d-8c0e-47573535f172","Type":"ContainerDied","Data":"e21d5793819283c5f8059801c98537ef24923751613488a4fef7ec0596a380e9"} Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.006141 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-55bd67bdbd-llwzk" event={"ID":"b2ac1dd8-824c-482d-8c0e-47573535f172","Type":"ContainerDied","Data":"769d7df66c9874c427870af3be46b762847efa06d0b942d45f7c0691a477006f"} Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.006252 4971 util.go:48] "No ready sandbox for pod can be found. 
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.012887 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41a3ef31-df5e-4cb9-8983-40a16f46823c-run-httpd\") pod \"41a3ef31-df5e-4cb9-8983-40a16f46823c\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") "
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.012935 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-scripts\") pod \"41a3ef31-df5e-4cb9-8983-40a16f46823c\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") "
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.012971 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbft4\" (UniqueName: \"kubernetes.io/projected/41a3ef31-df5e-4cb9-8983-40a16f46823c-kube-api-access-xbft4\") pod \"41a3ef31-df5e-4cb9-8983-40a16f46823c\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") "
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.013073 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-ceilometer-tls-certs\") pod \"41a3ef31-df5e-4cb9-8983-40a16f46823c\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") "
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.013136 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-config-data\") pod \"41a3ef31-df5e-4cb9-8983-40a16f46823c\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") "
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.013189 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-sg-core-conf-yaml\") pod \"41a3ef31-df5e-4cb9-8983-40a16f46823c\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") "
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.013210 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41a3ef31-df5e-4cb9-8983-40a16f46823c-log-httpd\") pod \"41a3ef31-df5e-4cb9-8983-40a16f46823c\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") "
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.013262 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-combined-ca-bundle\") pod \"41a3ef31-df5e-4cb9-8983-40a16f46823c\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") "
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.019413 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41a3ef31-df5e-4cb9-8983-40a16f46823c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "41a3ef31-df5e-4cb9-8983-40a16f46823c" (UID: "41a3ef31-df5e-4cb9-8983-40a16f46823c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.021516 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41a3ef31-df5e-4cb9-8983-40a16f46823c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "41a3ef31-df5e-4cb9-8983-40a16f46823c" (UID: "41a3ef31-df5e-4cb9-8983-40a16f46823c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.037832 4971 generic.go:334] "Generic (PLEG): container finished" podID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerID="268b7b5e3e099e59f58680414492d2b607e8bfc3bf0455ac64792b601ae0a545" exitCode=0
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.037888 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41a3ef31-df5e-4cb9-8983-40a16f46823c","Type":"ContainerDied","Data":"268b7b5e3e099e59f58680414492d2b607e8bfc3bf0455ac64792b601ae0a545"}
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.037915 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41a3ef31-df5e-4cb9-8983-40a16f46823c","Type":"ContainerDied","Data":"dcad59636dc1a6d93f37718d2ad05b92be2d5dd4fe4fa7c3cd8a3c37b02610ec"}
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.037983 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.044675 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"]
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.044668 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "41a3ef31-df5e-4cb9-8983-40a16f46823c" (UID: "41a3ef31-df5e-4cb9-8983-40a16f46823c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.044698 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41a3ef31-df5e-4cb9-8983-40a16f46823c-kube-api-access-xbft4" (OuterVolumeSpecName: "kube-api-access-xbft4") pod "41a3ef31-df5e-4cb9-8983-40a16f46823c" (UID: "41a3ef31-df5e-4cb9-8983-40a16f46823c"). InnerVolumeSpecName "kube-api-access-xbft4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.044681 4971 scope.go:117] "RemoveContainer" containerID="c882ea5da9f0b4988139836dea25aa544de3122c3c530bac76a1e7264a8122e0"
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.044987 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-scripts" (OuterVolumeSpecName: "scripts") pod "41a3ef31-df5e-4cb9-8983-40a16f46823c" (UID: "41a3ef31-df5e-4cb9-8983-40a16f46823c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.054807 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.068946 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.073985 4971 scope.go:117] "RemoveContainer" containerID="7af3d67f7f3c155ae9f2f5faa014fea2a48dc67f24c202cc5665e7b51df1c641" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.081874 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-55bd67bdbd-llwzk"] Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.082691 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-55bd67bdbd-llwzk"] Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.092059 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "41a3ef31-df5e-4cb9-8983-40a16f46823c" (UID: "41a3ef31-df5e-4cb9-8983-40a16f46823c"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.101752 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "41a3ef31-df5e-4cb9-8983-40a16f46823c" (UID: "41a3ef31-df5e-4cb9-8983-40a16f46823c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.103491 4971 scope.go:117] "RemoveContainer" containerID="bacf1459d637ebaf1473ffbba9c90d2886c77004f0c01d13c0668368ade96df2" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.114811 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-config-data" (OuterVolumeSpecName: "config-data") pod "41a3ef31-df5e-4cb9-8983-40a16f46823c" (UID: "41a3ef31-df5e-4cb9-8983-40a16f46823c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.115296 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-config-data\") pod \"41a3ef31-df5e-4cb9-8983-40a16f46823c\" (UID: \"41a3ef31-df5e-4cb9-8983-40a16f46823c\") " Nov 27 07:18:57 crc kubenswrapper[4971]: W1127 07:18:57.115425 4971 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/41a3ef31-df5e-4cb9-8983-40a16f46823c/volumes/kubernetes.io~secret/config-data Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.115482 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-config-data" (OuterVolumeSpecName: "config-data") pod "41a3ef31-df5e-4cb9-8983-40a16f46823c" (UID: "41a3ef31-df5e-4cb9-8983-40a16f46823c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.115743 4971 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41a3ef31-df5e-4cb9-8983-40a16f46823c-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.115764 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.115775 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbft4\" (UniqueName: \"kubernetes.io/projected/41a3ef31-df5e-4cb9-8983-40a16f46823c-kube-api-access-xbft4\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.115788 4971 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.115797 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.115805 4971 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.115813 4971 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41a3ef31-df5e-4cb9-8983-40a16f46823c-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.115823 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41a3ef31-df5e-4cb9-8983-40a16f46823c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.123229 4971 scope.go:117] "RemoveContainer" containerID="bacf1459d637ebaf1473ffbba9c90d2886c77004f0c01d13c0668368ade96df2" Nov 27 07:18:57 crc kubenswrapper[4971]: E1127 07:18:57.123891 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bacf1459d637ebaf1473ffbba9c90d2886c77004f0c01d13c0668368ade96df2\": container with ID starting with bacf1459d637ebaf1473ffbba9c90d2886c77004f0c01d13c0668368ade96df2 not found: ID does not exist" containerID="bacf1459d637ebaf1473ffbba9c90d2886c77004f0c01d13c0668368ade96df2" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.123935 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bacf1459d637ebaf1473ffbba9c90d2886c77004f0c01d13c0668368ade96df2"} err="failed to get container status \"bacf1459d637ebaf1473ffbba9c90d2886c77004f0c01d13c0668368ade96df2\": rpc error: code = NotFound desc = could not find container \"bacf1459d637ebaf1473ffbba9c90d2886c77004f0c01d13c0668368ade96df2\": container with ID starting with bacf1459d637ebaf1473ffbba9c90d2886c77004f0c01d13c0668368ade96df2 not found: ID does not exist" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.124144 4971 scope.go:117] "RemoveContainer" 
containerID="b97556ba2fd93384c3dcc5c7577bad7f95a4205ce2fc105bf6f734f22b25ff2a" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.144572 4971 scope.go:117] "RemoveContainer" containerID="b97556ba2fd93384c3dcc5c7577bad7f95a4205ce2fc105bf6f734f22b25ff2a" Nov 27 07:18:57 crc kubenswrapper[4971]: E1127 07:18:57.145033 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b97556ba2fd93384c3dcc5c7577bad7f95a4205ce2fc105bf6f734f22b25ff2a\": container with ID starting with b97556ba2fd93384c3dcc5c7577bad7f95a4205ce2fc105bf6f734f22b25ff2a not found: ID does not exist" containerID="b97556ba2fd93384c3dcc5c7577bad7f95a4205ce2fc105bf6f734f22b25ff2a" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.145063 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b97556ba2fd93384c3dcc5c7577bad7f95a4205ce2fc105bf6f734f22b25ff2a"} err="failed to get container status \"b97556ba2fd93384c3dcc5c7577bad7f95a4205ce2fc105bf6f734f22b25ff2a\": rpc error: code = NotFound desc = could not find container \"b97556ba2fd93384c3dcc5c7577bad7f95a4205ce2fc105bf6f734f22b25ff2a\": container with ID starting with b97556ba2fd93384c3dcc5c7577bad7f95a4205ce2fc105bf6f734f22b25ff2a not found: ID does not exist" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.145085 4971 scope.go:117] "RemoveContainer" containerID="e21d5793819283c5f8059801c98537ef24923751613488a4fef7ec0596a380e9" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.171991 4971 scope.go:117] "RemoveContainer" containerID="e21d5793819283c5f8059801c98537ef24923751613488a4fef7ec0596a380e9" Nov 27 07:18:57 crc kubenswrapper[4971]: E1127 07:18:57.172423 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e21d5793819283c5f8059801c98537ef24923751613488a4fef7ec0596a380e9\": container with ID starting with e21d5793819283c5f8059801c98537ef24923751613488a4fef7ec0596a380e9 not found: ID does not exist" containerID="e21d5793819283c5f8059801c98537ef24923751613488a4fef7ec0596a380e9" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.172468 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e21d5793819283c5f8059801c98537ef24923751613488a4fef7ec0596a380e9"} err="failed to get container status \"e21d5793819283c5f8059801c98537ef24923751613488a4fef7ec0596a380e9\": rpc error: code = NotFound desc = could not find container \"e21d5793819283c5f8059801c98537ef24923751613488a4fef7ec0596a380e9\": container with ID starting with e21d5793819283c5f8059801c98537ef24923751613488a4fef7ec0596a380e9 not found: ID does not exist" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.172498 4971 scope.go:117] "RemoveContainer" containerID="246b3a277e05318dff5ef1ad5944cae22aa133678c3e078b2b0e80c043b33db8" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.192817 4971 scope.go:117] "RemoveContainer" containerID="0b2dec937686384d25cd26d898333c4fc81933ae416ed8605ae98f8c6d7ffe0d" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.215734 4971 scope.go:117] "RemoveContainer" containerID="268b7b5e3e099e59f58680414492d2b607e8bfc3bf0455ac64792b601ae0a545" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.252741 4971 scope.go:117] "RemoveContainer" containerID="e42e5fec5c109e2ea0c69c634a81c7905e5cc3f5aa9cfe275d500939cb600b2b" Nov 27 07:18:57 crc kubenswrapper[4971]: E1127 07:18:57.255677 4971 log.go:32] "ExecSync cmd from runtime 
service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 27 07:18:57 crc kubenswrapper[4971]: E1127 07:18:57.255922 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 27 07:18:57 crc kubenswrapper[4971]: E1127 07:18:57.256164 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 27 07:18:57 crc kubenswrapper[4971]: E1127 07:18:57.256191 4971 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-45rt8" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovsdb-server" Nov 27 07:18:57 crc kubenswrapper[4971]: E1127 07:18:57.256858 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 27 07:18:57 crc kubenswrapper[4971]: E1127 07:18:57.259121 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 27 07:18:57 crc kubenswrapper[4971]: E1127 07:18:57.261567 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 27 07:18:57 crc kubenswrapper[4971]: E1127 07:18:57.261635 4971 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-45rt8" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovs-vswitchd" Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.280394 4971 scope.go:117] "RemoveContainer" containerID="246b3a277e05318dff5ef1ad5944cae22aa133678c3e078b2b0e80c043b33db8" Nov 27 07:18:57 crc 
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.280963 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"246b3a277e05318dff5ef1ad5944cae22aa133678c3e078b2b0e80c043b33db8"} err="failed to get container status \"246b3a277e05318dff5ef1ad5944cae22aa133678c3e078b2b0e80c043b33db8\": rpc error: code = NotFound desc = could not find container \"246b3a277e05318dff5ef1ad5944cae22aa133678c3e078b2b0e80c043b33db8\": container with ID starting with 246b3a277e05318dff5ef1ad5944cae22aa133678c3e078b2b0e80c043b33db8 not found: ID does not exist"
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.280991 4971 scope.go:117] "RemoveContainer" containerID="0b2dec937686384d25cd26d898333c4fc81933ae416ed8605ae98f8c6d7ffe0d"
Nov 27 07:18:57 crc kubenswrapper[4971]: E1127 07:18:57.282032 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b2dec937686384d25cd26d898333c4fc81933ae416ed8605ae98f8c6d7ffe0d\": container with ID starting with 0b2dec937686384d25cd26d898333c4fc81933ae416ed8605ae98f8c6d7ffe0d not found: ID does not exist" containerID="0b2dec937686384d25cd26d898333c4fc81933ae416ed8605ae98f8c6d7ffe0d"
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.284728 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b2dec937686384d25cd26d898333c4fc81933ae416ed8605ae98f8c6d7ffe0d"} err="failed to get container status \"0b2dec937686384d25cd26d898333c4fc81933ae416ed8605ae98f8c6d7ffe0d\": rpc error: code = NotFound desc = could not find container \"0b2dec937686384d25cd26d898333c4fc81933ae416ed8605ae98f8c6d7ffe0d\": container with ID starting with 0b2dec937686384d25cd26d898333c4fc81933ae416ed8605ae98f8c6d7ffe0d not found: ID does not exist"
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.284768 4971 scope.go:117] "RemoveContainer" containerID="268b7b5e3e099e59f58680414492d2b607e8bfc3bf0455ac64792b601ae0a545"
Nov 27 07:18:57 crc kubenswrapper[4971]: E1127 07:18:57.285162 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"268b7b5e3e099e59f58680414492d2b607e8bfc3bf0455ac64792b601ae0a545\": container with ID starting with 268b7b5e3e099e59f58680414492d2b607e8bfc3bf0455ac64792b601ae0a545 not found: ID does not exist" containerID="268b7b5e3e099e59f58680414492d2b607e8bfc3bf0455ac64792b601ae0a545"
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.285193 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"268b7b5e3e099e59f58680414492d2b607e8bfc3bf0455ac64792b601ae0a545"} err="failed to get container status \"268b7b5e3e099e59f58680414492d2b607e8bfc3bf0455ac64792b601ae0a545\": rpc error: code = NotFound desc = could not find container \"268b7b5e3e099e59f58680414492d2b607e8bfc3bf0455ac64792b601ae0a545\": container with ID starting with 268b7b5e3e099e59f58680414492d2b607e8bfc3bf0455ac64792b601ae0a545 not found: ID does not exist"
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.285231 4971 scope.go:117] "RemoveContainer" containerID="e42e5fec5c109e2ea0c69c634a81c7905e5cc3f5aa9cfe275d500939cb600b2b"
Nov 27 07:18:57 crc kubenswrapper[4971]: E1127 07:18:57.285568 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e42e5fec5c109e2ea0c69c634a81c7905e5cc3f5aa9cfe275d500939cb600b2b\": container with ID starting with e42e5fec5c109e2ea0c69c634a81c7905e5cc3f5aa9cfe275d500939cb600b2b not found: ID does not exist" containerID="e42e5fec5c109e2ea0c69c634a81c7905e5cc3f5aa9cfe275d500939cb600b2b"
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.285593 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e42e5fec5c109e2ea0c69c634a81c7905e5cc3f5aa9cfe275d500939cb600b2b"} err="failed to get container status \"e42e5fec5c109e2ea0c69c634a81c7905e5cc3f5aa9cfe275d500939cb600b2b\": rpc error: code = NotFound desc = could not find container \"e42e5fec5c109e2ea0c69c634a81c7905e5cc3f5aa9cfe275d500939cb600b2b\": container with ID starting with e42e5fec5c109e2ea0c69c634a81c7905e5cc3f5aa9cfe275d500939cb600b2b not found: ID does not exist"
Nov 27 07:18:57 crc kubenswrapper[4971]: E1127 07:18:57.317978 4971 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found
Nov 27 07:18:57 crc kubenswrapper[4971]: E1127 07:18:57.318094 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/19b68245-2b99-4337-892f-059f05113ad6-operator-scripts podName:19b68245-2b99-4337-892f-059f05113ad6 nodeName:}" failed. No retries permitted until 2025-11-27 07:19:01.318042748 +0000 UTC m=+1579.510086666 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/19b68245-2b99-4337-892f-059f05113ad6-operator-scripts") pod "novacell091e4-account-delete-8kghx" (UID: "19b68245-2b99-4337-892f-059f05113ad6") : configmap "openstack-scripts" not found
Nov 27 07:18:57 crc kubenswrapper[4971]: E1127 07:18:57.319699 4971 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found
Nov 27 07:18:57 crc kubenswrapper[4971]: E1127 07:18:57.319760 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cbef532c-59fb-40ac-bde3-35b8f3616d85-operator-scripts podName:cbef532c-59fb-40ac-bde3-35b8f3616d85 nodeName:}" failed. No retries permitted until 2025-11-27 07:19:01.319744077 +0000 UTC m=+1579.511787995 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/cbef532c-59fb-40ac-bde3-35b8f3616d85-operator-scripts") pod "novaapi6022-account-delete-bhttv" (UID: "cbef532c-59fb-40ac-bde3-35b8f3616d85") : configmap "openstack-scripts" not found
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.381226 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.389230 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.425348 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f461fed-9df2-44a5-b99c-17f30adf0d9c-config-data\") pod \"7f461fed-9df2-44a5-b99c-17f30adf0d9c\" (UID: \"7f461fed-9df2-44a5-b99c-17f30adf0d9c\") "
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.431827 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f461fed-9df2-44a5-b99c-17f30adf0d9c-config-data" (OuterVolumeSpecName: "config-data") pod "7f461fed-9df2-44a5-b99c-17f30adf0d9c" (UID: "7f461fed-9df2-44a5-b99c-17f30adf0d9c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.527938 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f461fed-9df2-44a5-b99c-17f30adf0d9c-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.605457 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.611957 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Nov 27 07:18:57 crc kubenswrapper[4971]: I1127 07:18:57.809854 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.163:8776/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.459502 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell091e4-account-delete-8kghx"
Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.469238 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi6022-account-delete-bhttv"
Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.559813 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36cda10d-7f8f-403c-82b8-fbbdf89e8ed5" path="/var/lib/kubelet/pods/36cda10d-7f8f-403c-82b8-fbbdf89e8ed5/volumes"
Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.560444 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41a3ef31-df5e-4cb9-8983-40a16f46823c" path="/var/lib/kubelet/pods/41a3ef31-df5e-4cb9-8983-40a16f46823c/volumes"
Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.561895 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a0dcbed-5f66-4faf-83c2-1227bc05e9d3" path="/var/lib/kubelet/pods/4a0dcbed-5f66-4faf-83c2-1227bc05e9d3/volumes"
Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.563095 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f461fed-9df2-44a5-b99c-17f30adf0d9c" path="/var/lib/kubelet/pods/7f461fed-9df2-44a5-b99c-17f30adf0d9c/volumes"
Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.563699 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2ac1dd8-824c-482d-8c0e-47573535f172" path="/var/lib/kubelet/pods/b2ac1dd8-824c-482d-8c0e-47573535f172/volumes"
Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.650889 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5v769\" (UniqueName: \"kubernetes.io/projected/cbef532c-59fb-40ac-bde3-35b8f3616d85-kube-api-access-5v769\") pod \"cbef532c-59fb-40ac-bde3-35b8f3616d85\" (UID: \"cbef532c-59fb-40ac-bde3-35b8f3616d85\") "
Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.650956 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55tdk\" (UniqueName: \"kubernetes.io/projected/19b68245-2b99-4337-892f-059f05113ad6-kube-api-access-55tdk\") pod \"19b68245-2b99-4337-892f-059f05113ad6\" (UID: \"19b68245-2b99-4337-892f-059f05113ad6\") "
Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.651036 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19b68245-2b99-4337-892f-059f05113ad6-operator-scripts\") pod \"19b68245-2b99-4337-892f-059f05113ad6\" (UID: \"19b68245-2b99-4337-892f-059f05113ad6\") "
Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.651065 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbef532c-59fb-40ac-bde3-35b8f3616d85-operator-scripts\") pod \"cbef532c-59fb-40ac-bde3-35b8f3616d85\" (UID: \"cbef532c-59fb-40ac-bde3-35b8f3616d85\") "
Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.651981 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cbef532c-59fb-40ac-bde3-35b8f3616d85-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cbef532c-59fb-40ac-bde3-35b8f3616d85" (UID: "cbef532c-59fb-40ac-bde3-35b8f3616d85"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.653261 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19b68245-2b99-4337-892f-059f05113ad6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "19b68245-2b99-4337-892f-059f05113ad6" (UID: "19b68245-2b99-4337-892f-059f05113ad6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.657229 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbef532c-59fb-40ac-bde3-35b8f3616d85-kube-api-access-5v769" (OuterVolumeSpecName: "kube-api-access-5v769") pod "cbef532c-59fb-40ac-bde3-35b8f3616d85" (UID: "cbef532c-59fb-40ac-bde3-35b8f3616d85"). InnerVolumeSpecName "kube-api-access-5v769". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.673055 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19b68245-2b99-4337-892f-059f05113ad6-kube-api-access-55tdk" (OuterVolumeSpecName: "kube-api-access-55tdk") pod "19b68245-2b99-4337-892f-059f05113ad6" (UID: "19b68245-2b99-4337-892f-059f05113ad6"). InnerVolumeSpecName "kube-api-access-55tdk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.752641 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5v769\" (UniqueName: \"kubernetes.io/projected/cbef532c-59fb-40ac-bde3-35b8f3616d85-kube-api-access-5v769\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.752679 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55tdk\" (UniqueName: \"kubernetes.io/projected/19b68245-2b99-4337-892f-059f05113ad6-kube-api-access-55tdk\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.752690 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19b68245-2b99-4337-892f-059f05113ad6-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.752702 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbef532c-59fb-40ac-bde3-35b8f3616d85-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.887101 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="35b90587-df5b-4f15-8c34-f1b0a8506d85" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 27 07:18:58 crc kubenswrapper[4971]: I1127 07:18:58.887101 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="35b90587-df5b-4f15-8c34-f1b0a8506d85" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 27 07:18:59 crc kubenswrapper[4971]: I1127 07:18:59.091464 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi6022-account-delete-bhttv" 
event={"ID":"cbef532c-59fb-40ac-bde3-35b8f3616d85","Type":"ContainerDied","Data":"cb55fc6b1f2a82665057fd9c58c97524af59cb9d419c8c4c0b115571d6fab162"} Nov 27 07:18:59 crc kubenswrapper[4971]: I1127 07:18:59.091530 4971 scope.go:117] "RemoveContainer" containerID="548307ed473102352f01c5222cbdbd6761e2d348135638d2e5a8fab5b038e452" Nov 27 07:18:59 crc kubenswrapper[4971]: I1127 07:18:59.091732 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi6022-account-delete-bhttv" Nov 27 07:18:59 crc kubenswrapper[4971]: I1127 07:18:59.096313 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell091e4-account-delete-8kghx" event={"ID":"19b68245-2b99-4337-892f-059f05113ad6","Type":"ContainerDied","Data":"0ce441afde4807195b4b59dba4caac3ab937e0bf4580f4cc2df090f555bc7247"} Nov 27 07:18:59 crc kubenswrapper[4971]: I1127 07:18:59.096435 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell091e4-account-delete-8kghx" Nov 27 07:18:59 crc kubenswrapper[4971]: I1127 07:18:59.118978 4971 scope.go:117] "RemoveContainer" containerID="f4593d28b30f070f2c552a0321ee906b7eb0e049e6bb65ff5d226316e092db81" Nov 27 07:18:59 crc kubenswrapper[4971]: I1127 07:18:59.231795 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell091e4-account-delete-8kghx"] Nov 27 07:18:59 crc kubenswrapper[4971]: I1127 07:18:59.240805 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novacell091e4-account-delete-8kghx"] Nov 27 07:18:59 crc kubenswrapper[4971]: I1127 07:18:59.256215 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi6022-account-delete-bhttv"] Nov 27 07:18:59 crc kubenswrapper[4971]: I1127 07:18:59.261512 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novaapi6022-account-delete-bhttv"] Nov 27 07:19:00 crc kubenswrapper[4971]: I1127 07:19:00.559066 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19b68245-2b99-4337-892f-059f05113ad6" path="/var/lib/kubelet/pods/19b68245-2b99-4337-892f-059f05113ad6/volumes" Nov 27 07:19:00 crc kubenswrapper[4971]: I1127 07:19:00.560021 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbef532c-59fb-40ac-bde3-35b8f3616d85" path="/var/lib/kubelet/pods/cbef532c-59fb-40ac-bde3-35b8f3616d85/volumes" Nov 27 07:19:00 crc kubenswrapper[4971]: I1127 07:19:00.648104 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-7b8cfffdbf-92cj6" podUID="622e028f-779d-4306-923c-ee204fdef6b0" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.152:9696/\": dial tcp 10.217.0.152:9696: connect: connection refused" Nov 27 07:19:02 crc kubenswrapper[4971]: E1127 07:19:02.255834 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 27 07:19:02 crc kubenswrapper[4971]: E1127 07:19:02.256643 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not 
found" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 27 07:19:02 crc kubenswrapper[4971]: E1127 07:19:02.256955 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 27 07:19:02 crc kubenswrapper[4971]: E1127 07:19:02.257015 4971 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-45rt8" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovsdb-server" Nov 27 07:19:02 crc kubenswrapper[4971]: E1127 07:19:02.257631 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 27 07:19:02 crc kubenswrapper[4971]: E1127 07:19:02.259141 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 27 07:19:02 crc kubenswrapper[4971]: E1127 07:19:02.260125 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 27 07:19:02 crc kubenswrapper[4971]: E1127 07:19:02.260170 4971 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-45rt8" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovs-vswitchd" Nov 27 07:19:02 crc kubenswrapper[4971]: E1127 07:19:02.645262 4971 projected.go:288] Couldn't get configMap openstack/swift-storage-config-data: configmap "swift-storage-config-data" not found Nov 27 07:19:02 crc kubenswrapper[4971]: E1127 07:19:02.645298 4971 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found Nov 27 07:19:02 crc kubenswrapper[4971]: E1127 07:19:02.645308 4971 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 27 07:19:02 crc kubenswrapper[4971]: E1127 07:19:02.645319 4971 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Nov 27 07:19:02 crc kubenswrapper[4971]: E1127 07:19:02.645378 4971 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift podName:b9ccc9bd-d955-4853-986f-95597f2c70e6 nodeName:}" failed. No retries permitted until 2025-11-27 07:19:18.645357373 +0000 UTC m=+1596.837401291 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift") pod "swift-storage-0" (UID: "b9ccc9bd-d955-4853-986f-95597f2c70e6") : [configmap "swift-storage-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.142185 4971 generic.go:334] "Generic (PLEG): container finished" podID="622e028f-779d-4306-923c-ee204fdef6b0" containerID="2685d0639fa1ae15b7a9608c81aa2b95f5d3f50c5aefc4159568fe3afd5b0c33" exitCode=0 Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.142234 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7b8cfffdbf-92cj6" event={"ID":"622e028f-779d-4306-923c-ee204fdef6b0","Type":"ContainerDied","Data":"2685d0639fa1ae15b7a9608c81aa2b95f5d3f50c5aefc4159568fe3afd5b0c33"} Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.535294 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.658509 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-httpd-config\") pod \"622e028f-779d-4306-923c-ee204fdef6b0\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.658621 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-public-tls-certs\") pod \"622e028f-779d-4306-923c-ee204fdef6b0\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.658660 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-internal-tls-certs\") pod \"622e028f-779d-4306-923c-ee204fdef6b0\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.658754 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-ovndb-tls-certs\") pod \"622e028f-779d-4306-923c-ee204fdef6b0\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.658794 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtz2h\" (UniqueName: \"kubernetes.io/projected/622e028f-779d-4306-923c-ee204fdef6b0-kube-api-access-vtz2h\") pod \"622e028f-779d-4306-923c-ee204fdef6b0\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.658862 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-combined-ca-bundle\") pod \"622e028f-779d-4306-923c-ee204fdef6b0\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " Nov 27 07:19:03 crc 
kubenswrapper[4971]: I1127 07:19:03.658942 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-config\") pod \"622e028f-779d-4306-923c-ee204fdef6b0\" (UID: \"622e028f-779d-4306-923c-ee204fdef6b0\") " Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.663757 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/622e028f-779d-4306-923c-ee204fdef6b0-kube-api-access-vtz2h" (OuterVolumeSpecName: "kube-api-access-vtz2h") pod "622e028f-779d-4306-923c-ee204fdef6b0" (UID: "622e028f-779d-4306-923c-ee204fdef6b0"). InnerVolumeSpecName "kube-api-access-vtz2h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.663822 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "622e028f-779d-4306-923c-ee204fdef6b0" (UID: "622e028f-779d-4306-923c-ee204fdef6b0"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.703979 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "622e028f-779d-4306-923c-ee204fdef6b0" (UID: "622e028f-779d-4306-923c-ee204fdef6b0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.705429 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "622e028f-779d-4306-923c-ee204fdef6b0" (UID: "622e028f-779d-4306-923c-ee204fdef6b0"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.705903 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-config" (OuterVolumeSpecName: "config") pod "622e028f-779d-4306-923c-ee204fdef6b0" (UID: "622e028f-779d-4306-923c-ee204fdef6b0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.718118 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "622e028f-779d-4306-923c-ee204fdef6b0" (UID: "622e028f-779d-4306-923c-ee204fdef6b0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.727615 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "622e028f-779d-4306-923c-ee204fdef6b0" (UID: "622e028f-779d-4306-923c-ee204fdef6b0"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.761328 4971 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.761365 4971 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.761380 4971 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.761392 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtz2h\" (UniqueName: \"kubernetes.io/projected/622e028f-779d-4306-923c-ee204fdef6b0-kube-api-access-vtz2h\") on node \"crc\" DevicePath \"\"" Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.761405 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.761416 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:19:03 crc kubenswrapper[4971]: I1127 07:19:03.761426 4971 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/622e028f-779d-4306-923c-ee204fdef6b0-httpd-config\") on node \"crc\" DevicePath \"\"" Nov 27 07:19:04 crc kubenswrapper[4971]: I1127 07:19:04.157147 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7b8cfffdbf-92cj6" event={"ID":"622e028f-779d-4306-923c-ee204fdef6b0","Type":"ContainerDied","Data":"cfacdbff5ae68f2b65336fa8989a19c54e1983372c976d51adf9f4847b8f7be3"} Nov 27 07:19:04 crc kubenswrapper[4971]: I1127 07:19:04.157232 4971 scope.go:117] "RemoveContainer" containerID="360d9e165c0c9e2b9b0b1cee20e9ddcb63854ee168b8dc44e7daf6d665e3532e" Nov 27 07:19:04 crc kubenswrapper[4971]: I1127 07:19:04.157245 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7b8cfffdbf-92cj6" Nov 27 07:19:04 crc kubenswrapper[4971]: I1127 07:19:04.182503 4971 scope.go:117] "RemoveContainer" containerID="2685d0639fa1ae15b7a9608c81aa2b95f5d3f50c5aefc4159568fe3afd5b0c33" Nov 27 07:19:04 crc kubenswrapper[4971]: I1127 07:19:04.232639 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7b8cfffdbf-92cj6"] Nov 27 07:19:04 crc kubenswrapper[4971]: I1127 07:19:04.240436 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7b8cfffdbf-92cj6"] Nov 27 07:19:04 crc kubenswrapper[4971]: I1127 07:19:04.558917 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="622e028f-779d-4306-923c-ee204fdef6b0" path="/var/lib/kubelet/pods/622e028f-779d-4306-923c-ee204fdef6b0/volumes" Nov 27 07:19:06 crc kubenswrapper[4971]: I1127 07:19:06.550613 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04" Nov 27 07:19:06 crc kubenswrapper[4971]: E1127 07:19:06.551091 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:19:07 crc kubenswrapper[4971]: E1127 07:19:07.255749 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 27 07:19:07 crc kubenswrapper[4971]: E1127 07:19:07.256124 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 27 07:19:07 crc kubenswrapper[4971]: E1127 07:19:07.256462 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 27 07:19:07 crc kubenswrapper[4971]: E1127 07:19:07.256494 4971 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-45rt8" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovsdb-server" Nov 27 07:19:07 crc kubenswrapper[4971]: E1127 07:19:07.257201 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an 
exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 27 07:19:07 crc kubenswrapper[4971]: E1127 07:19:07.264152 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 27 07:19:07 crc kubenswrapper[4971]: E1127 07:19:07.271066 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 27 07:19:07 crc kubenswrapper[4971]: E1127 07:19:07.271153 4971 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-45rt8" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovs-vswitchd" Nov 27 07:19:12 crc kubenswrapper[4971]: E1127 07:19:12.255360 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 27 07:19:12 crc kubenswrapper[4971]: E1127 07:19:12.256496 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 27 07:19:12 crc kubenswrapper[4971]: E1127 07:19:12.256853 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 27 07:19:12 crc kubenswrapper[4971]: E1127 07:19:12.256889 4971 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-45rt8" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovsdb-server" Nov 27 07:19:12 crc kubenswrapper[4971]: E1127 07:19:12.257382 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 27 07:19:12 crc kubenswrapper[4971]: E1127 07:19:12.258869 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 27 07:19:12 crc kubenswrapper[4971]: E1127 07:19:12.260441 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 27 07:19:12 crc kubenswrapper[4971]: E1127 07:19:12.260469 4971 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-45rt8" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovs-vswitchd" Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.280938 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-45rt8_6facf3b5-48aa-4a38-823e-6b7adbbcdfee/ovs-vswitchd/0.log" Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.284017 4971 generic.go:334] "Generic (PLEG): container finished" podID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" exitCode=137 Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.284062 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-45rt8" event={"ID":"6facf3b5-48aa-4a38-823e-6b7adbbcdfee","Type":"ContainerDied","Data":"8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34"} Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.600445 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-45rt8_6facf3b5-48aa-4a38-823e-6b7adbbcdfee/ovs-vswitchd/0.log" Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.601723 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.764751 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-var-log\") pod \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.764812 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-etc-ovs\") pod \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.764846 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-var-lib\") pod \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.764876 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9wff\" (UniqueName: \"kubernetes.io/projected/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-kube-api-access-d9wff\") pod \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.764906 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "6facf3b5-48aa-4a38-823e-6b7adbbcdfee" (UID: "6facf3b5-48aa-4a38-823e-6b7adbbcdfee"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.764924 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-scripts\") pod \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.764969 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-var-lib" (OuterVolumeSpecName: "var-lib") pod "6facf3b5-48aa-4a38-823e-6b7adbbcdfee" (UID: "6facf3b5-48aa-4a38-823e-6b7adbbcdfee"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.764906 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-var-log" (OuterVolumeSpecName: "var-log") pod "6facf3b5-48aa-4a38-823e-6b7adbbcdfee" (UID: "6facf3b5-48aa-4a38-823e-6b7adbbcdfee"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.765071 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-var-run\") pod \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\" (UID: \"6facf3b5-48aa-4a38-823e-6b7adbbcdfee\") " Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.765153 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-var-run" (OuterVolumeSpecName: "var-run") pod "6facf3b5-48aa-4a38-823e-6b7adbbcdfee" (UID: "6facf3b5-48aa-4a38-823e-6b7adbbcdfee"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.765732 4971 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-var-run\") on node \"crc\" DevicePath \"\"" Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.765760 4971 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-var-log\") on node \"crc\" DevicePath \"\"" Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.765772 4971 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-etc-ovs\") on node \"crc\" DevicePath \"\"" Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.765783 4971 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-var-lib\") on node \"crc\" DevicePath \"\"" Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.766027 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-scripts" (OuterVolumeSpecName: "scripts") pod "6facf3b5-48aa-4a38-823e-6b7adbbcdfee" (UID: "6facf3b5-48aa-4a38-823e-6b7adbbcdfee"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.769871 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-kube-api-access-d9wff" (OuterVolumeSpecName: "kube-api-access-d9wff") pod "6facf3b5-48aa-4a38-823e-6b7adbbcdfee" (UID: "6facf3b5-48aa-4a38-823e-6b7adbbcdfee"). InnerVolumeSpecName "kube-api-access-d9wff". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.867342 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9wff\" (UniqueName: \"kubernetes.io/projected/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-kube-api-access-d9wff\") on node \"crc\" DevicePath \"\"" Nov 27 07:19:16 crc kubenswrapper[4971]: I1127 07:19:16.867378 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6facf3b5-48aa-4a38-823e-6b7adbbcdfee-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 07:19:17 crc kubenswrapper[4971]: I1127 07:19:17.294387 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-45rt8_6facf3b5-48aa-4a38-823e-6b7adbbcdfee/ovs-vswitchd/0.log" Nov 27 07:19:17 crc kubenswrapper[4971]: I1127 07:19:17.295123 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-45rt8" event={"ID":"6facf3b5-48aa-4a38-823e-6b7adbbcdfee","Type":"ContainerDied","Data":"9b40984b9eca07ba195f28e6dcd857b2b4c7a263a570bb3a7e7ef5022c1c0d62"} Nov 27 07:19:17 crc kubenswrapper[4971]: I1127 07:19:17.295179 4971 scope.go:117] "RemoveContainer" containerID="8f63e70bc533062faba370a611b76ae194f52aeabfa2c53bea1503c7257a0a34" Nov 27 07:19:17 crc kubenswrapper[4971]: I1127 07:19:17.295212 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-45rt8" Nov 27 07:19:17 crc kubenswrapper[4971]: I1127 07:19:17.328378 4971 scope.go:117] "RemoveContainer" containerID="d33b927b1763dc0119ed1223d1f361f5849c6145f04c2d2dffc8642b41c591a2" Nov 27 07:19:17 crc kubenswrapper[4971]: I1127 07:19:17.347655 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-45rt8"] Nov 27 07:19:17 crc kubenswrapper[4971]: I1127 07:19:17.355511 4971 scope.go:117] "RemoveContainer" containerID="60f59a6e83fdc72d437d71d6a2fc4290250b75d86ff4dc231378f363a52d5f74" Nov 27 07:19:17 crc kubenswrapper[4971]: I1127 07:19:17.357218 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-45rt8"] Nov 27 07:19:17 crc kubenswrapper[4971]: I1127 07:19:17.996088 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.186913 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b9ccc9bd-d955-4853-986f-95597f2c70e6-lock\") pod \"b9ccc9bd-d955-4853-986f-95597f2c70e6\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.187054 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b9ccc9bd-d955-4853-986f-95597f2c70e6-cache\") pod \"b9ccc9bd-d955-4853-986f-95597f2c70e6\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.187153 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"b9ccc9bd-d955-4853-986f-95597f2c70e6\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.187192 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rhrd\" (UniqueName: \"kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-kube-api-access-5rhrd\") pod \"b9ccc9bd-d955-4853-986f-95597f2c70e6\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.187263 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift\") pod \"b9ccc9bd-d955-4853-986f-95597f2c70e6\" (UID: \"b9ccc9bd-d955-4853-986f-95597f2c70e6\") " Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.187665 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9ccc9bd-d955-4853-986f-95597f2c70e6-lock" (OuterVolumeSpecName: "lock") pod "b9ccc9bd-d955-4853-986f-95597f2c70e6" (UID: "b9ccc9bd-d955-4853-986f-95597f2c70e6"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.187844 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9ccc9bd-d955-4853-986f-95597f2c70e6-cache" (OuterVolumeSpecName: "cache") pod "b9ccc9bd-d955-4853-986f-95597f2c70e6" (UID: "b9ccc9bd-d955-4853-986f-95597f2c70e6"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.187918 4971 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b9ccc9bd-d955-4853-986f-95597f2c70e6-lock\") on node \"crc\" DevicePath \"\"" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.191803 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "swift") pod "b9ccc9bd-d955-4853-986f-95597f2c70e6" (UID: "b9ccc9bd-d955-4853-986f-95597f2c70e6"). InnerVolumeSpecName "local-storage12-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.191912 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "b9ccc9bd-d955-4853-986f-95597f2c70e6" (UID: "b9ccc9bd-d955-4853-986f-95597f2c70e6"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.193109 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-kube-api-access-5rhrd" (OuterVolumeSpecName: "kube-api-access-5rhrd") pod "b9ccc9bd-d955-4853-986f-95597f2c70e6" (UID: "b9ccc9bd-d955-4853-986f-95597f2c70e6"). InnerVolumeSpecName "kube-api-access-5rhrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.289613 4971 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b9ccc9bd-d955-4853-986f-95597f2c70e6-cache\") on node \"crc\" DevicePath \"\"" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.289671 4971 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.289687 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rhrd\" (UniqueName: \"kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-kube-api-access-5rhrd\") on node \"crc\" DevicePath \"\"" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.289701 4971 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b9ccc9bd-d955-4853-986f-95597f2c70e6-etc-swift\") on node \"crc\" DevicePath \"\"" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.314769 4971 generic.go:334] "Generic (PLEG): container finished" podID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerID="85a6791fcdc604b55a12b82702e281ab125eb3d1d4235331866d471d20e24d57" exitCode=137 Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.314996 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerDied","Data":"85a6791fcdc604b55a12b82702e281ab125eb3d1d4235331866d471d20e24d57"} Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.315074 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b9ccc9bd-d955-4853-986f-95597f2c70e6","Type":"ContainerDied","Data":"e0c53a5599de41f3e42da5a20ab59718975f8b80a1f6f15d525a576f75869862"} Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.315132 4971 scope.go:117] "RemoveContainer" containerID="85a6791fcdc604b55a12b82702e281ab125eb3d1d4235331866d471d20e24d57" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.315807 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.321340 4971 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.342038 4971 scope.go:117] "RemoveContainer" containerID="c21be005b4f8466d61ef2da25c8b9ce2faf1be2408be9a01a2cad03ede7d5772" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.365469 4971 scope.go:117] "RemoveContainer" containerID="4a867f795f4a9c59b6d95924698a7ae6fcb777907d413b4d91002386f13f43ce" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.369314 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.377682 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.390681 4971 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.414139 4971 scope.go:117] "RemoveContainer" containerID="91caa3649b14fd1b0674535f37443068dd6301ac20d525ca2334e554de92f5ce" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.434215 4971 scope.go:117] "RemoveContainer" containerID="d156d1511b10631f281c42115007fa1a0c13d1d792aafd4fd1d7868813992990" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.463518 4971 scope.go:117] "RemoveContainer" containerID="61eee222331bc4cf51d73a2109e6aaf653d19b6ab409e2c0ab5b7ddfc9466313" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.484048 4971 scope.go:117] "RemoveContainer" containerID="9d10d81588ceda578cec676b35a37e455caa807351436843b76133d0dfb2603b" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.505295 4971 scope.go:117] "RemoveContainer" containerID="7170edadd0854106871150c55e357e6c6f8c393bb654d550d3f39d6435d67112" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.525961 4971 scope.go:117] "RemoveContainer" containerID="fad54635f828db66a56bd717dd26946c6194a973dd020bda60d63ac8508944b7" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.548714 4971 scope.go:117] "RemoveContainer" containerID="3b8d32d89ca168e8803a3c1d366cfdd72575c01e7332b5f3d73f865eaf632a07" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.549821 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04" Nov 27 07:19:18 crc kubenswrapper[4971]: E1127 07:19:18.550124 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.562904 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" path="/var/lib/kubelet/pods/6facf3b5-48aa-4a38-823e-6b7adbbcdfee/volumes" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.564278 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" 
path="/var/lib/kubelet/pods/b9ccc9bd-d955-4853-986f-95597f2c70e6/volumes" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.571825 4971 scope.go:117] "RemoveContainer" containerID="f747a88650d47a0b28abe01bd9c86b1ce37125bcbf5d4510f59880020f4c6436" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.602119 4971 scope.go:117] "RemoveContainer" containerID="d4737f67b50d857fb0c555964409bd7354a856636790cbe70eea9e16bffacc6e" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.631326 4971 scope.go:117] "RemoveContainer" containerID="6c4925ec24ba366b0b0454d48d8855400c5d48953e95c1f7428155fd84127936" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.650196 4971 scope.go:117] "RemoveContainer" containerID="b7eb7df2e485a291e4bff098a5b8b04d0f94dc8a687ed53e0061a899159d7b70" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.670446 4971 scope.go:117] "RemoveContainer" containerID="dc6a6bc3b9ba7cb3360f786ead574e2d92d8ae060c201dc5a7136ed40e797c3d" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.693223 4971 scope.go:117] "RemoveContainer" containerID="85a6791fcdc604b55a12b82702e281ab125eb3d1d4235331866d471d20e24d57" Nov 27 07:19:18 crc kubenswrapper[4971]: E1127 07:19:18.693937 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85a6791fcdc604b55a12b82702e281ab125eb3d1d4235331866d471d20e24d57\": container with ID starting with 85a6791fcdc604b55a12b82702e281ab125eb3d1d4235331866d471d20e24d57 not found: ID does not exist" containerID="85a6791fcdc604b55a12b82702e281ab125eb3d1d4235331866d471d20e24d57" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.693972 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85a6791fcdc604b55a12b82702e281ab125eb3d1d4235331866d471d20e24d57"} err="failed to get container status \"85a6791fcdc604b55a12b82702e281ab125eb3d1d4235331866d471d20e24d57\": rpc error: code = NotFound desc = could not find container \"85a6791fcdc604b55a12b82702e281ab125eb3d1d4235331866d471d20e24d57\": container with ID starting with 85a6791fcdc604b55a12b82702e281ab125eb3d1d4235331866d471d20e24d57 not found: ID does not exist" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.693997 4971 scope.go:117] "RemoveContainer" containerID="c21be005b4f8466d61ef2da25c8b9ce2faf1be2408be9a01a2cad03ede7d5772" Nov 27 07:19:18 crc kubenswrapper[4971]: E1127 07:19:18.694694 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c21be005b4f8466d61ef2da25c8b9ce2faf1be2408be9a01a2cad03ede7d5772\": container with ID starting with c21be005b4f8466d61ef2da25c8b9ce2faf1be2408be9a01a2cad03ede7d5772 not found: ID does not exist" containerID="c21be005b4f8466d61ef2da25c8b9ce2faf1be2408be9a01a2cad03ede7d5772" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.694722 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c21be005b4f8466d61ef2da25c8b9ce2faf1be2408be9a01a2cad03ede7d5772"} err="failed to get container status \"c21be005b4f8466d61ef2da25c8b9ce2faf1be2408be9a01a2cad03ede7d5772\": rpc error: code = NotFound desc = could not find container \"c21be005b4f8466d61ef2da25c8b9ce2faf1be2408be9a01a2cad03ede7d5772\": container with ID starting with c21be005b4f8466d61ef2da25c8b9ce2faf1be2408be9a01a2cad03ede7d5772 not found: ID does not exist" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.694738 4971 scope.go:117] "RemoveContainer" 
containerID="4a867f795f4a9c59b6d95924698a7ae6fcb777907d413b4d91002386f13f43ce" Nov 27 07:19:18 crc kubenswrapper[4971]: E1127 07:19:18.695078 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a867f795f4a9c59b6d95924698a7ae6fcb777907d413b4d91002386f13f43ce\": container with ID starting with 4a867f795f4a9c59b6d95924698a7ae6fcb777907d413b4d91002386f13f43ce not found: ID does not exist" containerID="4a867f795f4a9c59b6d95924698a7ae6fcb777907d413b4d91002386f13f43ce" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.695102 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a867f795f4a9c59b6d95924698a7ae6fcb777907d413b4d91002386f13f43ce"} err="failed to get container status \"4a867f795f4a9c59b6d95924698a7ae6fcb777907d413b4d91002386f13f43ce\": rpc error: code = NotFound desc = could not find container \"4a867f795f4a9c59b6d95924698a7ae6fcb777907d413b4d91002386f13f43ce\": container with ID starting with 4a867f795f4a9c59b6d95924698a7ae6fcb777907d413b4d91002386f13f43ce not found: ID does not exist" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.695116 4971 scope.go:117] "RemoveContainer" containerID="91caa3649b14fd1b0674535f37443068dd6301ac20d525ca2334e554de92f5ce" Nov 27 07:19:18 crc kubenswrapper[4971]: E1127 07:19:18.695393 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91caa3649b14fd1b0674535f37443068dd6301ac20d525ca2334e554de92f5ce\": container with ID starting with 91caa3649b14fd1b0674535f37443068dd6301ac20d525ca2334e554de92f5ce not found: ID does not exist" containerID="91caa3649b14fd1b0674535f37443068dd6301ac20d525ca2334e554de92f5ce" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.695418 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91caa3649b14fd1b0674535f37443068dd6301ac20d525ca2334e554de92f5ce"} err="failed to get container status \"91caa3649b14fd1b0674535f37443068dd6301ac20d525ca2334e554de92f5ce\": rpc error: code = NotFound desc = could not find container \"91caa3649b14fd1b0674535f37443068dd6301ac20d525ca2334e554de92f5ce\": container with ID starting with 91caa3649b14fd1b0674535f37443068dd6301ac20d525ca2334e554de92f5ce not found: ID does not exist" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.695436 4971 scope.go:117] "RemoveContainer" containerID="d156d1511b10631f281c42115007fa1a0c13d1d792aafd4fd1d7868813992990" Nov 27 07:19:18 crc kubenswrapper[4971]: E1127 07:19:18.695841 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d156d1511b10631f281c42115007fa1a0c13d1d792aafd4fd1d7868813992990\": container with ID starting with d156d1511b10631f281c42115007fa1a0c13d1d792aafd4fd1d7868813992990 not found: ID does not exist" containerID="d156d1511b10631f281c42115007fa1a0c13d1d792aafd4fd1d7868813992990" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.695864 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d156d1511b10631f281c42115007fa1a0c13d1d792aafd4fd1d7868813992990"} err="failed to get container status \"d156d1511b10631f281c42115007fa1a0c13d1d792aafd4fd1d7868813992990\": rpc error: code = NotFound desc = could not find container \"d156d1511b10631f281c42115007fa1a0c13d1d792aafd4fd1d7868813992990\": container with ID starting with 
d156d1511b10631f281c42115007fa1a0c13d1d792aafd4fd1d7868813992990 not found: ID does not exist" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.695876 4971 scope.go:117] "RemoveContainer" containerID="61eee222331bc4cf51d73a2109e6aaf653d19b6ab409e2c0ab5b7ddfc9466313" Nov 27 07:19:18 crc kubenswrapper[4971]: E1127 07:19:18.696237 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61eee222331bc4cf51d73a2109e6aaf653d19b6ab409e2c0ab5b7ddfc9466313\": container with ID starting with 61eee222331bc4cf51d73a2109e6aaf653d19b6ab409e2c0ab5b7ddfc9466313 not found: ID does not exist" containerID="61eee222331bc4cf51d73a2109e6aaf653d19b6ab409e2c0ab5b7ddfc9466313" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.696258 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61eee222331bc4cf51d73a2109e6aaf653d19b6ab409e2c0ab5b7ddfc9466313"} err="failed to get container status \"61eee222331bc4cf51d73a2109e6aaf653d19b6ab409e2c0ab5b7ddfc9466313\": rpc error: code = NotFound desc = could not find container \"61eee222331bc4cf51d73a2109e6aaf653d19b6ab409e2c0ab5b7ddfc9466313\": container with ID starting with 61eee222331bc4cf51d73a2109e6aaf653d19b6ab409e2c0ab5b7ddfc9466313 not found: ID does not exist" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.696272 4971 scope.go:117] "RemoveContainer" containerID="9d10d81588ceda578cec676b35a37e455caa807351436843b76133d0dfb2603b" Nov 27 07:19:18 crc kubenswrapper[4971]: E1127 07:19:18.696646 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d10d81588ceda578cec676b35a37e455caa807351436843b76133d0dfb2603b\": container with ID starting with 9d10d81588ceda578cec676b35a37e455caa807351436843b76133d0dfb2603b not found: ID does not exist" containerID="9d10d81588ceda578cec676b35a37e455caa807351436843b76133d0dfb2603b" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.696666 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d10d81588ceda578cec676b35a37e455caa807351436843b76133d0dfb2603b"} err="failed to get container status \"9d10d81588ceda578cec676b35a37e455caa807351436843b76133d0dfb2603b\": rpc error: code = NotFound desc = could not find container \"9d10d81588ceda578cec676b35a37e455caa807351436843b76133d0dfb2603b\": container with ID starting with 9d10d81588ceda578cec676b35a37e455caa807351436843b76133d0dfb2603b not found: ID does not exist" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.696683 4971 scope.go:117] "RemoveContainer" containerID="7170edadd0854106871150c55e357e6c6f8c393bb654d550d3f39d6435d67112" Nov 27 07:19:18 crc kubenswrapper[4971]: E1127 07:19:18.696997 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7170edadd0854106871150c55e357e6c6f8c393bb654d550d3f39d6435d67112\": container with ID starting with 7170edadd0854106871150c55e357e6c6f8c393bb654d550d3f39d6435d67112 not found: ID does not exist" containerID="7170edadd0854106871150c55e357e6c6f8c393bb654d550d3f39d6435d67112" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.697016 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7170edadd0854106871150c55e357e6c6f8c393bb654d550d3f39d6435d67112"} err="failed to get container status \"7170edadd0854106871150c55e357e6c6f8c393bb654d550d3f39d6435d67112\": rpc 
error: code = NotFound desc = could not find container \"7170edadd0854106871150c55e357e6c6f8c393bb654d550d3f39d6435d67112\": container with ID starting with 7170edadd0854106871150c55e357e6c6f8c393bb654d550d3f39d6435d67112 not found: ID does not exist" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.697028 4971 scope.go:117] "RemoveContainer" containerID="fad54635f828db66a56bd717dd26946c6194a973dd020bda60d63ac8508944b7" Nov 27 07:19:18 crc kubenswrapper[4971]: E1127 07:19:18.697321 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fad54635f828db66a56bd717dd26946c6194a973dd020bda60d63ac8508944b7\": container with ID starting with fad54635f828db66a56bd717dd26946c6194a973dd020bda60d63ac8508944b7 not found: ID does not exist" containerID="fad54635f828db66a56bd717dd26946c6194a973dd020bda60d63ac8508944b7" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.697377 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fad54635f828db66a56bd717dd26946c6194a973dd020bda60d63ac8508944b7"} err="failed to get container status \"fad54635f828db66a56bd717dd26946c6194a973dd020bda60d63ac8508944b7\": rpc error: code = NotFound desc = could not find container \"fad54635f828db66a56bd717dd26946c6194a973dd020bda60d63ac8508944b7\": container with ID starting with fad54635f828db66a56bd717dd26946c6194a973dd020bda60d63ac8508944b7 not found: ID does not exist" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.697404 4971 scope.go:117] "RemoveContainer" containerID="3b8d32d89ca168e8803a3c1d366cfdd72575c01e7332b5f3d73f865eaf632a07" Nov 27 07:19:18 crc kubenswrapper[4971]: E1127 07:19:18.697707 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b8d32d89ca168e8803a3c1d366cfdd72575c01e7332b5f3d73f865eaf632a07\": container with ID starting with 3b8d32d89ca168e8803a3c1d366cfdd72575c01e7332b5f3d73f865eaf632a07 not found: ID does not exist" containerID="3b8d32d89ca168e8803a3c1d366cfdd72575c01e7332b5f3d73f865eaf632a07" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.697732 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b8d32d89ca168e8803a3c1d366cfdd72575c01e7332b5f3d73f865eaf632a07"} err="failed to get container status \"3b8d32d89ca168e8803a3c1d366cfdd72575c01e7332b5f3d73f865eaf632a07\": rpc error: code = NotFound desc = could not find container \"3b8d32d89ca168e8803a3c1d366cfdd72575c01e7332b5f3d73f865eaf632a07\": container with ID starting with 3b8d32d89ca168e8803a3c1d366cfdd72575c01e7332b5f3d73f865eaf632a07 not found: ID does not exist" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.697747 4971 scope.go:117] "RemoveContainer" containerID="f747a88650d47a0b28abe01bd9c86b1ce37125bcbf5d4510f59880020f4c6436" Nov 27 07:19:18 crc kubenswrapper[4971]: E1127 07:19:18.697967 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f747a88650d47a0b28abe01bd9c86b1ce37125bcbf5d4510f59880020f4c6436\": container with ID starting with f747a88650d47a0b28abe01bd9c86b1ce37125bcbf5d4510f59880020f4c6436 not found: ID does not exist" containerID="f747a88650d47a0b28abe01bd9c86b1ce37125bcbf5d4510f59880020f4c6436" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.697989 4971 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"f747a88650d47a0b28abe01bd9c86b1ce37125bcbf5d4510f59880020f4c6436"} err="failed to get container status \"f747a88650d47a0b28abe01bd9c86b1ce37125bcbf5d4510f59880020f4c6436\": rpc error: code = NotFound desc = could not find container \"f747a88650d47a0b28abe01bd9c86b1ce37125bcbf5d4510f59880020f4c6436\": container with ID starting with f747a88650d47a0b28abe01bd9c86b1ce37125bcbf5d4510f59880020f4c6436 not found: ID does not exist" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.698002 4971 scope.go:117] "RemoveContainer" containerID="d4737f67b50d857fb0c555964409bd7354a856636790cbe70eea9e16bffacc6e" Nov 27 07:19:18 crc kubenswrapper[4971]: E1127 07:19:18.698342 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4737f67b50d857fb0c555964409bd7354a856636790cbe70eea9e16bffacc6e\": container with ID starting with d4737f67b50d857fb0c555964409bd7354a856636790cbe70eea9e16bffacc6e not found: ID does not exist" containerID="d4737f67b50d857fb0c555964409bd7354a856636790cbe70eea9e16bffacc6e" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.698363 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4737f67b50d857fb0c555964409bd7354a856636790cbe70eea9e16bffacc6e"} err="failed to get container status \"d4737f67b50d857fb0c555964409bd7354a856636790cbe70eea9e16bffacc6e\": rpc error: code = NotFound desc = could not find container \"d4737f67b50d857fb0c555964409bd7354a856636790cbe70eea9e16bffacc6e\": container with ID starting with d4737f67b50d857fb0c555964409bd7354a856636790cbe70eea9e16bffacc6e not found: ID does not exist" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.698377 4971 scope.go:117] "RemoveContainer" containerID="6c4925ec24ba366b0b0454d48d8855400c5d48953e95c1f7428155fd84127936" Nov 27 07:19:18 crc kubenswrapper[4971]: E1127 07:19:18.698667 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c4925ec24ba366b0b0454d48d8855400c5d48953e95c1f7428155fd84127936\": container with ID starting with 6c4925ec24ba366b0b0454d48d8855400c5d48953e95c1f7428155fd84127936 not found: ID does not exist" containerID="6c4925ec24ba366b0b0454d48d8855400c5d48953e95c1f7428155fd84127936" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.698692 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c4925ec24ba366b0b0454d48d8855400c5d48953e95c1f7428155fd84127936"} err="failed to get container status \"6c4925ec24ba366b0b0454d48d8855400c5d48953e95c1f7428155fd84127936\": rpc error: code = NotFound desc = could not find container \"6c4925ec24ba366b0b0454d48d8855400c5d48953e95c1f7428155fd84127936\": container with ID starting with 6c4925ec24ba366b0b0454d48d8855400c5d48953e95c1f7428155fd84127936 not found: ID does not exist" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.698708 4971 scope.go:117] "RemoveContainer" containerID="b7eb7df2e485a291e4bff098a5b8b04d0f94dc8a687ed53e0061a899159d7b70" Nov 27 07:19:18 crc kubenswrapper[4971]: E1127 07:19:18.698997 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7eb7df2e485a291e4bff098a5b8b04d0f94dc8a687ed53e0061a899159d7b70\": container with ID starting with b7eb7df2e485a291e4bff098a5b8b04d0f94dc8a687ed53e0061a899159d7b70 not found: ID does not exist" 
containerID="b7eb7df2e485a291e4bff098a5b8b04d0f94dc8a687ed53e0061a899159d7b70" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.699020 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7eb7df2e485a291e4bff098a5b8b04d0f94dc8a687ed53e0061a899159d7b70"} err="failed to get container status \"b7eb7df2e485a291e4bff098a5b8b04d0f94dc8a687ed53e0061a899159d7b70\": rpc error: code = NotFound desc = could not find container \"b7eb7df2e485a291e4bff098a5b8b04d0f94dc8a687ed53e0061a899159d7b70\": container with ID starting with b7eb7df2e485a291e4bff098a5b8b04d0f94dc8a687ed53e0061a899159d7b70 not found: ID does not exist" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.699034 4971 scope.go:117] "RemoveContainer" containerID="dc6a6bc3b9ba7cb3360f786ead574e2d92d8ae060c201dc5a7136ed40e797c3d" Nov 27 07:19:18 crc kubenswrapper[4971]: E1127 07:19:18.699406 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc6a6bc3b9ba7cb3360f786ead574e2d92d8ae060c201dc5a7136ed40e797c3d\": container with ID starting with dc6a6bc3b9ba7cb3360f786ead574e2d92d8ae060c201dc5a7136ed40e797c3d not found: ID does not exist" containerID="dc6a6bc3b9ba7cb3360f786ead574e2d92d8ae060c201dc5a7136ed40e797c3d" Nov 27 07:19:18 crc kubenswrapper[4971]: I1127 07:19:18.699426 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc6a6bc3b9ba7cb3360f786ead574e2d92d8ae060c201dc5a7136ed40e797c3d"} err="failed to get container status \"dc6a6bc3b9ba7cb3360f786ead574e2d92d8ae060c201dc5a7136ed40e797c3d\": rpc error: code = NotFound desc = could not find container \"dc6a6bc3b9ba7cb3360f786ead574e2d92d8ae060c201dc5a7136ed40e797c3d\": container with ID starting with dc6a6bc3b9ba7cb3360f786ead574e2d92d8ae060c201dc5a7136ed40e797c3d not found: ID does not exist" Nov 27 07:19:20 crc kubenswrapper[4971]: I1127 07:19:20.205914 4971 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod31d8afb0-f8c2-4d34-879f-260e94779de0"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod31d8afb0-f8c2-4d34-879f-260e94779de0] : Timed out while waiting for systemd to remove kubepods-besteffort-pod31d8afb0_f8c2_4d34_879f_260e94779de0.slice" Nov 27 07:19:20 crc kubenswrapper[4971]: E1127 07:19:20.206762 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod31d8afb0-f8c2-4d34-879f-260e94779de0] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod31d8afb0-f8c2-4d34-879f-260e94779de0] : Timed out while waiting for systemd to remove kubepods-besteffort-pod31d8afb0_f8c2_4d34_879f_260e94779de0.slice" pod="openstack/ovn-controller-metrics-x4rpv" podUID="31d8afb0-f8c2-4d34-879f-260e94779de0" Nov 27 07:19:20 crc kubenswrapper[4971]: I1127 07:19:20.341933 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-x4rpv"
Nov 27 07:19:20 crc kubenswrapper[4971]: I1127 07:19:20.369610 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-x4rpv"]
Nov 27 07:19:20 crc kubenswrapper[4971]: I1127 07:19:20.375495 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-x4rpv"]
Nov 27 07:19:20 crc kubenswrapper[4971]: I1127 07:19:20.559184 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8afb0-f8c2-4d34-879f-260e94779de0" path="/var/lib/kubelet/pods/31d8afb0-f8c2-4d34-879f-260e94779de0/volumes"
Nov 27 07:19:25 crc kubenswrapper[4971]: I1127 07:19:25.317997 4971 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod91f979c6-21cc-4848-9eb5-b8bfc4abf082"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod91f979c6-21cc-4848-9eb5-b8bfc4abf082] : Timed out while waiting for systemd to remove kubepods-besteffort-pod91f979c6_21cc_4848_9eb5_b8bfc4abf082.slice"
Nov 27 07:19:25 crc kubenswrapper[4971]: E1127 07:19:25.318513 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod91f979c6-21cc-4848-9eb5-b8bfc4abf082] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod91f979c6-21cc-4848-9eb5-b8bfc4abf082] : Timed out while waiting for systemd to remove kubepods-besteffort-pod91f979c6_21cc_4848_9eb5_b8bfc4abf082.slice" pod="openstack/neutron1af4-account-delete-48m5w" podUID="91f979c6-21cc-4848-9eb5-b8bfc4abf082"
Nov 27 07:19:25 crc kubenswrapper[4971]: I1127 07:19:25.405957 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron1af4-account-delete-48m5w"
Nov 27 07:19:25 crc kubenswrapper[4971]: I1127 07:19:25.438126 4971 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podd6833730-f034-4b5f-954a-19e993167f04"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podd6833730-f034-4b5f-954a-19e993167f04] : Timed out while waiting for systemd to remove kubepods-besteffort-podd6833730_f034_4b5f_954a_19e993167f04.slice"
Nov 27 07:19:31 crc kubenswrapper[4971]: I1127 07:19:31.550523 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:19:31 crc kubenswrapper[4971]: E1127 07:19:31.552651 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 07:19:45 crc kubenswrapper[4971]: I1127 07:19:45.550477 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:19:45 crc kubenswrapper[4971]: E1127 07:19:45.551274 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 07:20:00 crc kubenswrapper[4971]: I1127 07:20:00.550933 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:20:00 crc kubenswrapper[4971]: E1127 07:20:00.551773 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 07:20:13 crc kubenswrapper[4971]: I1127 07:20:13.550120 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:20:13 crc kubenswrapper[4971]: E1127 07:20:13.551000 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 07:20:26 crc kubenswrapper[4971]: I1127 07:20:26.550445 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:20:26 crc kubenswrapper[4971]: E1127 07:20:26.551376 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 07:20:41 crc kubenswrapper[4971]: I1127 07:20:41.551507 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:20:41 crc kubenswrapper[4971]: E1127 07:20:41.553294 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.217048 4971 scope.go:117] "RemoveContainer" containerID="5e05a21a7103d6f887778bd82285b294951fce661aa5d2dd8a1def2dbabc1652"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.254772 4971 scope.go:117] "RemoveContainer" containerID="35daabf30eff3474954a06c52ad28d749b52aae9fc9b781f447588cf8b408b5f"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.281465 4971 scope.go:117] "RemoveContainer" containerID="12983e11ebb77e9c91fe21064f9cdc861aef03f33d2b3124e97e962ac8d19cdc"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.302227 4971 scope.go:117] "RemoveContainer" containerID="dff606b7b165d6268b04e3c652abf9420dd9caabe77ed3c1f0c431beda8c9de5"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.324764 4971 scope.go:117] "RemoveContainer" containerID="c6a9de26b76ffd90526d58f0c36d51422b5a7bf192b60d1eb043ebe5cedc7092"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.341343 4971 scope.go:117] "RemoveContainer" containerID="203479ecfee56a9441b43631622196bf609ce8fbfc30cbc6c31eb9c3e9440056"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.373195 4971 scope.go:117] "RemoveContainer" containerID="c878cae761f2b1c9abbbb40694a91fe64be7c38a2b289d4333edaeb94b60ad38"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.409435 4971 scope.go:117] "RemoveContainer" containerID="77882cbee517c91fc3e1f3a6a066be0183a0d5dff26c18bb82c636aa29cd5b4c"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.428918 4971 scope.go:117] "RemoveContainer" containerID="47223c6c413b4fa18667b07a2c8e720f342e5906c3e3e926a37060f670324a52"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.447274 4971 scope.go:117] "RemoveContainer" containerID="e0a4a62c81fb15153f18b5b439de92b61bd92c1f4a77409cd9393e9468d90fe3"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.477070 4971 scope.go:117] "RemoveContainer" containerID="5362530fcd9fdea86c97351d6e4b2d531937c2c216938c513ebbdfd364fb5d3e"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.500628 4971 scope.go:117] "RemoveContainer" containerID="ac55cff26fc898c9d7406993cb5d12c6b2b8a73445d04ffc7d5949cc59f486b9"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.527110 4971 scope.go:117] "RemoveContainer" containerID="4e798883832bc5d5b16ad0dd76ee1cfdfe53f43556b353147ffab3c5a19ab82e"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.547217 4971 scope.go:117] "RemoveContainer" containerID="ccb0479ba9995bc8550fbc2dbf993c3449d21c9af046183d2f478467bb11fc07"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.564887 4971 scope.go:117] "RemoveContainer" containerID="fc482f882a3b7633d1172ebad636e951956bbf0dc53007a5de81a84d031ea868"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.584023 4971 scope.go:117] "RemoveContainer" containerID="19d72d074c4d1f42fcf35b5590a9d84eff4ace3cd7f8dfc492efd1e086be618e"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.611073 4971 scope.go:117] "RemoveContainer" containerID="e5ed27503d1f4f17194f847b1b73b48f6fe49bdc0adabb45103b24c97ddb67c7"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.639482 4971 scope.go:117] "RemoveContainer" containerID="6e1b7a107fa94bae5e4292a3dbefb4ddb70263f27da9c532afc2ecf7f858188d"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.668952 4971 scope.go:117] "RemoveContainer" containerID="bab5a722723f54ec0f0efa57197716dd795a9b02b94e004eafa54f37ca83903e"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.701410 4971 scope.go:117] "RemoveContainer" containerID="7a4e158b7c7e9c61d16087892eb264fb5c50d094f187b71cab60d863a90c10b0"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.717626 4971 scope.go:117] "RemoveContainer" containerID="62fd3284f1d80c7ec42f4da6f1463c1ef338c9ed77298a1ace876099604e1ab1"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.741769 4971 scope.go:117] "RemoveContainer" containerID="4ebd81db73ba88d8cfa0503889ac418a2664333c3598afa00657ff2e14de274c"
Nov 27 07:20:47 crc kubenswrapper[4971]: I1127 07:20:47.762952 4971 scope.go:117] "RemoveContainer" containerID="d8cae12287c5ef396f90e407b77789281e9654588ff7861e1db29623eaae2e8c"
Nov 27 07:20:53 crc kubenswrapper[4971]: I1127 07:20:53.549898 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:20:53 crc kubenswrapper[4971]: E1127 07:20:53.550874 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 07:21:06 crc kubenswrapper[4971]: I1127 07:21:06.551173 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:21:06 crc kubenswrapper[4971]: E1127 07:21:06.551961 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 07:21:20 crc kubenswrapper[4971]: I1127 07:21:20.550250 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:21:20 crc kubenswrapper[4971]: E1127 07:21:20.551182 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 07:21:35 crc kubenswrapper[4971]: I1127 07:21:35.549840 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:21:35 crc kubenswrapper[4971]: E1127 07:21:35.550521 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 07:21:48 crc kubenswrapper[4971]: I1127 07:21:48.071179 4971 scope.go:117] "RemoveContainer" containerID="76df4ac7c1f082519f7ee5545a6be65bf6a51cf8772adc725260109bee894240"
Nov 27 07:21:48 crc kubenswrapper[4971]: I1127 07:21:48.120996 4971 scope.go:117] "RemoveContainer" containerID="669e2c2b2985b62934235229da4195fd416e9129758e938e024eadd9d158c9c5"
Nov 27 07:21:48 crc kubenswrapper[4971]: I1127 07:21:48.149111 4971 scope.go:117] "RemoveContainer" containerID="4d487d859ddb54b8c37233d551712b8c0af4822104b48abd901d36f7a1eddd1c"
Nov 27 07:21:48 crc kubenswrapper[4971]: I1127 07:21:48.179990 4971 scope.go:117] "RemoveContainer" containerID="b055dca2fce241d4dff82cda6eadcdc757cf29563f11d4e106062cc703721e5d"
Nov 27 07:21:48 crc kubenswrapper[4971]: I1127 07:21:48.217466 4971 scope.go:117] "RemoveContainer" containerID="99b269fcf95dc6f0a32925b3507b866cc4131b10ea3efe81550f10df524dc91e"
Nov 27 07:21:48 crc kubenswrapper[4971]: I1127 07:21:48.240832 4971 scope.go:117] "RemoveContainer" containerID="1e163b91b3515a2dbbcef9e7284d68670096a3a915c4458561ff3dd7b93abebc"
Nov 27 07:21:48 crc kubenswrapper[4971]: I1127 07:21:48.270107 4971 scope.go:117] "RemoveContainer" containerID="7c752ad17fdde3f68fd7a467e7e60367d073bc3eb47a408353424a2055983e41"
Nov 27 07:21:48 crc kubenswrapper[4971]: I1127 07:21:48.311303 4971 scope.go:117] "RemoveContainer" containerID="15f2b3adb5a54ba7ac4520153e7954e26f56a37b66305e65275cb91b2a888859"
Nov 27 07:21:48 crc kubenswrapper[4971]: I1127 07:21:48.328194 4971 scope.go:117] "RemoveContainer" containerID="90d75b56cc24ed99876c2a3d94faea49f7cafd9b68b4d01c2a828f69a6df4484"
Nov 27 07:21:48 crc kubenswrapper[4971]: I1127 07:21:48.369326 4971 scope.go:117] "RemoveContainer" containerID="c9e6b5f718d9ab4322f8be2590e93ec413c8974b46389cc43520e7f166ab9a2d"
Nov 27 07:21:48 crc kubenswrapper[4971]: I1127 07:21:48.392718 4971 scope.go:117] "RemoveContainer" containerID="e70c98903785fe4279c39fe33060ba6d64e42416ec0207f04ea6c26602ecfd68"
Nov 27 07:21:48 crc kubenswrapper[4971]: I1127 07:21:48.411826 4971 scope.go:117] "RemoveContainer" containerID="e5320499a3f1a0d1809604b9208a63f4d3d044c098be5c08c36f1210c9322b8b"
Nov 27 07:21:48 crc kubenswrapper[4971]: I1127 07:21:48.445101 4971 scope.go:117] "RemoveContainer" containerID="73523b2b33bbdbab3268c46a389de99191a628958e5fbee9adae95574ea7a1c6"
Nov 27 07:21:48 crc kubenswrapper[4971]: I1127 07:21:48.465031 4971 scope.go:117] "RemoveContainer" containerID="fc558568c9d543263589e1931a3c435d97461a215fdc5e59dd09dce5d9f01ade"
Nov 27 07:21:48 crc kubenswrapper[4971]: I1127 07:21:48.482529 4971 scope.go:117] "RemoveContainer" containerID="6e5293e955a6149c8c7aa1f5687fb247520e178d25695632832288aedee32ee5"
Nov 27 07:21:50 crc kubenswrapper[4971]: I1127 07:21:50.550302 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:21:50 crc kubenswrapper[4971]: E1127 07:21:50.551023 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 07:22:05 crc kubenswrapper[4971]: I1127 07:22:05.550545 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:22:05 crc kubenswrapper[4971]: E1127 07:22:05.551221 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 07:22:16 crc kubenswrapper[4971]: I1127 07:22:16.550941 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:22:16 crc kubenswrapper[4971]: E1127 07:22:16.551682 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 07:22:31 crc kubenswrapper[4971]: I1127 07:22:31.550956 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:22:31 crc kubenswrapper[4971]: E1127 07:22:31.551834 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 07:22:44 crc kubenswrapper[4971]: I1127 07:22:44.550247 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:22:44 crc kubenswrapper[4971]: E1127 07:22:44.550946 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 07:22:48 crc kubenswrapper[4971]: I1127 07:22:48.781345 4971 scope.go:117] "RemoveContainer" containerID="22d3d2192ae3c18ce743c1ddda10575c592232c7adf55747fe07b64dfb765bb4"
Nov 27 07:22:48 crc kubenswrapper[4971]: I1127 07:22:48.809414 4971 scope.go:117] "RemoveContainer" containerID="19a65f12d0fa7695d3edd4168e5a8ef629cdc60be7faa6681d12e8d04a8cf497"
Nov 27 07:22:48 crc kubenswrapper[4971]: I1127 07:22:48.827864 4971 scope.go:117] "RemoveContainer" containerID="80065ffd1cc70ab9f5e46cbb1026eb2c1f6926d1938126123dd083ba516fd507"
Nov 27 07:22:48 crc kubenswrapper[4971]: I1127 07:22:48.856029 4971 scope.go:117] "RemoveContainer" containerID="f8c04a3373e562bc809e9e4fc35383c8fef8faa16f99ddf1854711cb4b91495c"
Nov 27 07:22:48 crc kubenswrapper[4971]: I1127 07:22:48.876668 4971 scope.go:117] "RemoveContainer" containerID="432905efdade8907249e6259f8519c70b607016106a34709d73f373c91efe0a5"
Nov 27 07:22:48 crc kubenswrapper[4971]: I1127 07:22:48.905664 4971 scope.go:117] "RemoveContainer" containerID="f25c15b6849ee4de1357a742e5ff9d74b03bc332580c116f4c0b091d577d4a1d"
Nov 27 07:22:48 crc kubenswrapper[4971]: I1127 07:22:48.925966 4971 scope.go:117] "RemoveContainer" containerID="cce9aeab95941685da7fd97b4408ffcb6ac7f7c473cf5bbf43edec5fe397efd6"
Nov 27 07:22:48 crc kubenswrapper[4971]: I1127 07:22:48.946186 4971 scope.go:117] "RemoveContainer" containerID="7ac204abd5fcebddae836cc6aa15172601127932e17bded94537e79b8097d011"
Nov 27 07:22:48 crc kubenswrapper[4971]: I1127 07:22:48.967027 4971 scope.go:117] "RemoveContainer" containerID="605fc6ed04781d6ae79576290eb45958f2fcc0fff09021399050e8cc200f2306"
Nov 27 07:22:48 crc kubenswrapper[4971]: I1127 07:22:48.990344 4971 scope.go:117] "RemoveContainer" containerID="5668516f8e5f98d27ae8ef1c9b2857419b64f8bc06f8aeffeddbb87691114627"
Nov 27 07:22:49 crc kubenswrapper[4971]: I1127 07:22:49.011429 4971 scope.go:117] "RemoveContainer" containerID="9cbb1e297d7eff42db8e89fa629eec8f4c7fb3f3405eaec525774254aea00153"
Nov 27 07:22:49 crc kubenswrapper[4971]: I1127 07:22:49.033986 4971 scope.go:117] "RemoveContainer" containerID="8dbea1c5d8961b687109edee2265a2311adecbc668e362357af80dca1ef37927"
Nov 27 07:22:49 crc kubenswrapper[4971]: I1127 07:22:49.053183 4971 scope.go:117] "RemoveContainer" containerID="32b6c0f43018436689e6595a7fbba50fff0b5d5d5f4ec9b1c259ec629b4ab6bc"
Nov 27 07:22:49 crc kubenswrapper[4971]: I1127 07:22:49.073258 4971 scope.go:117] "RemoveContainer" containerID="bce205f17147516a60414770d105f40e40e7784c6e25ddf99d12115151c6a716"
Nov 27 07:22:49 crc kubenswrapper[4971]: I1127 07:22:49.091817 4971 scope.go:117] "RemoveContainer" containerID="11a43f33fa3835d6bc3caba0e4cd634a1bc0dcb8a9fc8879c3ba3bbe47cba2b2"
Nov 27 07:22:49 crc kubenswrapper[4971]: I1127 07:22:49.125327 4971 scope.go:117] "RemoveContainer" containerID="b06e49f36aeb9385737ef57f67601c2cecfa59d6329f3deb519661b9fff43052"
Nov 27 07:22:49 crc kubenswrapper[4971]: I1127 07:22:49.144440 4971 scope.go:117] "RemoveContainer" containerID="8c44ec84f8a34017d255eeeb24780b4156d325f048da346d4c067d9aa0b36d07"
Nov 27 07:22:49 crc kubenswrapper[4971]: I1127 07:22:49.161808 4971 scope.go:117] "RemoveContainer" containerID="61d2130ec7c4d80ade741707d5c73217fac765808c16c9e87c4e84949f761922"
Nov 27 07:22:49 crc kubenswrapper[4971]: I1127 07:22:49.186993 4971 scope.go:117] "RemoveContainer" containerID="c813e7acb7f4a74ba8b74e4f55535ea0e0307a8468b0623d9ec10735c4807fad"
Nov 27 07:22:49 crc kubenswrapper[4971]: I1127 07:22:49.222730 4971 scope.go:117] "RemoveContainer" containerID="24c84e1eb9991d718d731ee29d65ce90cc0654b42f107c87f521aa3fcf209447"
Nov 27 07:22:49 crc kubenswrapper[4971]: I1127 07:22:49.237292 4971 scope.go:117] "RemoveContainer" containerID="5d760ae55be3a486a97462d9bd8576d1f7879d82b2f6d6318d4076d0797b7097"
Nov 27 07:22:49 crc kubenswrapper[4971]: I1127 07:22:49.254725 4971 scope.go:117] "RemoveContainer" containerID="0d9471b432378d52a3ce483c444b3f5beceab405dd8b991eb4ecb32b754851b8"
Nov 27 07:22:49 crc kubenswrapper[4971]: I1127 07:22:49.272019 4971 scope.go:117] "RemoveContainer" containerID="85ed74a43c71f2fd0dc70f5ef0da68149334b289a557cdd8a6923696f588e66b"
Nov 27 07:22:49 crc kubenswrapper[4971]: I1127 07:22:49.292986 4971 scope.go:117] "RemoveContainer" containerID="311e0d520fd8e54093bc7d5a472d0e50638192c04dc79858d7a39b858f1c705d"
Nov 27 07:22:49 crc kubenswrapper[4971]: I1127 07:22:49.329142 4971 scope.go:117] "RemoveContainer" containerID="213b456ae37e6e54ce799e71136d0150d71e3784b58ba6e15c73b1242adc249b"
Nov 27 07:22:55 crc kubenswrapper[4971]: I1127 07:22:55.550865 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:22:55 crc kubenswrapper[4971]: E1127 07:22:55.551693 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 07:23:08 crc kubenswrapper[4971]: I1127 07:23:08.550335 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:23:08 crc kubenswrapper[4971]: E1127 07:23:08.551078 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 07:23:21 crc kubenswrapper[4971]: I1127 07:23:21.550337 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:23:21 crc kubenswrapper[4971]: E1127 07:23:21.551247 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 07:23:34 crc kubenswrapper[4971]: I1127 07:23:34.550683 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:23:35 crc kubenswrapper[4971]: I1127 07:23:35.852257 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"cbff8fd08ba606e2a0830aa64d2cffe84345db1c425fc1298bb5e45af4173719"}
Nov 27 07:23:49 crc kubenswrapper[4971]: I1127 07:23:49.571252 4971 scope.go:117] "RemoveContainer" containerID="86a75ee86a1e45fea7cc0b2bf039f079e8bc453673a092221bda14b2056c6baf"
Nov 27 07:23:49 crc kubenswrapper[4971]: I1127 07:23:49.593612 4971 scope.go:117] "RemoveContainer" containerID="c4384c63c41c30e5aff2a9142833b3197827809310d63cf252a7f987b5fa0052"
Nov 27 07:23:49 crc kubenswrapper[4971]: I1127 07:23:49.612823 4971 scope.go:117] "RemoveContainer" containerID="960448ee56e126a49d6f57a232fb215e9851f384a304f93ed3bcb04c83b92864"
Nov 27 07:23:49 crc kubenswrapper[4971]: I1127 07:23:49.641777 4971 scope.go:117] "RemoveContainer" containerID="8e8e087cfd5dc9b5572bd417f86679e6c4d8fa3a5d6ad123e1e7cf39a19c38ce"
Nov 27 07:23:49 crc kubenswrapper[4971]: I1127 07:23:49.661080 4971 scope.go:117] "RemoveContainer" containerID="62c5d6cd46420fb65a7f206aa0adabc71b71475222eed36141bfe65ea7e170a8"
Nov 27 07:23:49 crc kubenswrapper[4971]: I1127 07:23:49.677357 4971 scope.go:117] "RemoveContainer" containerID="4ddcbc42db2ba1064b1be23b19557092bc9f250cd522d599f52c0edf05df8c9a"
Nov 27 07:23:49 crc kubenswrapper[4971]: I1127 07:23:49.694995 4971 scope.go:117] "RemoveContainer" containerID="1b90d79708e13d8b636a1a0c7a9d004c9d25e1cdfbdf4629947028c179260d11"
Nov 27 07:23:49 crc kubenswrapper[4971]: I1127 07:23:49.722291 4971 scope.go:117] "RemoveContainer" containerID="3586a41a7891cea521492885be55a9a9bd05a1cb5921d455b0e22ebf6c5e936b"
Nov 27 07:23:49 crc kubenswrapper[4971]: I1127 07:23:49.742169 4971 scope.go:117] "RemoveContainer" containerID="7844a05a6c63d726288200a10d028c8c4ca0f2610d820f5ec82c8c30a346ef9d"
Nov 27 07:23:49 crc kubenswrapper[4971]: I1127 07:23:49.782293 4971 scope.go:117] "RemoveContainer" containerID="46227a133d66ed19c72b28861c349b925b200b4f43e8eb5ee9084b038610548a"
Nov 27 07:24:49 crc kubenswrapper[4971]: I1127 07:24:49.893190 4971 scope.go:117] "RemoveContainer" containerID="18682d190c4a5b0b55ee9d27eb1abd136955ddd4b910117777d70786a6f380e8"
Nov 27 07:24:49 crc kubenswrapper[4971]: I1127 07:24:49.913046 4971 scope.go:117] "RemoveContainer" containerID="021e6a2a11bacfa832c03480f267d7d6f46b8ff018e5da0bdfaee90588e4feda"
Nov 27 07:24:49 crc kubenswrapper[4971]: I1127 07:24:49.957928 4971 scope.go:117] "RemoveContainer" containerID="bf254fe08bcc7d4264d54d1c7f653dc9ece6ef4aa57f2cd553f285b6b06f6503"
Nov 27 07:24:49 crc kubenswrapper[4971]: I1127 07:24:49.996483 4971 scope.go:117] "RemoveContainer" containerID="c581d4d83b4f2a55778d440cd3c9dd25c65d57ded13db9b0fc5dd5a6570dbd13"
Nov 27 07:24:50 crc kubenswrapper[4971]: I1127 07:24:50.018364 4971 scope.go:117] "RemoveContainer" containerID="f8a41df47e9d0c49f7996de3bff1779009911f0b42f60ee5708ce91da4034a1e"
Nov 27 07:24:50 crc kubenswrapper[4971]: I1127 07:24:50.054409 4971 scope.go:117] "RemoveContainer" containerID="0f8d01fbf6c7eb20e7a39706dcdd74da63c98558af8036f29f36eda787a01536"
Nov 27 07:24:50 crc kubenswrapper[4971]: I1127 07:24:50.072944 4971 scope.go:117] "RemoveContainer" containerID="6353f174a6c8b0b83eddbe0ddd990ad6ef08c5fe350d99ff37ab1e31a82eb790"
Nov 27 07:24:50 crc kubenswrapper[4971]: I1127 07:24:50.094868 4971 scope.go:117] "RemoveContainer" containerID="9889def6b44901ab96b7e37483508d92be73fc8357e22d9f41857a9c610d8c0a"
Nov 27 07:24:50 crc kubenswrapper[4971]: I1127 07:24:50.118728 4971 scope.go:117] "RemoveContainer" containerID="f42ea15691565b6f744579005d0dff4dd176377414435bc8e4f9733d230cc174"
Nov 27 07:24:50 crc kubenswrapper[4971]: I1127 07:24:50.138733 4971 scope.go:117] "RemoveContainer" containerID="172f687c02402ec48aa19bcb923e165cebb3972c8e1b83d44601a1d5f44f7220"
Nov 27 07:24:50 crc kubenswrapper[4971]: I1127 07:24:50.157741 4971 scope.go:117] "RemoveContainer" containerID="9b107fde4f5f63e1923a73cfe0c3ffca844d489bcb74c6b62efcceacaef66508"
Nov 27 07:24:50 crc kubenswrapper[4971]: I1127 07:24:50.180925 4971 scope.go:117] "RemoveContainer" containerID="e6ecd3fddd03d9acdb8874669eb4ae87d6f12ad1116ff9c9cbf2e034df95e87e"
Nov 27 07:25:50 crc kubenswrapper[4971]: I1127 07:25:50.295131 4971 scope.go:117] "RemoveContainer" containerID="1b572a1aecdf546cde5c96515c9893664d21cadbc5a9100a606b1ffea63c1f0b"
Nov 27 07:25:56 crc kubenswrapper[4971]: I1127 07:25:56.413321 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 07:25:56 crc kubenswrapper[4971]: I1127 07:25:56.414027 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 07:26:26 crc kubenswrapper[4971]: I1127 07:26:26.413494 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 07:26:26 crc kubenswrapper[4971]: I1127 07:26:26.414165 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 07:26:56 crc kubenswrapper[4971]: I1127 07:26:56.413496 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 07:26:56 crc kubenswrapper[4971]: I1127 07:26:56.414689 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 07:26:56 crc kubenswrapper[4971]: I1127 07:26:56.414774 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h"
Nov 27 07:26:56 crc kubenswrapper[4971]: I1127 07:26:56.416188 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cbff8fd08ba606e2a0830aa64d2cffe84345db1c425fc1298bb5e45af4173719"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 27 07:26:56 crc kubenswrapper[4971]: I1127 07:26:56.416324 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://cbff8fd08ba606e2a0830aa64d2cffe84345db1c425fc1298bb5e45af4173719" gracePeriod=600
Nov 27 07:26:57 crc kubenswrapper[4971]: I1127 07:26:57.515030 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="cbff8fd08ba606e2a0830aa64d2cffe84345db1c425fc1298bb5e45af4173719" exitCode=0
Nov 27 07:26:57 crc kubenswrapper[4971]: I1127 07:26:57.515066 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"cbff8fd08ba606e2a0830aa64d2cffe84345db1c425fc1298bb5e45af4173719"}
Nov 27 07:26:57 crc kubenswrapper[4971]: I1127 07:26:57.515636 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db"}
Nov 27 07:26:57 crc kubenswrapper[4971]: I1127 07:26:57.515664 4971 scope.go:117] "RemoveContainer" containerID="0cad2c6a32c384b5b56d689d3a634854a392f78c534cb53e6b7da7260dd94a04"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.083043 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rvkm8"]
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092094 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc889790-089f-4007-876f-874880dad975" containerName="barbican-api"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092133 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc889790-089f-4007-876f-874880dad975" containerName="barbican-api"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092158 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6439a3c-ee26-467c-8e42-5abbbf390f16" containerName="setup-container"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092169 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6439a3c-ee26-467c-8e42-5abbbf390f16" containerName="setup-container"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092182 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3fa1872-f7d9-4531-bc33-619419f530a5" containerName="init"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092194 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3fa1872-f7d9-4531-bc33-619419f530a5" containerName="init"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092209 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69d47892-79da-4e4e-8de2-a84801d4d6b9" containerName="nova-api-api"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092217 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="69d47892-79da-4e4e-8de2-a84801d4d6b9" containerName="nova-api-api"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092232 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e13a581-61d0-4a1f-ad42-5f2783417c70" containerName="ovsdbserver-sb"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092241 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e13a581-61d0-4a1f-ad42-5f2783417c70" containerName="ovsdbserver-sb"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092257 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="rsync"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092265 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="rsync"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092283 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbef532c-59fb-40ac-bde3-35b8f3616d85" containerName="mariadb-account-delete"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092291 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbef532c-59fb-40ac-bde3-35b8f3616d85" containerName="mariadb-account-delete"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092305 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c88f9c00-b02f-4070-b81e-733009e44691" containerName="placement-api"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092314 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c88f9c00-b02f-4070-b81e-733009e44691" containerName="placement-api"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092324 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad" containerName="nova-cell0-conductor-conductor"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092332 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad" containerName="nova-cell0-conductor-conductor"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092344 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2e2055e-1200-46e8-a49e-c6b490702c9b" containerName="glance-httpd"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092352 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2e2055e-1200-46e8-a49e-c6b490702c9b" containerName="glance-httpd"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092368 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="account-server"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092376 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="account-server"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092387 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerName="sg-core"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092396 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerName="sg-core"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092406 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="640c3829-d2e9-49e1-82e3-bd213aa992dd" containerName="setup-container"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092415 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="640c3829-d2e9-49e1-82e3-bd213aa992dd" containerName="setup-container"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092426 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1206c914-fbe7-4e8f-8470-861b0ebf75de" containerName="glance-httpd"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092437 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="1206c914-fbe7-4e8f-8470-861b0ebf75de" containerName="glance-httpd"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092451 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd4a589-1b2e-4559-852f-2c27c0d8c459" containerName="openstack-network-exporter"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092461 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd4a589-1b2e-4559-852f-2c27c0d8c459" containerName="openstack-network-exporter"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092479 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-server"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092490 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-server"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092499 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3616559-d640-4b3b-a4b3-b9d9af1d0061" containerName="barbican-worker"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092508 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3616559-d640-4b3b-a4b3-b9d9af1d0061" containerName="barbican-worker"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092521 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35b90587-df5b-4f15-8c34-f1b0a8506d85" containerName="nova-metadata-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092550 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="35b90587-df5b-4f15-8c34-f1b0a8506d85" containerName="nova-metadata-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092562 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83ad8915-ac3b-4891-ae7a-9b862747569f" containerName="mariadb-account-delete"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092570 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="83ad8915-ac3b-4891-ae7a-9b862747569f" containerName="mariadb-account-delete"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092588 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-replicator"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092596 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-replicator"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092606 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" containerName="ovsdbserver-nb"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092617 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" containerName="ovsdbserver-nb"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092629 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="container-auditor"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092638 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="container-auditor"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092651 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" containerName="openstack-network-exporter"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092660 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" containerName="openstack-network-exporter"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092670 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d" containerName="mariadb-account-delete"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092678 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d" containerName="mariadb-account-delete"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092693 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6754a19e-e024-4b15-8464-49e127bd35ad" containerName="probe"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092702 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6754a19e-e024-4b15-8464-49e127bd35ad" containerName="probe"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092717 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcf58afd-21c6-4c9d-8702-09bc98859732" containerName="mysql-bootstrap"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092725 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcf58afd-21c6-4c9d-8702-09bc98859732" containerName="mysql-bootstrap"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092740 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="container-replicator"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092749 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="container-replicator"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092763 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19b68245-2b99-4337-892f-059f05113ad6" containerName="mariadb-account-delete"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092773 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="19b68245-2b99-4337-892f-059f05113ad6" containerName="mariadb-account-delete"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092787 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="622e028f-779d-4306-923c-ee204fdef6b0" containerName="neutron-api"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092795 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="622e028f-779d-4306-923c-ee204fdef6b0" containerName="neutron-api"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092807 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e067498-7bc0-4bc5-a9a6-696c8aa3cf71" containerName="barbican-keystone-listener-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092815 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e067498-7bc0-4bc5-a9a6-696c8aa3cf71" containerName="barbican-keystone-listener-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092827 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovs-vswitchd"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092836 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovs-vswitchd"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092849 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e13a581-61d0-4a1f-ad42-5f2783417c70" containerName="openstack-network-exporter"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092858 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e13a581-61d0-4a1f-ad42-5f2783417c70" containerName="openstack-network-exporter"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092867 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19b68245-2b99-4337-892f-059f05113ad6" containerName="mariadb-account-delete"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092874 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="19b68245-2b99-4337-892f-059f05113ad6" containerName="mariadb-account-delete"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092891 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a0dcbed-5f66-4faf-83c2-1227bc05e9d3" containerName="mysql-bootstrap"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092900 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a0dcbed-5f66-4faf-83c2-1227bc05e9d3" containerName="mysql-bootstrap"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092909 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2ac1dd8-824c-482d-8c0e-47573535f172" containerName="keystone-api"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092919 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2ac1dd8-824c-482d-8c0e-47573535f172" containerName="keystone-api"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092935 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91f979c6-21cc-4848-9eb5-b8bfc4abf082" containerName="mariadb-account-delete"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092946 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="91f979c6-21cc-4848-9eb5-b8bfc4abf082" containerName="mariadb-account-delete"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092962 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="account-replicator"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092975 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="account-replicator"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.092986 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" containerName="cinder-api-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.092997 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" containerName="cinder-api-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093008 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="640c3829-d2e9-49e1-82e3-bd213aa992dd" containerName="rabbitmq"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093016 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="640c3829-d2e9-49e1-82e3-bd213aa992dd" containerName="rabbitmq"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093027 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aa6e1b6-c18b-4a02-a396-880350cde407" containerName="ovn-controller"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093036 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aa6e1b6-c18b-4a02-a396-880350cde407" containerName="ovn-controller"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093045 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd4a589-1b2e-4559-852f-2c27c0d8c459" containerName="ovn-northd"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093053 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd4a589-1b2e-4559-852f-2c27c0d8c459" containerName="ovn-northd"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093063 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fabf66ef-ae78-4497-998d-95abd13bbab8" containerName="extract-content"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093071 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="fabf66ef-ae78-4497-998d-95abd13bbab8" containerName="extract-content"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093087 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c88f9c00-b02f-4070-b81e-733009e44691" containerName="placement-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093096 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c88f9c00-b02f-4070-b81e-733009e44691" containerName="placement-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093106 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="622e028f-779d-4306-923c-ee204fdef6b0" containerName="neutron-httpd"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093115 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="622e028f-779d-4306-923c-ee204fdef6b0" containerName="neutron-httpd"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093126 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82a33cc1-32fe-464f-ac33-b802fd32a4c1" containerName="proxy-httpd"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093137 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="82a33cc1-32fe-464f-ac33-b802fd32a4c1" containerName="proxy-httpd"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093148 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36cda10d-7f8f-403c-82b8-fbbdf89e8ed5" containerName="nova-scheduler-scheduler"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093157 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="36cda10d-7f8f-403c-82b8-fbbdf89e8ed5" containerName="nova-scheduler-scheduler"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093168 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovsdb-server"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093176 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovsdb-server"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093185 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="container-updater"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093193 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="container-updater"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093208 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerName="proxy-httpd"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093216 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerName="proxy-httpd"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093232 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea" containerName="nova-cell1-novncproxy-novncproxy"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093242 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea" containerName="nova-cell1-novncproxy-novncproxy"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093255 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6754a19e-e024-4b15-8464-49e127bd35ad" containerName="cinder-scheduler"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093263 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6754a19e-e024-4b15-8464-49e127bd35ad" containerName="cinder-scheduler"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093274 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31d8afb0-f8c2-4d34-879f-260e94779de0" containerName="openstack-network-exporter"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093282 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="31d8afb0-f8c2-4d34-879f-260e94779de0" containerName="openstack-network-exporter"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093291 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3fa1872-f7d9-4531-bc33-619419f530a5" containerName="dnsmasq-dns"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093300 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3fa1872-f7d9-4531-bc33-619419f530a5" containerName="dnsmasq-dns"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093314 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6439a3c-ee26-467c-8e42-5abbbf390f16" containerName="rabbitmq"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093323 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6439a3c-ee26-467c-8e42-5abbbf390f16" containerName="rabbitmq"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093335 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" containerName="cinder-api"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093343 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" containerName="cinder-api"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093358 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3616559-d640-4b3b-a4b3-b9d9af1d0061" containerName="barbican-worker-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093366 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3616559-d640-4b3b-a4b3-b9d9af1d0061" containerName="barbican-worker-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093377 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2b1d941-1738-4967-a97c-6a7b2c36531c" containerName="kube-state-metrics"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093384 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2b1d941-1738-4967-a97c-6a7b2c36531c" containerName="kube-state-metrics"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093396 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2e2055e-1200-46e8-a49e-c6b490702c9b" containerName="glance-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093404 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2e2055e-1200-46e8-a49e-c6b490702c9b" containerName="glance-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093414 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51e69600-7930-4ccb-a8ff-0ad3377bf3a7" containerName="mariadb-account-delete"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093422 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="51e69600-7930-4ccb-a8ff-0ad3377bf3a7" containerName="mariadb-account-delete"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093434 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e067498-7bc0-4bc5-a9a6-696c8aa3cf71" containerName="barbican-keystone-listener"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093442 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e067498-7bc0-4bc5-a9a6-696c8aa3cf71" containerName="barbican-keystone-listener"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093456 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aba3aa08-61e1-48d3-bf4b-cb45e0b71561" containerName="mariadb-account-delete"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093464 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="aba3aa08-61e1-48d3-bf4b-cb45e0b71561" containerName="mariadb-account-delete"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093478 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6833730-f034-4b5f-954a-19e993167f04" containerName="memcached"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093486 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6833730-f034-4b5f-954a-19e993167f04" containerName="memcached"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093499 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="account-reaper"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093507 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="account-reaper"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093523 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerName="ceilometer-notification-agent"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093550 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerName="ceilometer-notification-agent"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093564 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fabf66ef-ae78-4497-998d-95abd13bbab8" containerName="registry-server"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093573 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="fabf66ef-ae78-4497-998d-95abd13bbab8" containerName="registry-server"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093586 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-expirer"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093595 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-expirer"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093605 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69d47892-79da-4e4e-8de2-a84801d4d6b9" containerName="nova-api-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093614 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="69d47892-79da-4e4e-8de2-a84801d4d6b9" containerName="nova-api-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093628 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1206c914-fbe7-4e8f-8470-861b0ebf75de" containerName="glance-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093636 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="1206c914-fbe7-4e8f-8470-861b0ebf75de" containerName="glance-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093650 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="account-auditor"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093659 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="account-auditor"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093670 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerName="ceilometer-central-agent"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093679 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerName="ceilometer-central-agent"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093689 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="swift-recon-cron"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093698 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="swift-recon-cron"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093706 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82a33cc1-32fe-464f-ac33-b802fd32a4c1" containerName="proxy-server"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093716 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="82a33cc1-32fe-464f-ac33-b802fd32a4c1" containerName="proxy-server"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093731 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a0dcbed-5f66-4faf-83c2-1227bc05e9d3" containerName="galera"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093740 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a0dcbed-5f66-4faf-83c2-1227bc05e9d3" containerName="galera"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093753 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcf58afd-21c6-4c9d-8702-09bc98859732" containerName="galera"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093761 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcf58afd-21c6-4c9d-8702-09bc98859732" containerName="galera"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093776 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f461fed-9df2-44a5-b99c-17f30adf0d9c" containerName="nova-cell1-conductor-conductor"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093785 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f461fed-9df2-44a5-b99c-17f30adf0d9c" containerName="nova-cell1-conductor-conductor"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093799 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="container-server"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093807 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="container-server"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093817 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35b90587-df5b-4f15-8c34-f1b0a8506d85" containerName="nova-metadata-metadata"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093826 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="35b90587-df5b-4f15-8c34-f1b0a8506d85" containerName="nova-metadata-metadata"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093838 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-auditor"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093846 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-auditor"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093857 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fabf66ef-ae78-4497-998d-95abd13bbab8" containerName="extract-utilities"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093869 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="fabf66ef-ae78-4497-998d-95abd13bbab8" containerName="extract-utilities"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093880 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-updater"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093888 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-updater"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093900 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc889790-089f-4007-876f-874880dad975" containerName="barbican-api-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093911 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc889790-089f-4007-876f-874880dad975" containerName="barbican-api-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.093925 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovsdb-server-init"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.093934 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovsdb-server-init"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094153 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" containerName="openstack-network-exporter"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094166 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="account-server"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094179 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="container-updater"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094193 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerName="ceilometer-notification-agent"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094204 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="83ad8915-ac3b-4891-ae7a-9b862747569f" containerName="mariadb-account-delete"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094216 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="622e028f-779d-4306-923c-ee204fdef6b0" containerName="neutron-httpd"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094228 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2e2055e-1200-46e8-a49e-c6b490702c9b" containerName="glance-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094241 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc889790-089f-4007-876f-874880dad975" containerName="barbican-api-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094254 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-auditor"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094265 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="82a33cc1-32fe-464f-ac33-b802fd32a4c1" containerName="proxy-server"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094280 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="36cda10d-7f8f-403c-82b8-fbbdf89e8ed5" containerName="nova-scheduler-scheduler"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094294 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f461fed-9df2-44a5-b99c-17f30adf0d9c" containerName="nova-cell1-conductor-conductor"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094306 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c88f9c00-b02f-4070-b81e-733009e44691" containerName="placement-log"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094320 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e13a581-61d0-4a1f-ad42-5f2783417c70" containerName="ovsdbserver-sb"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094335 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-server"
Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094347 4971
memory_manager.go:354] "RemoveStaleState removing state" podUID="b2ac1dd8-824c-482d-8c0e-47573535f172" containerName="keystone-api" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094359 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="35b90587-df5b-4f15-8c34-f1b0a8506d85" containerName="nova-metadata-metadata" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094369 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e067498-7bc0-4bc5-a9a6-696c8aa3cf71" containerName="barbican-keystone-listener-log" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094383 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" containerName="cinder-api" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094392 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="swift-recon-cron" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094402 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="69d47892-79da-4e4e-8de2-a84801d4d6b9" containerName="nova-api-log" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094411 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovs-vswitchd" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094424 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="account-reaper" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094438 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="82a33cc1-32fe-464f-ac33-b802fd32a4c1" containerName="proxy-httpd" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094450 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="fabf66ef-ae78-4497-998d-95abd13bbab8" containerName="registry-server" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094465 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="35b90587-df5b-4f15-8c34-f1b0a8506d85" containerName="nova-metadata-log" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094476 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerName="ceilometer-central-agent" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094488 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="container-server" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094500 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="container-replicator" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094515 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6833730-f034-4b5f-954a-19e993167f04" containerName="memcached" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094525 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="account-replicator" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094557 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3616559-d640-4b3b-a4b3-b9d9af1d0061" containerName="barbican-worker" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094567 4971 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="1206c914-fbe7-4e8f-8470-861b0ebf75de" containerName="glance-log" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094580 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="31d8afb0-f8c2-4d34-879f-260e94779de0" containerName="openstack-network-exporter" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094594 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="640c3829-d2e9-49e1-82e3-bd213aa992dd" containerName="rabbitmq" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094605 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="19b68245-2b99-4337-892f-059f05113ad6" containerName="mariadb-account-delete" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094615 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc889790-089f-4007-876f-874880dad975" containerName="barbican-api" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094626 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6439a3c-ee26-467c-8e42-5abbbf390f16" containerName="rabbitmq" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094639 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a0dcbed-5f66-4faf-83c2-1227bc05e9d3" containerName="galera" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094647 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aa6e1b6-c18b-4a02-a396-880350cde407" containerName="ovn-controller" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094656 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-expirer" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094665 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="6754a19e-e024-4b15-8464-49e127bd35ad" containerName="cinder-scheduler" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094675 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="51e69600-7930-4ccb-a8ff-0ad3377bf3a7" containerName="mariadb-account-delete" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094683 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="91f979c6-21cc-4848-9eb5-b8bfc4abf082" containerName="mariadb-account-delete" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094697 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="6facf3b5-48aa-4a38-823e-6b7adbbcdfee" containerName="ovsdb-server" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094709 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3616559-d640-4b3b-a4b3-b9d9af1d0061" containerName="barbican-worker-log" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094718 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e13a581-61d0-4a1f-ad42-5f2783417c70" containerName="openstack-network-exporter" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094726 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="622e028f-779d-4306-923c-ee204fdef6b0" containerName="neutron-api" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094737 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbef532c-59fb-40ac-bde3-35b8f3616d85" containerName="mariadb-account-delete" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094784 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="rsync" Nov 27 07:27:57 crc 
kubenswrapper[4971]: I1127 07:27:57.094796 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerName="sg-core" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094809 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="69d47892-79da-4e4e-8de2-a84801d4d6b9" containerName="nova-api-api" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094822 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd4a589-1b2e-4559-852f-2c27c0d8c459" containerName="openstack-network-exporter" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094832 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c88f9c00-b02f-4070-b81e-733009e44691" containerName="placement-api" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094840 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="2988a7d8-1d6b-46d8-b204-8e02d0be3b4d" containerName="cinder-api-log" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094852 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcf58afd-21c6-4c9d-8702-09bc98859732" containerName="galera" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094863 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e067498-7bc0-4bc5-a9a6-696c8aa3cf71" containerName="barbican-keystone-listener" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094871 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="account-auditor" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094881 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="41a3ef31-df5e-4cb9-8983-40a16f46823c" containerName="proxy-httpd" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094891 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c81bcd57-9f3c-4511-a6f1-ca0bbb68af0d" containerName="mariadb-account-delete" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094901 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="1206c914-fbe7-4e8f-8470-861b0ebf75de" containerName="glance-httpd" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094915 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="bff7fe8c-c2fb-48e7-bfbf-440ef8ff2aad" containerName="nova-cell0-conductor-conductor" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094929 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-updater" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094938 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2e2055e-1200-46e8-a49e-c6b490702c9b" containerName="glance-httpd" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094951 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="e060bc1f-a47d-45ef-88bd-cfd0645f9ce2" containerName="ovsdbserver-nb" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094961 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd4a589-1b2e-4559-852f-2c27c0d8c459" containerName="ovn-northd" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094974 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="aba3aa08-61e1-48d3-bf4b-cb45e0b71561" containerName="mariadb-account-delete" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094985 4971 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="6754a19e-e024-4b15-8464-49e127bd35ad" containerName="probe" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.094998 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2b1d941-1738-4967-a97c-6a7b2c36531c" containerName="kube-state-metrics" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.095009 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa693ecf-e3d2-4a1b-9bd0-df1e508e3bea" containerName="nova-cell1-novncproxy-novncproxy" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.095022 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="object-replicator" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.095031 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ccc9bd-d955-4853-986f-95597f2c70e6" containerName="container-auditor" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.095039 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbef532c-59fb-40ac-bde3-35b8f3616d85" containerName="mariadb-account-delete" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.095047 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3fa1872-f7d9-4531-bc33-619419f530a5" containerName="dnsmasq-dns" Nov 27 07:27:57 crc kubenswrapper[4971]: E1127 07:27:57.095239 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbef532c-59fb-40ac-bde3-35b8f3616d85" containerName="mariadb-account-delete" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.095250 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbef532c-59fb-40ac-bde3-35b8f3616d85" containerName="mariadb-account-delete" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.095447 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="19b68245-2b99-4337-892f-059f05113ad6" containerName="mariadb-account-delete" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.096520 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rvkm8" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.102641 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rvkm8"] Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.142868 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d80de1-2e08-4fcc-909d-0a9fa46dcb9f-catalog-content\") pod \"redhat-operators-rvkm8\" (UID: \"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f\") " pod="openshift-marketplace/redhat-operators-rvkm8" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.143001 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r58m5\" (UniqueName: \"kubernetes.io/projected/28d80de1-2e08-4fcc-909d-0a9fa46dcb9f-kube-api-access-r58m5\") pod \"redhat-operators-rvkm8\" (UID: \"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f\") " pod="openshift-marketplace/redhat-operators-rvkm8" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.143043 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28d80de1-2e08-4fcc-909d-0a9fa46dcb9f-utilities\") pod \"redhat-operators-rvkm8\" (UID: \"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f\") " pod="openshift-marketplace/redhat-operators-rvkm8" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.244144 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r58m5\" (UniqueName: \"kubernetes.io/projected/28d80de1-2e08-4fcc-909d-0a9fa46dcb9f-kube-api-access-r58m5\") pod \"redhat-operators-rvkm8\" (UID: \"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f\") " pod="openshift-marketplace/redhat-operators-rvkm8" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.244213 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28d80de1-2e08-4fcc-909d-0a9fa46dcb9f-utilities\") pod \"redhat-operators-rvkm8\" (UID: \"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f\") " pod="openshift-marketplace/redhat-operators-rvkm8" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.244314 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d80de1-2e08-4fcc-909d-0a9fa46dcb9f-catalog-content\") pod \"redhat-operators-rvkm8\" (UID: \"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f\") " pod="openshift-marketplace/redhat-operators-rvkm8" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.244949 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d80de1-2e08-4fcc-909d-0a9fa46dcb9f-catalog-content\") pod \"redhat-operators-rvkm8\" (UID: \"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f\") " pod="openshift-marketplace/redhat-operators-rvkm8" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.245812 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28d80de1-2e08-4fcc-909d-0a9fa46dcb9f-utilities\") pod \"redhat-operators-rvkm8\" (UID: \"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f\") " pod="openshift-marketplace/redhat-operators-rvkm8" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.265937 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-r58m5\" (UniqueName: \"kubernetes.io/projected/28d80de1-2e08-4fcc-909d-0a9fa46dcb9f-kube-api-access-r58m5\") pod \"redhat-operators-rvkm8\" (UID: \"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f\") " pod="openshift-marketplace/redhat-operators-rvkm8" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.431624 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rvkm8" Nov 27 07:27:57 crc kubenswrapper[4971]: I1127 07:27:57.669812 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rvkm8"] Nov 27 07:27:58 crc kubenswrapper[4971]: I1127 07:27:58.052709 4971 generic.go:334] "Generic (PLEG): container finished" podID="28d80de1-2e08-4fcc-909d-0a9fa46dcb9f" containerID="755b78d164b0c8a31da9153d6aa2dee896c122027c8432517678559abca6de5f" exitCode=0 Nov 27 07:27:58 crc kubenswrapper[4971]: I1127 07:27:58.053187 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rvkm8" event={"ID":"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f","Type":"ContainerDied","Data":"755b78d164b0c8a31da9153d6aa2dee896c122027c8432517678559abca6de5f"} Nov 27 07:27:58 crc kubenswrapper[4971]: I1127 07:27:58.053227 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rvkm8" event={"ID":"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f","Type":"ContainerStarted","Data":"adb05204e1b827026607060e4b3f77b960f5ec535d346ec567e5d9135c4201dc"} Nov 27 07:27:58 crc kubenswrapper[4971]: I1127 07:27:58.056175 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 27 07:27:59 crc kubenswrapper[4971]: I1127 07:27:59.089084 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rvkm8" event={"ID":"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f","Type":"ContainerStarted","Data":"c54d9ebe53aaaae7c92facb0ac0b77175ec74301700e32915e1a0707a31bb19b"} Nov 27 07:28:00 crc kubenswrapper[4971]: I1127 07:28:00.099883 4971 generic.go:334] "Generic (PLEG): container finished" podID="28d80de1-2e08-4fcc-909d-0a9fa46dcb9f" containerID="c54d9ebe53aaaae7c92facb0ac0b77175ec74301700e32915e1a0707a31bb19b" exitCode=0 Nov 27 07:28:00 crc kubenswrapper[4971]: I1127 07:28:00.099931 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rvkm8" event={"ID":"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f","Type":"ContainerDied","Data":"c54d9ebe53aaaae7c92facb0ac0b77175ec74301700e32915e1a0707a31bb19b"} Nov 27 07:28:01 crc kubenswrapper[4971]: I1127 07:28:01.107971 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rvkm8" event={"ID":"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f","Type":"ContainerStarted","Data":"8adffb8969b641cbf6c5d74ac934fa760d8d972880989ac883c199ac0c5a15be"} Nov 27 07:28:01 crc kubenswrapper[4971]: I1127 07:28:01.125043 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rvkm8" podStartSLOduration=1.5435942520000001 podStartE2EDuration="4.125021329s" podCreationTimestamp="2025-11-27 07:27:57 +0000 UTC" firstStartedPulling="2025-11-27 07:27:58.055868475 +0000 UTC m=+2116.247912393" lastFinishedPulling="2025-11-27 07:28:00.637295512 +0000 UTC m=+2118.829339470" observedRunningTime="2025-11-27 07:28:01.124251367 +0000 UTC m=+2119.316295285" watchObservedRunningTime="2025-11-27 07:28:01.125021329 +0000 UTC m=+2119.317065247" Nov 27 07:28:02 crc 
kubenswrapper[4971]: I1127 07:28:02.848388 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8l8gh"] Nov 27 07:28:02 crc kubenswrapper[4971]: I1127 07:28:02.851309 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8l8gh" Nov 27 07:28:02 crc kubenswrapper[4971]: I1127 07:28:02.884050 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8l8gh"] Nov 27 07:28:03 crc kubenswrapper[4971]: I1127 07:28:03.049375 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45c5z\" (UniqueName: \"kubernetes.io/projected/2677e996-721b-4cff-8b1d-fb927f04e0e6-kube-api-access-45c5z\") pod \"community-operators-8l8gh\" (UID: \"2677e996-721b-4cff-8b1d-fb927f04e0e6\") " pod="openshift-marketplace/community-operators-8l8gh" Nov 27 07:28:03 crc kubenswrapper[4971]: I1127 07:28:03.049486 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2677e996-721b-4cff-8b1d-fb927f04e0e6-catalog-content\") pod \"community-operators-8l8gh\" (UID: \"2677e996-721b-4cff-8b1d-fb927f04e0e6\") " pod="openshift-marketplace/community-operators-8l8gh" Nov 27 07:28:03 crc kubenswrapper[4971]: I1127 07:28:03.049551 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2677e996-721b-4cff-8b1d-fb927f04e0e6-utilities\") pod \"community-operators-8l8gh\" (UID: \"2677e996-721b-4cff-8b1d-fb927f04e0e6\") " pod="openshift-marketplace/community-operators-8l8gh" Nov 27 07:28:03 crc kubenswrapper[4971]: I1127 07:28:03.151331 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45c5z\" (UniqueName: \"kubernetes.io/projected/2677e996-721b-4cff-8b1d-fb927f04e0e6-kube-api-access-45c5z\") pod \"community-operators-8l8gh\" (UID: \"2677e996-721b-4cff-8b1d-fb927f04e0e6\") " pod="openshift-marketplace/community-operators-8l8gh" Nov 27 07:28:03 crc kubenswrapper[4971]: I1127 07:28:03.151418 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2677e996-721b-4cff-8b1d-fb927f04e0e6-catalog-content\") pod \"community-operators-8l8gh\" (UID: \"2677e996-721b-4cff-8b1d-fb927f04e0e6\") " pod="openshift-marketplace/community-operators-8l8gh" Nov 27 07:28:03 crc kubenswrapper[4971]: I1127 07:28:03.151466 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2677e996-721b-4cff-8b1d-fb927f04e0e6-utilities\") pod \"community-operators-8l8gh\" (UID: \"2677e996-721b-4cff-8b1d-fb927f04e0e6\") " pod="openshift-marketplace/community-operators-8l8gh" Nov 27 07:28:03 crc kubenswrapper[4971]: I1127 07:28:03.152114 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2677e996-721b-4cff-8b1d-fb927f04e0e6-utilities\") pod \"community-operators-8l8gh\" (UID: \"2677e996-721b-4cff-8b1d-fb927f04e0e6\") " pod="openshift-marketplace/community-operators-8l8gh" Nov 27 07:28:03 crc kubenswrapper[4971]: I1127 07:28:03.152258 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/2677e996-721b-4cff-8b1d-fb927f04e0e6-catalog-content\") pod \"community-operators-8l8gh\" (UID: \"2677e996-721b-4cff-8b1d-fb927f04e0e6\") " pod="openshift-marketplace/community-operators-8l8gh" Nov 27 07:28:03 crc kubenswrapper[4971]: I1127 07:28:03.174310 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45c5z\" (UniqueName: \"kubernetes.io/projected/2677e996-721b-4cff-8b1d-fb927f04e0e6-kube-api-access-45c5z\") pod \"community-operators-8l8gh\" (UID: \"2677e996-721b-4cff-8b1d-fb927f04e0e6\") " pod="openshift-marketplace/community-operators-8l8gh" Nov 27 07:28:03 crc kubenswrapper[4971]: I1127 07:28:03.183907 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8l8gh" Nov 27 07:28:03 crc kubenswrapper[4971]: I1127 07:28:03.699482 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8l8gh"] Nov 27 07:28:03 crc kubenswrapper[4971]: W1127 07:28:03.707740 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2677e996_721b_4cff_8b1d_fb927f04e0e6.slice/crio-bd6e4d3990d6f559340f5147aa558fe2cc0264d61058b7b354da52d71cd4e969 WatchSource:0}: Error finding container bd6e4d3990d6f559340f5147aa558fe2cc0264d61058b7b354da52d71cd4e969: Status 404 returned error can't find the container with id bd6e4d3990d6f559340f5147aa558fe2cc0264d61058b7b354da52d71cd4e969 Nov 27 07:28:04 crc kubenswrapper[4971]: I1127 07:28:04.133168 4971 generic.go:334] "Generic (PLEG): container finished" podID="2677e996-721b-4cff-8b1d-fb927f04e0e6" containerID="44b5c54883d9a38de4c097d2983e3bfb9550c0e84162a5078a5b68a6d74a5750" exitCode=0 Nov 27 07:28:04 crc kubenswrapper[4971]: I1127 07:28:04.133378 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8l8gh" event={"ID":"2677e996-721b-4cff-8b1d-fb927f04e0e6","Type":"ContainerDied","Data":"44b5c54883d9a38de4c097d2983e3bfb9550c0e84162a5078a5b68a6d74a5750"} Nov 27 07:28:04 crc kubenswrapper[4971]: I1127 07:28:04.133460 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8l8gh" event={"ID":"2677e996-721b-4cff-8b1d-fb927f04e0e6","Type":"ContainerStarted","Data":"bd6e4d3990d6f559340f5147aa558fe2cc0264d61058b7b354da52d71cd4e969"} Nov 27 07:28:05 crc kubenswrapper[4971]: I1127 07:28:05.457302 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-n8p7d"] Nov 27 07:28:05 crc kubenswrapper[4971]: I1127 07:28:05.459209 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n8p7d" Nov 27 07:28:05 crc kubenswrapper[4971]: I1127 07:28:05.468861 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n8p7d"] Nov 27 07:28:05 crc kubenswrapper[4971]: I1127 07:28:05.588137 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5cf7\" (UniqueName: \"kubernetes.io/projected/d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c-kube-api-access-r5cf7\") pod \"redhat-marketplace-n8p7d\" (UID: \"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c\") " pod="openshift-marketplace/redhat-marketplace-n8p7d" Nov 27 07:28:05 crc kubenswrapper[4971]: I1127 07:28:05.588208 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c-catalog-content\") pod \"redhat-marketplace-n8p7d\" (UID: \"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c\") " pod="openshift-marketplace/redhat-marketplace-n8p7d" Nov 27 07:28:05 crc kubenswrapper[4971]: I1127 07:28:05.588246 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c-utilities\") pod \"redhat-marketplace-n8p7d\" (UID: \"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c\") " pod="openshift-marketplace/redhat-marketplace-n8p7d" Nov 27 07:28:05 crc kubenswrapper[4971]: I1127 07:28:05.689397 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5cf7\" (UniqueName: \"kubernetes.io/projected/d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c-kube-api-access-r5cf7\") pod \"redhat-marketplace-n8p7d\" (UID: \"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c\") " pod="openshift-marketplace/redhat-marketplace-n8p7d" Nov 27 07:28:05 crc kubenswrapper[4971]: I1127 07:28:05.689489 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c-catalog-content\") pod \"redhat-marketplace-n8p7d\" (UID: \"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c\") " pod="openshift-marketplace/redhat-marketplace-n8p7d" Nov 27 07:28:05 crc kubenswrapper[4971]: I1127 07:28:05.689559 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c-utilities\") pod \"redhat-marketplace-n8p7d\" (UID: \"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c\") " pod="openshift-marketplace/redhat-marketplace-n8p7d" Nov 27 07:28:05 crc kubenswrapper[4971]: I1127 07:28:05.690562 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c-utilities\") pod \"redhat-marketplace-n8p7d\" (UID: \"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c\") " pod="openshift-marketplace/redhat-marketplace-n8p7d" Nov 27 07:28:05 crc kubenswrapper[4971]: I1127 07:28:05.690777 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c-catalog-content\") pod \"redhat-marketplace-n8p7d\" (UID: \"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c\") " pod="openshift-marketplace/redhat-marketplace-n8p7d" Nov 27 07:28:05 crc kubenswrapper[4971]: I1127 07:28:05.712110 4971 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-r5cf7\" (UniqueName: \"kubernetes.io/projected/d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c-kube-api-access-r5cf7\") pod \"redhat-marketplace-n8p7d\" (UID: \"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c\") " pod="openshift-marketplace/redhat-marketplace-n8p7d" Nov 27 07:28:05 crc kubenswrapper[4971]: I1127 07:28:05.789941 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n8p7d" Nov 27 07:28:06 crc kubenswrapper[4971]: I1127 07:28:06.062931 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n8p7d"] Nov 27 07:28:06 crc kubenswrapper[4971]: I1127 07:28:06.154875 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8p7d" event={"ID":"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c","Type":"ContainerStarted","Data":"81d82183de14731fd82eeceb3c9c2601263f90745a02a3398f63639c466a1646"} Nov 27 07:28:07 crc kubenswrapper[4971]: I1127 07:28:07.167162 4971 generic.go:334] "Generic (PLEG): container finished" podID="d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c" containerID="22d38e24ed33ec9fbc9e4f88515fc35c5d78f51fa3c5e39609b4f8c68b632a1d" exitCode=0 Nov 27 07:28:07 crc kubenswrapper[4971]: I1127 07:28:07.167686 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8p7d" event={"ID":"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c","Type":"ContainerDied","Data":"22d38e24ed33ec9fbc9e4f88515fc35c5d78f51fa3c5e39609b4f8c68b632a1d"} Nov 27 07:28:07 crc kubenswrapper[4971]: I1127 07:28:07.432297 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rvkm8" Nov 27 07:28:07 crc kubenswrapper[4971]: I1127 07:28:07.432402 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rvkm8" Nov 27 07:28:07 crc kubenswrapper[4971]: I1127 07:28:07.503665 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rvkm8" Nov 27 07:28:08 crc kubenswrapper[4971]: I1127 07:28:08.234486 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rvkm8" Nov 27 07:28:09 crc kubenswrapper[4971]: I1127 07:28:09.188137 4971 generic.go:334] "Generic (PLEG): container finished" podID="2677e996-721b-4cff-8b1d-fb927f04e0e6" containerID="3e0b531794aa8b2befb75c357bb1e969cbbcda544fcdd74ccfee9d6c864914a9" exitCode=0 Nov 27 07:28:09 crc kubenswrapper[4971]: I1127 07:28:09.188254 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8l8gh" event={"ID":"2677e996-721b-4cff-8b1d-fb927f04e0e6","Type":"ContainerDied","Data":"3e0b531794aa8b2befb75c357bb1e969cbbcda544fcdd74ccfee9d6c864914a9"} Nov 27 07:28:10 crc kubenswrapper[4971]: I1127 07:28:10.197482 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8p7d" event={"ID":"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c","Type":"ContainerStarted","Data":"f0dde287b1f00dacae568e9b271a8f733ea94dc8b8673b047bb523ca8012204b"} Nov 27 07:28:10 crc kubenswrapper[4971]: I1127 07:28:10.200651 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8l8gh" event={"ID":"2677e996-721b-4cff-8b1d-fb927f04e0e6","Type":"ContainerStarted","Data":"d5690977889e4e6c9fdea28582e602212cf5e82b6a236a69eed87be7d11fa832"} Nov 27 07:28:10 crc 
kubenswrapper[4971]: I1127 07:28:10.257873 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8l8gh" podStartSLOduration=2.733945624 podStartE2EDuration="8.257846501s" podCreationTimestamp="2025-11-27 07:28:02 +0000 UTC" firstStartedPulling="2025-11-27 07:28:04.135171063 +0000 UTC m=+2122.327215001" lastFinishedPulling="2025-11-27 07:28:09.65907194 +0000 UTC m=+2127.851115878" observedRunningTime="2025-11-27 07:28:10.250660417 +0000 UTC m=+2128.442704335" watchObservedRunningTime="2025-11-27 07:28:10.257846501 +0000 UTC m=+2128.449890419" Nov 27 07:28:10 crc kubenswrapper[4971]: I1127 07:28:10.838591 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rvkm8"] Nov 27 07:28:10 crc kubenswrapper[4971]: I1127 07:28:10.839174 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rvkm8" podUID="28d80de1-2e08-4fcc-909d-0a9fa46dcb9f" containerName="registry-server" containerID="cri-o://8adffb8969b641cbf6c5d74ac934fa760d8d972880989ac883c199ac0c5a15be" gracePeriod=2 Nov 27 07:28:11 crc kubenswrapper[4971]: I1127 07:28:11.228652 4971 generic.go:334] "Generic (PLEG): container finished" podID="d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c" containerID="f0dde287b1f00dacae568e9b271a8f733ea94dc8b8673b047bb523ca8012204b" exitCode=0 Nov 27 07:28:11 crc kubenswrapper[4971]: I1127 07:28:11.228739 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8p7d" event={"ID":"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c","Type":"ContainerDied","Data":"f0dde287b1f00dacae568e9b271a8f733ea94dc8b8673b047bb523ca8012204b"} Nov 27 07:28:11 crc kubenswrapper[4971]: I1127 07:28:11.232562 4971 generic.go:334] "Generic (PLEG): container finished" podID="28d80de1-2e08-4fcc-909d-0a9fa46dcb9f" containerID="8adffb8969b641cbf6c5d74ac934fa760d8d972880989ac883c199ac0c5a15be" exitCode=0 Nov 27 07:28:11 crc kubenswrapper[4971]: I1127 07:28:11.232644 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rvkm8" event={"ID":"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f","Type":"ContainerDied","Data":"8adffb8969b641cbf6c5d74ac934fa760d8d972880989ac883c199ac0c5a15be"} Nov 27 07:28:11 crc kubenswrapper[4971]: I1127 07:28:11.354025 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rvkm8" Nov 27 07:28:11 crc kubenswrapper[4971]: I1127 07:28:11.405360 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d80de1-2e08-4fcc-909d-0a9fa46dcb9f-catalog-content\") pod \"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f\" (UID: \"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f\") " Nov 27 07:28:11 crc kubenswrapper[4971]: I1127 07:28:11.405578 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r58m5\" (UniqueName: \"kubernetes.io/projected/28d80de1-2e08-4fcc-909d-0a9fa46dcb9f-kube-api-access-r58m5\") pod \"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f\" (UID: \"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f\") " Nov 27 07:28:11 crc kubenswrapper[4971]: I1127 07:28:11.405764 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28d80de1-2e08-4fcc-909d-0a9fa46dcb9f-utilities\") pod \"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f\" (UID: \"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f\") " Nov 27 07:28:11 crc kubenswrapper[4971]: I1127 07:28:11.407286 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28d80de1-2e08-4fcc-909d-0a9fa46dcb9f-utilities" (OuterVolumeSpecName: "utilities") pod "28d80de1-2e08-4fcc-909d-0a9fa46dcb9f" (UID: "28d80de1-2e08-4fcc-909d-0a9fa46dcb9f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:28:11 crc kubenswrapper[4971]: I1127 07:28:11.413292 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28d80de1-2e08-4fcc-909d-0a9fa46dcb9f-kube-api-access-r58m5" (OuterVolumeSpecName: "kube-api-access-r58m5") pod "28d80de1-2e08-4fcc-909d-0a9fa46dcb9f" (UID: "28d80de1-2e08-4fcc-909d-0a9fa46dcb9f"). InnerVolumeSpecName "kube-api-access-r58m5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:28:11 crc kubenswrapper[4971]: I1127 07:28:11.508239 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r58m5\" (UniqueName: \"kubernetes.io/projected/28d80de1-2e08-4fcc-909d-0a9fa46dcb9f-kube-api-access-r58m5\") on node \"crc\" DevicePath \"\"" Nov 27 07:28:11 crc kubenswrapper[4971]: I1127 07:28:11.508508 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28d80de1-2e08-4fcc-909d-0a9fa46dcb9f-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 07:28:11 crc kubenswrapper[4971]: I1127 07:28:11.535305 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28d80de1-2e08-4fcc-909d-0a9fa46dcb9f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "28d80de1-2e08-4fcc-909d-0a9fa46dcb9f" (UID: "28d80de1-2e08-4fcc-909d-0a9fa46dcb9f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:28:11 crc kubenswrapper[4971]: I1127 07:28:11.610375 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d80de1-2e08-4fcc-909d-0a9fa46dcb9f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 07:28:12 crc kubenswrapper[4971]: I1127 07:28:12.245220 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8p7d" event={"ID":"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c","Type":"ContainerStarted","Data":"372e636f2749d4b9543f04628718adfb759d46876608ebdc84d0267ff9ec61eb"} Nov 27 07:28:12 crc kubenswrapper[4971]: I1127 07:28:12.248463 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rvkm8" event={"ID":"28d80de1-2e08-4fcc-909d-0a9fa46dcb9f","Type":"ContainerDied","Data":"adb05204e1b827026607060e4b3f77b960f5ec535d346ec567e5d9135c4201dc"} Nov 27 07:28:12 crc kubenswrapper[4971]: I1127 07:28:12.248568 4971 scope.go:117] "RemoveContainer" containerID="8adffb8969b641cbf6c5d74ac934fa760d8d972880989ac883c199ac0c5a15be" Nov 27 07:28:12 crc kubenswrapper[4971]: I1127 07:28:12.248720 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rvkm8" Nov 27 07:28:12 crc kubenswrapper[4971]: I1127 07:28:12.268743 4971 scope.go:117] "RemoveContainer" containerID="c54d9ebe53aaaae7c92facb0ac0b77175ec74301700e32915e1a0707a31bb19b" Nov 27 07:28:12 crc kubenswrapper[4971]: I1127 07:28:12.270926 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-n8p7d" podStartSLOduration=4.005535519 podStartE2EDuration="7.270907938s" podCreationTimestamp="2025-11-27 07:28:05 +0000 UTC" firstStartedPulling="2025-11-27 07:28:08.492878696 +0000 UTC m=+2126.684922614" lastFinishedPulling="2025-11-27 07:28:11.758251115 +0000 UTC m=+2129.950295033" observedRunningTime="2025-11-27 07:28:12.266233486 +0000 UTC m=+2130.458277434" watchObservedRunningTime="2025-11-27 07:28:12.270907938 +0000 UTC m=+2130.462951856" Nov 27 07:28:12 crc kubenswrapper[4971]: I1127 07:28:12.289731 4971 scope.go:117] "RemoveContainer" containerID="755b78d164b0c8a31da9153d6aa2dee896c122027c8432517678559abca6de5f" Nov 27 07:28:12 crc kubenswrapper[4971]: I1127 07:28:12.292201 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rvkm8"] Nov 27 07:28:12 crc kubenswrapper[4971]: I1127 07:28:12.298046 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rvkm8"] Nov 27 07:28:12 crc kubenswrapper[4971]: I1127 07:28:12.559795 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28d80de1-2e08-4fcc-909d-0a9fa46dcb9f" path="/var/lib/kubelet/pods/28d80de1-2e08-4fcc-909d-0a9fa46dcb9f/volumes" Nov 27 07:28:13 crc kubenswrapper[4971]: I1127 07:28:13.184592 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8l8gh" Nov 27 07:28:13 crc kubenswrapper[4971]: I1127 07:28:13.184701 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8l8gh" Nov 27 07:28:13 crc kubenswrapper[4971]: I1127 07:28:13.240665 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8l8gh" Nov 27 07:28:15 crc kubenswrapper[4971]: I1127 
07:28:15.790456 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-n8p7d" Nov 27 07:28:15 crc kubenswrapper[4971]: I1127 07:28:15.790520 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-n8p7d" Nov 27 07:28:15 crc kubenswrapper[4971]: I1127 07:28:15.840322 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-n8p7d" Nov 27 07:28:16 crc kubenswrapper[4971]: I1127 07:28:16.357291 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-n8p7d" Nov 27 07:28:18 crc kubenswrapper[4971]: I1127 07:28:18.441279 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n8p7d"] Nov 27 07:28:18 crc kubenswrapper[4971]: I1127 07:28:18.441619 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-n8p7d" podUID="d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c" containerName="registry-server" containerID="cri-o://372e636f2749d4b9543f04628718adfb759d46876608ebdc84d0267ff9ec61eb" gracePeriod=2 Nov 27 07:28:18 crc kubenswrapper[4971]: I1127 07:28:18.901732 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n8p7d" Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.025966 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5cf7\" (UniqueName: \"kubernetes.io/projected/d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c-kube-api-access-r5cf7\") pod \"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c\" (UID: \"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c\") " Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.026111 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c-utilities\") pod \"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c\" (UID: \"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c\") " Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.026287 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c-catalog-content\") pod \"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c\" (UID: \"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c\") " Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.027831 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c-utilities" (OuterVolumeSpecName: "utilities") pod "d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c" (UID: "d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.031782 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c-kube-api-access-r5cf7" (OuterVolumeSpecName: "kube-api-access-r5cf7") pod "d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c" (UID: "d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c"). InnerVolumeSpecName "kube-api-access-r5cf7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.044555 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c" (UID: "d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.128426 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5cf7\" (UniqueName: \"kubernetes.io/projected/d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c-kube-api-access-r5cf7\") on node \"crc\" DevicePath \"\"" Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.128474 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.128493 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.321025 4971 generic.go:334] "Generic (PLEG): container finished" podID="d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c" containerID="372e636f2749d4b9543f04628718adfb759d46876608ebdc84d0267ff9ec61eb" exitCode=0 Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.321108 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8p7d" event={"ID":"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c","Type":"ContainerDied","Data":"372e636f2749d4b9543f04628718adfb759d46876608ebdc84d0267ff9ec61eb"} Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.321181 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n8p7d" event={"ID":"d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c","Type":"ContainerDied","Data":"81d82183de14731fd82eeceb3c9c2601263f90745a02a3398f63639c466a1646"} Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.321222 4971 scope.go:117] "RemoveContainer" containerID="372e636f2749d4b9543f04628718adfb759d46876608ebdc84d0267ff9ec61eb" Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.321231 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n8p7d" Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.355028 4971 scope.go:117] "RemoveContainer" containerID="f0dde287b1f00dacae568e9b271a8f733ea94dc8b8673b047bb523ca8012204b" Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.367679 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n8p7d"] Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.372567 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-n8p7d"] Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.390522 4971 scope.go:117] "RemoveContainer" containerID="22d38e24ed33ec9fbc9e4f88515fc35c5d78f51fa3c5e39609b4f8c68b632a1d" Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.412816 4971 scope.go:117] "RemoveContainer" containerID="372e636f2749d4b9543f04628718adfb759d46876608ebdc84d0267ff9ec61eb" Nov 27 07:28:19 crc kubenswrapper[4971]: E1127 07:28:19.413237 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"372e636f2749d4b9543f04628718adfb759d46876608ebdc84d0267ff9ec61eb\": container with ID starting with 372e636f2749d4b9543f04628718adfb759d46876608ebdc84d0267ff9ec61eb not found: ID does not exist" containerID="372e636f2749d4b9543f04628718adfb759d46876608ebdc84d0267ff9ec61eb" Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.413277 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"372e636f2749d4b9543f04628718adfb759d46876608ebdc84d0267ff9ec61eb"} err="failed to get container status \"372e636f2749d4b9543f04628718adfb759d46876608ebdc84d0267ff9ec61eb\": rpc error: code = NotFound desc = could not find container \"372e636f2749d4b9543f04628718adfb759d46876608ebdc84d0267ff9ec61eb\": container with ID starting with 372e636f2749d4b9543f04628718adfb759d46876608ebdc84d0267ff9ec61eb not found: ID does not exist" Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.413310 4971 scope.go:117] "RemoveContainer" containerID="f0dde287b1f00dacae568e9b271a8f733ea94dc8b8673b047bb523ca8012204b" Nov 27 07:28:19 crc kubenswrapper[4971]: E1127 07:28:19.413547 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0dde287b1f00dacae568e9b271a8f733ea94dc8b8673b047bb523ca8012204b\": container with ID starting with f0dde287b1f00dacae568e9b271a8f733ea94dc8b8673b047bb523ca8012204b not found: ID does not exist" containerID="f0dde287b1f00dacae568e9b271a8f733ea94dc8b8673b047bb523ca8012204b" Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.413582 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0dde287b1f00dacae568e9b271a8f733ea94dc8b8673b047bb523ca8012204b"} err="failed to get container status \"f0dde287b1f00dacae568e9b271a8f733ea94dc8b8673b047bb523ca8012204b\": rpc error: code = NotFound desc = could not find container \"f0dde287b1f00dacae568e9b271a8f733ea94dc8b8673b047bb523ca8012204b\": container with ID starting with f0dde287b1f00dacae568e9b271a8f733ea94dc8b8673b047bb523ca8012204b not found: ID does not exist" Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.413603 4971 scope.go:117] "RemoveContainer" containerID="22d38e24ed33ec9fbc9e4f88515fc35c5d78f51fa3c5e39609b4f8c68b632a1d" Nov 27 07:28:19 crc kubenswrapper[4971]: E1127 07:28:19.413918 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"22d38e24ed33ec9fbc9e4f88515fc35c5d78f51fa3c5e39609b4f8c68b632a1d\": container with ID starting with 22d38e24ed33ec9fbc9e4f88515fc35c5d78f51fa3c5e39609b4f8c68b632a1d not found: ID does not exist" containerID="22d38e24ed33ec9fbc9e4f88515fc35c5d78f51fa3c5e39609b4f8c68b632a1d" Nov 27 07:28:19 crc kubenswrapper[4971]: I1127 07:28:19.413961 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22d38e24ed33ec9fbc9e4f88515fc35c5d78f51fa3c5e39609b4f8c68b632a1d"} err="failed to get container status \"22d38e24ed33ec9fbc9e4f88515fc35c5d78f51fa3c5e39609b4f8c68b632a1d\": rpc error: code = NotFound desc = could not find container \"22d38e24ed33ec9fbc9e4f88515fc35c5d78f51fa3c5e39609b4f8c68b632a1d\": container with ID starting with 22d38e24ed33ec9fbc9e4f88515fc35c5d78f51fa3c5e39609b4f8c68b632a1d not found: ID does not exist" Nov 27 07:28:20 crc kubenswrapper[4971]: I1127 07:28:20.568131 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c" path="/var/lib/kubelet/pods/d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c/volumes" Nov 27 07:28:23 crc kubenswrapper[4971]: I1127 07:28:23.250084 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8l8gh" Nov 27 07:28:23 crc kubenswrapper[4971]: I1127 07:28:23.354907 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8l8gh"] Nov 27 07:28:23 crc kubenswrapper[4971]: I1127 07:28:23.400691 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jp5gl"] Nov 27 07:28:23 crc kubenswrapper[4971]: I1127 07:28:23.401431 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jp5gl" podUID="9300284e-8f0c-42eb-a339-820379f2b91c" containerName="registry-server" containerID="cri-o://8a15d15b5be2dd90274ada67dbb42d451b41a458531a59ada5a7e8ccd26f99f4" gracePeriod=2 Nov 27 07:28:23 crc kubenswrapper[4971]: I1127 07:28:23.900021 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jp5gl" Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.023652 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86l6c\" (UniqueName: \"kubernetes.io/projected/9300284e-8f0c-42eb-a339-820379f2b91c-kube-api-access-86l6c\") pod \"9300284e-8f0c-42eb-a339-820379f2b91c\" (UID: \"9300284e-8f0c-42eb-a339-820379f2b91c\") " Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.023742 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9300284e-8f0c-42eb-a339-820379f2b91c-utilities\") pod \"9300284e-8f0c-42eb-a339-820379f2b91c\" (UID: \"9300284e-8f0c-42eb-a339-820379f2b91c\") " Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.023828 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9300284e-8f0c-42eb-a339-820379f2b91c-catalog-content\") pod \"9300284e-8f0c-42eb-a339-820379f2b91c\" (UID: \"9300284e-8f0c-42eb-a339-820379f2b91c\") " Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.038180 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9300284e-8f0c-42eb-a339-820379f2b91c-utilities" (OuterVolumeSpecName: "utilities") pod "9300284e-8f0c-42eb-a339-820379f2b91c" (UID: "9300284e-8f0c-42eb-a339-820379f2b91c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.079243 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9300284e-8f0c-42eb-a339-820379f2b91c-kube-api-access-86l6c" (OuterVolumeSpecName: "kube-api-access-86l6c") pod "9300284e-8f0c-42eb-a339-820379f2b91c" (UID: "9300284e-8f0c-42eb-a339-820379f2b91c"). InnerVolumeSpecName "kube-api-access-86l6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.083780 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9300284e-8f0c-42eb-a339-820379f2b91c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9300284e-8f0c-42eb-a339-820379f2b91c" (UID: "9300284e-8f0c-42eb-a339-820379f2b91c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.126081 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9300284e-8f0c-42eb-a339-820379f2b91c-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.126128 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86l6c\" (UniqueName: \"kubernetes.io/projected/9300284e-8f0c-42eb-a339-820379f2b91c-kube-api-access-86l6c\") on node \"crc\" DevicePath \"\"" Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.126144 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9300284e-8f0c-42eb-a339-820379f2b91c-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.367272 4971 generic.go:334] "Generic (PLEG): container finished" podID="9300284e-8f0c-42eb-a339-820379f2b91c" containerID="8a15d15b5be2dd90274ada67dbb42d451b41a458531a59ada5a7e8ccd26f99f4" exitCode=0 Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.367338 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jp5gl" event={"ID":"9300284e-8f0c-42eb-a339-820379f2b91c","Type":"ContainerDied","Data":"8a15d15b5be2dd90274ada67dbb42d451b41a458531a59ada5a7e8ccd26f99f4"} Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.367370 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jp5gl" Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.367382 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jp5gl" event={"ID":"9300284e-8f0c-42eb-a339-820379f2b91c","Type":"ContainerDied","Data":"454214d93e5dc9f6eefaa52bd8479dad87d2ba51a0d868b250951dbf4539a9d8"} Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.367411 4971 scope.go:117] "RemoveContainer" containerID="8a15d15b5be2dd90274ada67dbb42d451b41a458531a59ada5a7e8ccd26f99f4" Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.393002 4971 scope.go:117] "RemoveContainer" containerID="7dbd9cd76c6ada3cd6803b994ee52c820a52bd00519d02406f81974e6158a947" Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.410839 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jp5gl"] Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.418024 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jp5gl"] Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.430924 4971 scope.go:117] "RemoveContainer" containerID="3e909acb4be6176018a798548e63330e73faced5e499823208284ab2f5bf1d4f" Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.449634 4971 scope.go:117] "RemoveContainer" containerID="8a15d15b5be2dd90274ada67dbb42d451b41a458531a59ada5a7e8ccd26f99f4" Nov 27 07:28:24 crc kubenswrapper[4971]: E1127 07:28:24.450164 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a15d15b5be2dd90274ada67dbb42d451b41a458531a59ada5a7e8ccd26f99f4\": container with ID starting with 8a15d15b5be2dd90274ada67dbb42d451b41a458531a59ada5a7e8ccd26f99f4 not found: ID does not exist" containerID="8a15d15b5be2dd90274ada67dbb42d451b41a458531a59ada5a7e8ccd26f99f4" Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.450202 
4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a15d15b5be2dd90274ada67dbb42d451b41a458531a59ada5a7e8ccd26f99f4"} err="failed to get container status \"8a15d15b5be2dd90274ada67dbb42d451b41a458531a59ada5a7e8ccd26f99f4\": rpc error: code = NotFound desc = could not find container \"8a15d15b5be2dd90274ada67dbb42d451b41a458531a59ada5a7e8ccd26f99f4\": container with ID starting with 8a15d15b5be2dd90274ada67dbb42d451b41a458531a59ada5a7e8ccd26f99f4 not found: ID does not exist" Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.450273 4971 scope.go:117] "RemoveContainer" containerID="7dbd9cd76c6ada3cd6803b994ee52c820a52bd00519d02406f81974e6158a947" Nov 27 07:28:24 crc kubenswrapper[4971]: E1127 07:28:24.450702 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7dbd9cd76c6ada3cd6803b994ee52c820a52bd00519d02406f81974e6158a947\": container with ID starting with 7dbd9cd76c6ada3cd6803b994ee52c820a52bd00519d02406f81974e6158a947 not found: ID does not exist" containerID="7dbd9cd76c6ada3cd6803b994ee52c820a52bd00519d02406f81974e6158a947" Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.450728 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dbd9cd76c6ada3cd6803b994ee52c820a52bd00519d02406f81974e6158a947"} err="failed to get container status \"7dbd9cd76c6ada3cd6803b994ee52c820a52bd00519d02406f81974e6158a947\": rpc error: code = NotFound desc = could not find container \"7dbd9cd76c6ada3cd6803b994ee52c820a52bd00519d02406f81974e6158a947\": container with ID starting with 7dbd9cd76c6ada3cd6803b994ee52c820a52bd00519d02406f81974e6158a947 not found: ID does not exist" Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.450741 4971 scope.go:117] "RemoveContainer" containerID="3e909acb4be6176018a798548e63330e73faced5e499823208284ab2f5bf1d4f" Nov 27 07:28:24 crc kubenswrapper[4971]: E1127 07:28:24.451185 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e909acb4be6176018a798548e63330e73faced5e499823208284ab2f5bf1d4f\": container with ID starting with 3e909acb4be6176018a798548e63330e73faced5e499823208284ab2f5bf1d4f not found: ID does not exist" containerID="3e909acb4be6176018a798548e63330e73faced5e499823208284ab2f5bf1d4f" Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.451205 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e909acb4be6176018a798548e63330e73faced5e499823208284ab2f5bf1d4f"} err="failed to get container status \"3e909acb4be6176018a798548e63330e73faced5e499823208284ab2f5bf1d4f\": rpc error: code = NotFound desc = could not find container \"3e909acb4be6176018a798548e63330e73faced5e499823208284ab2f5bf1d4f\": container with ID starting with 3e909acb4be6176018a798548e63330e73faced5e499823208284ab2f5bf1d4f not found: ID does not exist" Nov 27 07:28:24 crc kubenswrapper[4971]: I1127 07:28:24.561256 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9300284e-8f0c-42eb-a339-820379f2b91c" path="/var/lib/kubelet/pods/9300284e-8f0c-42eb-a339-820379f2b91c/volumes" Nov 27 07:28:56 crc kubenswrapper[4971]: I1127 07:28:56.413587 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:28:56 crc kubenswrapper[4971]: I1127 07:28:56.414372 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.637152 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-79x9r"] Nov 27 07:29:09 crc kubenswrapper[4971]: E1127 07:29:09.638974 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c" containerName="extract-utilities" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.639101 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c" containerName="extract-utilities" Nov 27 07:29:09 crc kubenswrapper[4971]: E1127 07:29:09.639171 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28d80de1-2e08-4fcc-909d-0a9fa46dcb9f" containerName="extract-content" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.639260 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="28d80de1-2e08-4fcc-909d-0a9fa46dcb9f" containerName="extract-content" Nov 27 07:29:09 crc kubenswrapper[4971]: E1127 07:29:09.639329 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28d80de1-2e08-4fcc-909d-0a9fa46dcb9f" containerName="extract-utilities" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.639383 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="28d80de1-2e08-4fcc-909d-0a9fa46dcb9f" containerName="extract-utilities" Nov 27 07:29:09 crc kubenswrapper[4971]: E1127 07:29:09.639444 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9300284e-8f0c-42eb-a339-820379f2b91c" containerName="extract-content" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.639496 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="9300284e-8f0c-42eb-a339-820379f2b91c" containerName="extract-content" Nov 27 07:29:09 crc kubenswrapper[4971]: E1127 07:29:09.639664 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9300284e-8f0c-42eb-a339-820379f2b91c" containerName="extract-utilities" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.639726 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="9300284e-8f0c-42eb-a339-820379f2b91c" containerName="extract-utilities" Nov 27 07:29:09 crc kubenswrapper[4971]: E1127 07:29:09.639785 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c" containerName="extract-content" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.639843 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c" containerName="extract-content" Nov 27 07:29:09 crc kubenswrapper[4971]: E1127 07:29:09.639901 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9300284e-8f0c-42eb-a339-820379f2b91c" containerName="registry-server" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.639952 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="9300284e-8f0c-42eb-a339-820379f2b91c" containerName="registry-server" Nov 27 07:29:09 crc kubenswrapper[4971]: E1127 07:29:09.640010 4971 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c" containerName="registry-server" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.640066 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c" containerName="registry-server" Nov 27 07:29:09 crc kubenswrapper[4971]: E1127 07:29:09.640128 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28d80de1-2e08-4fcc-909d-0a9fa46dcb9f" containerName="registry-server" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.640180 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="28d80de1-2e08-4fcc-909d-0a9fa46dcb9f" containerName="registry-server" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.640356 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="9300284e-8f0c-42eb-a339-820379f2b91c" containerName="registry-server" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.640428 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2f7a7c9-7a80-42a6-9f95-bddca60e0c7c" containerName="registry-server" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.640486 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="28d80de1-2e08-4fcc-909d-0a9fa46dcb9f" containerName="registry-server" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.641692 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-79x9r" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.655736 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-79x9r"] Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.829308 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz2ng\" (UniqueName: \"kubernetes.io/projected/60e516db-8eec-4bc2-8886-da466d49ac79-kube-api-access-jz2ng\") pod \"certified-operators-79x9r\" (UID: \"60e516db-8eec-4bc2-8886-da466d49ac79\") " pod="openshift-marketplace/certified-operators-79x9r" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.829368 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60e516db-8eec-4bc2-8886-da466d49ac79-utilities\") pod \"certified-operators-79x9r\" (UID: \"60e516db-8eec-4bc2-8886-da466d49ac79\") " pod="openshift-marketplace/certified-operators-79x9r" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.829391 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60e516db-8eec-4bc2-8886-da466d49ac79-catalog-content\") pod \"certified-operators-79x9r\" (UID: \"60e516db-8eec-4bc2-8886-da466d49ac79\") " pod="openshift-marketplace/certified-operators-79x9r" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.930681 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60e516db-8eec-4bc2-8886-da466d49ac79-utilities\") pod \"certified-operators-79x9r\" (UID: \"60e516db-8eec-4bc2-8886-da466d49ac79\") " pod="openshift-marketplace/certified-operators-79x9r" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.930735 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60e516db-8eec-4bc2-8886-da466d49ac79-catalog-content\") pod 
\"certified-operators-79x9r\" (UID: \"60e516db-8eec-4bc2-8886-da466d49ac79\") " pod="openshift-marketplace/certified-operators-79x9r" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.930855 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz2ng\" (UniqueName: \"kubernetes.io/projected/60e516db-8eec-4bc2-8886-da466d49ac79-kube-api-access-jz2ng\") pod \"certified-operators-79x9r\" (UID: \"60e516db-8eec-4bc2-8886-da466d49ac79\") " pod="openshift-marketplace/certified-operators-79x9r" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.931350 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60e516db-8eec-4bc2-8886-da466d49ac79-utilities\") pod \"certified-operators-79x9r\" (UID: \"60e516db-8eec-4bc2-8886-da466d49ac79\") " pod="openshift-marketplace/certified-operators-79x9r" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.931350 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60e516db-8eec-4bc2-8886-da466d49ac79-catalog-content\") pod \"certified-operators-79x9r\" (UID: \"60e516db-8eec-4bc2-8886-da466d49ac79\") " pod="openshift-marketplace/certified-operators-79x9r" Nov 27 07:29:09 crc kubenswrapper[4971]: I1127 07:29:09.950569 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jz2ng\" (UniqueName: \"kubernetes.io/projected/60e516db-8eec-4bc2-8886-da466d49ac79-kube-api-access-jz2ng\") pod \"certified-operators-79x9r\" (UID: \"60e516db-8eec-4bc2-8886-da466d49ac79\") " pod="openshift-marketplace/certified-operators-79x9r" Nov 27 07:29:10 crc kubenswrapper[4971]: I1127 07:29:10.011885 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-79x9r" Nov 27 07:29:10 crc kubenswrapper[4971]: I1127 07:29:10.474004 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-79x9r"] Nov 27 07:29:10 crc kubenswrapper[4971]: I1127 07:29:10.769453 4971 generic.go:334] "Generic (PLEG): container finished" podID="60e516db-8eec-4bc2-8886-da466d49ac79" containerID="3a93f95fe1446b658df62037ae66fd5b9197a7b620fe8d566400d13c313cd469" exitCode=0 Nov 27 07:29:10 crc kubenswrapper[4971]: I1127 07:29:10.770870 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-79x9r" event={"ID":"60e516db-8eec-4bc2-8886-da466d49ac79","Type":"ContainerDied","Data":"3a93f95fe1446b658df62037ae66fd5b9197a7b620fe8d566400d13c313cd469"} Nov 27 07:29:10 crc kubenswrapper[4971]: I1127 07:29:10.771124 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-79x9r" event={"ID":"60e516db-8eec-4bc2-8886-da466d49ac79","Type":"ContainerStarted","Data":"fafaabb462efa1cc913c6cc589d15b7a6fe8314c3ceba01fab4f156f6091ecfc"} Nov 27 07:29:11 crc kubenswrapper[4971]: I1127 07:29:11.778442 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-79x9r" event={"ID":"60e516db-8eec-4bc2-8886-da466d49ac79","Type":"ContainerStarted","Data":"c79b906418426a6a31c3960467a8046a89a9fda385c7a2be5f000cacfb7a9334"} Nov 27 07:29:12 crc kubenswrapper[4971]: I1127 07:29:12.789297 4971 generic.go:334] "Generic (PLEG): container finished" podID="60e516db-8eec-4bc2-8886-da466d49ac79" containerID="c79b906418426a6a31c3960467a8046a89a9fda385c7a2be5f000cacfb7a9334" exitCode=0 Nov 27 07:29:12 crc kubenswrapper[4971]: I1127 07:29:12.789362 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-79x9r" event={"ID":"60e516db-8eec-4bc2-8886-da466d49ac79","Type":"ContainerDied","Data":"c79b906418426a6a31c3960467a8046a89a9fda385c7a2be5f000cacfb7a9334"} Nov 27 07:29:13 crc kubenswrapper[4971]: I1127 07:29:13.798485 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-79x9r" event={"ID":"60e516db-8eec-4bc2-8886-da466d49ac79","Type":"ContainerStarted","Data":"2d40ff4a525804fe6ceb3e1c5ff901928c30394a72a6a8d4d8b9a93c9b84fdfe"} Nov 27 07:29:13 crc kubenswrapper[4971]: I1127 07:29:13.821099 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-79x9r" podStartSLOduration=2.008585077 podStartE2EDuration="4.821075592s" podCreationTimestamp="2025-11-27 07:29:09 +0000 UTC" firstStartedPulling="2025-11-27 07:29:10.771123711 +0000 UTC m=+2188.963167629" lastFinishedPulling="2025-11-27 07:29:13.583614226 +0000 UTC m=+2191.775658144" observedRunningTime="2025-11-27 07:29:13.816809939 +0000 UTC m=+2192.008853867" watchObservedRunningTime="2025-11-27 07:29:13.821075592 +0000 UTC m=+2192.013119510" Nov 27 07:29:20 crc kubenswrapper[4971]: I1127 07:29:20.012255 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-79x9r" Nov 27 07:29:20 crc kubenswrapper[4971]: I1127 07:29:20.012574 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-79x9r" Nov 27 07:29:20 crc kubenswrapper[4971]: I1127 07:29:20.082456 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-79x9r" Nov 27 07:29:20 crc kubenswrapper[4971]: I1127 07:29:20.898076 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-79x9r" Nov 27 07:29:20 crc kubenswrapper[4971]: I1127 07:29:20.955147 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-79x9r"] Nov 27 07:29:22 crc kubenswrapper[4971]: I1127 07:29:22.864808 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-79x9r" podUID="60e516db-8eec-4bc2-8886-da466d49ac79" containerName="registry-server" containerID="cri-o://2d40ff4a525804fe6ceb3e1c5ff901928c30394a72a6a8d4d8b9a93c9b84fdfe" gracePeriod=2 Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.290327 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-79x9r" Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.465042 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jz2ng\" (UniqueName: \"kubernetes.io/projected/60e516db-8eec-4bc2-8886-da466d49ac79-kube-api-access-jz2ng\") pod \"60e516db-8eec-4bc2-8886-da466d49ac79\" (UID: \"60e516db-8eec-4bc2-8886-da466d49ac79\") " Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.465218 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60e516db-8eec-4bc2-8886-da466d49ac79-utilities\") pod \"60e516db-8eec-4bc2-8886-da466d49ac79\" (UID: \"60e516db-8eec-4bc2-8886-da466d49ac79\") " Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.465274 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60e516db-8eec-4bc2-8886-da466d49ac79-catalog-content\") pod \"60e516db-8eec-4bc2-8886-da466d49ac79\" (UID: \"60e516db-8eec-4bc2-8886-da466d49ac79\") " Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.466098 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60e516db-8eec-4bc2-8886-da466d49ac79-utilities" (OuterVolumeSpecName: "utilities") pod "60e516db-8eec-4bc2-8886-da466d49ac79" (UID: "60e516db-8eec-4bc2-8886-da466d49ac79"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.471153 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60e516db-8eec-4bc2-8886-da466d49ac79-kube-api-access-jz2ng" (OuterVolumeSpecName: "kube-api-access-jz2ng") pod "60e516db-8eec-4bc2-8886-da466d49ac79" (UID: "60e516db-8eec-4bc2-8886-da466d49ac79"). InnerVolumeSpecName "kube-api-access-jz2ng". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.517513 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60e516db-8eec-4bc2-8886-da466d49ac79-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "60e516db-8eec-4bc2-8886-da466d49ac79" (UID: "60e516db-8eec-4bc2-8886-da466d49ac79"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.567569 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jz2ng\" (UniqueName: \"kubernetes.io/projected/60e516db-8eec-4bc2-8886-da466d49ac79-kube-api-access-jz2ng\") on node \"crc\" DevicePath \"\"" Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.567640 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60e516db-8eec-4bc2-8886-da466d49ac79-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.567655 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60e516db-8eec-4bc2-8886-da466d49ac79-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.874559 4971 generic.go:334] "Generic (PLEG): container finished" podID="60e516db-8eec-4bc2-8886-da466d49ac79" containerID="2d40ff4a525804fe6ceb3e1c5ff901928c30394a72a6a8d4d8b9a93c9b84fdfe" exitCode=0 Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.874613 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-79x9r" event={"ID":"60e516db-8eec-4bc2-8886-da466d49ac79","Type":"ContainerDied","Data":"2d40ff4a525804fe6ceb3e1c5ff901928c30394a72a6a8d4d8b9a93c9b84fdfe"} Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.874649 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-79x9r" event={"ID":"60e516db-8eec-4bc2-8886-da466d49ac79","Type":"ContainerDied","Data":"fafaabb462efa1cc913c6cc589d15b7a6fe8314c3ceba01fab4f156f6091ecfc"} Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.874674 4971 scope.go:117] "RemoveContainer" containerID="2d40ff4a525804fe6ceb3e1c5ff901928c30394a72a6a8d4d8b9a93c9b84fdfe" Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.874718 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-79x9r" Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.902439 4971 scope.go:117] "RemoveContainer" containerID="c79b906418426a6a31c3960467a8046a89a9fda385c7a2be5f000cacfb7a9334" Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.925994 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-79x9r"] Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.933084 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-79x9r"] Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.953419 4971 scope.go:117] "RemoveContainer" containerID="3a93f95fe1446b658df62037ae66fd5b9197a7b620fe8d566400d13c313cd469" Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.986022 4971 scope.go:117] "RemoveContainer" containerID="2d40ff4a525804fe6ceb3e1c5ff901928c30394a72a6a8d4d8b9a93c9b84fdfe" Nov 27 07:29:23 crc kubenswrapper[4971]: E1127 07:29:23.986646 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d40ff4a525804fe6ceb3e1c5ff901928c30394a72a6a8d4d8b9a93c9b84fdfe\": container with ID starting with 2d40ff4a525804fe6ceb3e1c5ff901928c30394a72a6a8d4d8b9a93c9b84fdfe not found: ID does not exist" containerID="2d40ff4a525804fe6ceb3e1c5ff901928c30394a72a6a8d4d8b9a93c9b84fdfe" Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.986708 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d40ff4a525804fe6ceb3e1c5ff901928c30394a72a6a8d4d8b9a93c9b84fdfe"} err="failed to get container status \"2d40ff4a525804fe6ceb3e1c5ff901928c30394a72a6a8d4d8b9a93c9b84fdfe\": rpc error: code = NotFound desc = could not find container \"2d40ff4a525804fe6ceb3e1c5ff901928c30394a72a6a8d4d8b9a93c9b84fdfe\": container with ID starting with 2d40ff4a525804fe6ceb3e1c5ff901928c30394a72a6a8d4d8b9a93c9b84fdfe not found: ID does not exist" Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.986746 4971 scope.go:117] "RemoveContainer" containerID="c79b906418426a6a31c3960467a8046a89a9fda385c7a2be5f000cacfb7a9334" Nov 27 07:29:23 crc kubenswrapper[4971]: E1127 07:29:23.987397 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c79b906418426a6a31c3960467a8046a89a9fda385c7a2be5f000cacfb7a9334\": container with ID starting with c79b906418426a6a31c3960467a8046a89a9fda385c7a2be5f000cacfb7a9334 not found: ID does not exist" containerID="c79b906418426a6a31c3960467a8046a89a9fda385c7a2be5f000cacfb7a9334" Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.987459 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c79b906418426a6a31c3960467a8046a89a9fda385c7a2be5f000cacfb7a9334"} err="failed to get container status \"c79b906418426a6a31c3960467a8046a89a9fda385c7a2be5f000cacfb7a9334\": rpc error: code = NotFound desc = could not find container \"c79b906418426a6a31c3960467a8046a89a9fda385c7a2be5f000cacfb7a9334\": container with ID starting with c79b906418426a6a31c3960467a8046a89a9fda385c7a2be5f000cacfb7a9334 not found: ID does not exist" Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.987498 4971 scope.go:117] "RemoveContainer" containerID="3a93f95fe1446b658df62037ae66fd5b9197a7b620fe8d566400d13c313cd469" Nov 27 07:29:23 crc kubenswrapper[4971]: E1127 07:29:23.987977 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"3a93f95fe1446b658df62037ae66fd5b9197a7b620fe8d566400d13c313cd469\": container with ID starting with 3a93f95fe1446b658df62037ae66fd5b9197a7b620fe8d566400d13c313cd469 not found: ID does not exist" containerID="3a93f95fe1446b658df62037ae66fd5b9197a7b620fe8d566400d13c313cd469" Nov 27 07:29:23 crc kubenswrapper[4971]: I1127 07:29:23.988024 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a93f95fe1446b658df62037ae66fd5b9197a7b620fe8d566400d13c313cd469"} err="failed to get container status \"3a93f95fe1446b658df62037ae66fd5b9197a7b620fe8d566400d13c313cd469\": rpc error: code = NotFound desc = could not find container \"3a93f95fe1446b658df62037ae66fd5b9197a7b620fe8d566400d13c313cd469\": container with ID starting with 3a93f95fe1446b658df62037ae66fd5b9197a7b620fe8d566400d13c313cd469 not found: ID does not exist" Nov 27 07:29:24 crc kubenswrapper[4971]: I1127 07:29:24.559434 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60e516db-8eec-4bc2-8886-da466d49ac79" path="/var/lib/kubelet/pods/60e516db-8eec-4bc2-8886-da466d49ac79/volumes" Nov 27 07:29:26 crc kubenswrapper[4971]: I1127 07:29:26.413405 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:29:26 crc kubenswrapper[4971]: I1127 07:29:26.413766 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:29:56 crc kubenswrapper[4971]: I1127 07:29:56.413789 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:29:56 crc kubenswrapper[4971]: I1127 07:29:56.414780 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:29:56 crc kubenswrapper[4971]: I1127 07:29:56.415037 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 07:29:56 crc kubenswrapper[4971]: I1127 07:29:56.416188 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 07:29:56 crc kubenswrapper[4971]: I1127 07:29:56.416281 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" 
podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" gracePeriod=600 Nov 27 07:29:56 crc kubenswrapper[4971]: E1127 07:29:56.544046 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:29:57 crc kubenswrapper[4971]: I1127 07:29:57.201981 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" exitCode=0 Nov 27 07:29:57 crc kubenswrapper[4971]: I1127 07:29:57.202060 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db"} Nov 27 07:29:57 crc kubenswrapper[4971]: I1127 07:29:57.202141 4971 scope.go:117] "RemoveContainer" containerID="cbff8fd08ba606e2a0830aa64d2cffe84345db1c425fc1298bb5e45af4173719" Nov 27 07:29:57 crc kubenswrapper[4971]: I1127 07:29:57.202947 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:29:57 crc kubenswrapper[4971]: E1127 07:29:57.203413 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 07:30:00.163256 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q"] Nov 27 07:30:00 crc kubenswrapper[4971]: E1127 07:30:00.163725 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60e516db-8eec-4bc2-8886-da466d49ac79" containerName="extract-content" Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 07:30:00.163744 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="60e516db-8eec-4bc2-8886-da466d49ac79" containerName="extract-content" Nov 27 07:30:00 crc kubenswrapper[4971]: E1127 07:30:00.163766 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60e516db-8eec-4bc2-8886-da466d49ac79" containerName="extract-utilities" Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 07:30:00.163773 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="60e516db-8eec-4bc2-8886-da466d49ac79" containerName="extract-utilities" Nov 27 07:30:00 crc kubenswrapper[4971]: E1127 07:30:00.163786 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60e516db-8eec-4bc2-8886-da466d49ac79" containerName="registry-server" Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 07:30:00.163793 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="60e516db-8eec-4bc2-8886-da466d49ac79" containerName="registry-server" Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 
07:30:00.163994 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="60e516db-8eec-4bc2-8886-da466d49ac79" containerName="registry-server" Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 07:30:00.164647 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q" Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 07:30:00.169121 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 07:30:00.178328 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q"] Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 07:30:00.178523 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 07:30:00.240667 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/16c5281c-08c7-4023-8d82-64ffa321b419-config-volume\") pod \"collect-profiles-29403810-d9b6q\" (UID: \"16c5281c-08c7-4023-8d82-64ffa321b419\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q" Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 07:30:00.241131 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/16c5281c-08c7-4023-8d82-64ffa321b419-secret-volume\") pod \"collect-profiles-29403810-d9b6q\" (UID: \"16c5281c-08c7-4023-8d82-64ffa321b419\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q" Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 07:30:00.241171 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vl6wt\" (UniqueName: \"kubernetes.io/projected/16c5281c-08c7-4023-8d82-64ffa321b419-kube-api-access-vl6wt\") pod \"collect-profiles-29403810-d9b6q\" (UID: \"16c5281c-08c7-4023-8d82-64ffa321b419\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q" Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 07:30:00.342571 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/16c5281c-08c7-4023-8d82-64ffa321b419-secret-volume\") pod \"collect-profiles-29403810-d9b6q\" (UID: \"16c5281c-08c7-4023-8d82-64ffa321b419\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q" Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 07:30:00.342681 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vl6wt\" (UniqueName: \"kubernetes.io/projected/16c5281c-08c7-4023-8d82-64ffa321b419-kube-api-access-vl6wt\") pod \"collect-profiles-29403810-d9b6q\" (UID: \"16c5281c-08c7-4023-8d82-64ffa321b419\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q" Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 07:30:00.342745 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/16c5281c-08c7-4023-8d82-64ffa321b419-config-volume\") pod \"collect-profiles-29403810-d9b6q\" (UID: \"16c5281c-08c7-4023-8d82-64ffa321b419\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q" Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 07:30:00.343699 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/16c5281c-08c7-4023-8d82-64ffa321b419-config-volume\") pod \"collect-profiles-29403810-d9b6q\" (UID: \"16c5281c-08c7-4023-8d82-64ffa321b419\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q" Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 07:30:00.353343 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/16c5281c-08c7-4023-8d82-64ffa321b419-secret-volume\") pod \"collect-profiles-29403810-d9b6q\" (UID: \"16c5281c-08c7-4023-8d82-64ffa321b419\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q" Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 07:30:00.360737 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vl6wt\" (UniqueName: \"kubernetes.io/projected/16c5281c-08c7-4023-8d82-64ffa321b419-kube-api-access-vl6wt\") pod \"collect-profiles-29403810-d9b6q\" (UID: \"16c5281c-08c7-4023-8d82-64ffa321b419\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q" Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 07:30:00.489884 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q" Nov 27 07:30:00 crc kubenswrapper[4971]: I1127 07:30:00.944662 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q"] Nov 27 07:30:01 crc kubenswrapper[4971]: I1127 07:30:01.235660 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q" event={"ID":"16c5281c-08c7-4023-8d82-64ffa321b419","Type":"ContainerStarted","Data":"8a7005164b8e83966b8fafce8be97bc5c5460a9ad7d4e1ee9ea5db89739f6ef9"} Nov 27 07:30:01 crc kubenswrapper[4971]: I1127 07:30:01.235711 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q" event={"ID":"16c5281c-08c7-4023-8d82-64ffa321b419","Type":"ContainerStarted","Data":"2171654a1b38661ea431d73a110ff9b362b21f5b4db741f912ce1ddc12c32f2e"} Nov 27 07:30:01 crc kubenswrapper[4971]: I1127 07:30:01.257965 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q" podStartSLOduration=1.257947332 podStartE2EDuration="1.257947332s" podCreationTimestamp="2025-11-27 07:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 07:30:01.257468558 +0000 UTC m=+2239.449512506" watchObservedRunningTime="2025-11-27 07:30:01.257947332 +0000 UTC m=+2239.449991250" Nov 27 07:30:02 crc kubenswrapper[4971]: I1127 07:30:02.244591 4971 generic.go:334] "Generic (PLEG): container finished" podID="16c5281c-08c7-4023-8d82-64ffa321b419" containerID="8a7005164b8e83966b8fafce8be97bc5c5460a9ad7d4e1ee9ea5db89739f6ef9" exitCode=0 Nov 27 07:30:02 crc kubenswrapper[4971]: I1127 07:30:02.244710 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q" 
event={"ID":"16c5281c-08c7-4023-8d82-64ffa321b419","Type":"ContainerDied","Data":"8a7005164b8e83966b8fafce8be97bc5c5460a9ad7d4e1ee9ea5db89739f6ef9"} Nov 27 07:30:03 crc kubenswrapper[4971]: I1127 07:30:03.505789 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q" Nov 27 07:30:03 crc kubenswrapper[4971]: I1127 07:30:03.596427 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/16c5281c-08c7-4023-8d82-64ffa321b419-config-volume\") pod \"16c5281c-08c7-4023-8d82-64ffa321b419\" (UID: \"16c5281c-08c7-4023-8d82-64ffa321b419\") " Nov 27 07:30:03 crc kubenswrapper[4971]: I1127 07:30:03.596501 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vl6wt\" (UniqueName: \"kubernetes.io/projected/16c5281c-08c7-4023-8d82-64ffa321b419-kube-api-access-vl6wt\") pod \"16c5281c-08c7-4023-8d82-64ffa321b419\" (UID: \"16c5281c-08c7-4023-8d82-64ffa321b419\") " Nov 27 07:30:03 crc kubenswrapper[4971]: I1127 07:30:03.596519 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/16c5281c-08c7-4023-8d82-64ffa321b419-secret-volume\") pod \"16c5281c-08c7-4023-8d82-64ffa321b419\" (UID: \"16c5281c-08c7-4023-8d82-64ffa321b419\") " Nov 27 07:30:03 crc kubenswrapper[4971]: I1127 07:30:03.598202 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16c5281c-08c7-4023-8d82-64ffa321b419-config-volume" (OuterVolumeSpecName: "config-volume") pod "16c5281c-08c7-4023-8d82-64ffa321b419" (UID: "16c5281c-08c7-4023-8d82-64ffa321b419"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:30:03 crc kubenswrapper[4971]: I1127 07:30:03.602950 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16c5281c-08c7-4023-8d82-64ffa321b419-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "16c5281c-08c7-4023-8d82-64ffa321b419" (UID: "16c5281c-08c7-4023-8d82-64ffa321b419"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:30:03 crc kubenswrapper[4971]: I1127 07:30:03.603761 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16c5281c-08c7-4023-8d82-64ffa321b419-kube-api-access-vl6wt" (OuterVolumeSpecName: "kube-api-access-vl6wt") pod "16c5281c-08c7-4023-8d82-64ffa321b419" (UID: "16c5281c-08c7-4023-8d82-64ffa321b419"). InnerVolumeSpecName "kube-api-access-vl6wt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:30:03 crc kubenswrapper[4971]: I1127 07:30:03.698335 4971 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/16c5281c-08c7-4023-8d82-64ffa321b419-config-volume\") on node \"crc\" DevicePath \"\"" Nov 27 07:30:03 crc kubenswrapper[4971]: I1127 07:30:03.698628 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vl6wt\" (UniqueName: \"kubernetes.io/projected/16c5281c-08c7-4023-8d82-64ffa321b419-kube-api-access-vl6wt\") on node \"crc\" DevicePath \"\"" Nov 27 07:30:03 crc kubenswrapper[4971]: I1127 07:30:03.698721 4971 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/16c5281c-08c7-4023-8d82-64ffa321b419-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 27 07:30:04 crc kubenswrapper[4971]: I1127 07:30:04.259445 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q" event={"ID":"16c5281c-08c7-4023-8d82-64ffa321b419","Type":"ContainerDied","Data":"2171654a1b38661ea431d73a110ff9b362b21f5b4db741f912ce1ddc12c32f2e"} Nov 27 07:30:04 crc kubenswrapper[4971]: I1127 07:30:04.259869 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2171654a1b38661ea431d73a110ff9b362b21f5b4db741f912ce1ddc12c32f2e" Nov 27 07:30:04 crc kubenswrapper[4971]: I1127 07:30:04.259511 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q" Nov 27 07:30:04 crc kubenswrapper[4971]: I1127 07:30:04.348648 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv"] Nov 27 07:30:04 crc kubenswrapper[4971]: I1127 07:30:04.354944 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403765-26klv"] Nov 27 07:30:04 crc kubenswrapper[4971]: I1127 07:30:04.560444 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89bb08d2-3252-4d85-91bc-3b595be4c32a" path="/var/lib/kubelet/pods/89bb08d2-3252-4d85-91bc-3b595be4c32a/volumes" Nov 27 07:30:07 crc kubenswrapper[4971]: I1127 07:30:07.550496 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:30:07 crc kubenswrapper[4971]: E1127 07:30:07.550753 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:30:18 crc kubenswrapper[4971]: I1127 07:30:18.550855 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:30:18 crc kubenswrapper[4971]: E1127 07:30:18.551821 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:30:33 crc kubenswrapper[4971]: I1127 07:30:33.551228 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:30:33 crc kubenswrapper[4971]: E1127 07:30:33.552285 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:30:47 crc kubenswrapper[4971]: I1127 07:30:47.550033 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:30:47 crc kubenswrapper[4971]: E1127 07:30:47.550911 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:30:50 crc kubenswrapper[4971]: I1127 07:30:50.515127 4971 scope.go:117] "RemoveContainer" containerID="39a1a798b99bdec77c86ab43b9169bbc01a2272afe93d1204b239bfde5fade05" Nov 27 07:30:59 crc kubenswrapper[4971]: I1127 07:30:59.550395 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:30:59 crc kubenswrapper[4971]: E1127 07:30:59.551211 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:31:11 crc kubenswrapper[4971]: I1127 07:31:11.550403 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:31:11 crc kubenswrapper[4971]: E1127 07:31:11.551458 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:31:25 crc kubenswrapper[4971]: I1127 07:31:25.550671 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:31:25 crc kubenswrapper[4971]: E1127 07:31:25.551634 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:31:38 crc kubenswrapper[4971]: I1127 07:31:38.550693 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:31:38 crc kubenswrapper[4971]: E1127 07:31:38.551496 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:31:51 crc kubenswrapper[4971]: I1127 07:31:51.550321 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:31:51 crc kubenswrapper[4971]: E1127 07:31:51.553239 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:32:05 crc kubenswrapper[4971]: I1127 07:32:05.549979 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:32:05 crc kubenswrapper[4971]: E1127 07:32:05.550876 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:32:16 crc kubenswrapper[4971]: I1127 07:32:16.550429 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:32:16 crc kubenswrapper[4971]: E1127 07:32:16.551358 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:32:27 crc kubenswrapper[4971]: I1127 07:32:27.550625 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:32:27 crc kubenswrapper[4971]: E1127 07:32:27.551686 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:32:40 crc kubenswrapper[4971]: I1127 07:32:40.551721 4971 
scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:32:40 crc kubenswrapper[4971]: E1127 07:32:40.553856 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:32:54 crc kubenswrapper[4971]: I1127 07:32:54.551414 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:32:54 crc kubenswrapper[4971]: E1127 07:32:54.553054 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:33:08 crc kubenswrapper[4971]: I1127 07:33:08.550972 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:33:08 crc kubenswrapper[4971]: E1127 07:33:08.552121 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:33:22 crc kubenswrapper[4971]: I1127 07:33:22.556011 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:33:22 crc kubenswrapper[4971]: E1127 07:33:22.557046 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:33:34 crc kubenswrapper[4971]: I1127 07:33:34.551086 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:33:34 crc kubenswrapper[4971]: E1127 07:33:34.551954 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:33:45 crc kubenswrapper[4971]: I1127 07:33:45.550727 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:33:45 crc kubenswrapper[4971]: E1127 07:33:45.552044 4971 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:33:59 crc kubenswrapper[4971]: I1127 07:33:59.550404 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:33:59 crc kubenswrapper[4971]: E1127 07:33:59.551649 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:34:11 crc kubenswrapper[4971]: I1127 07:34:11.550716 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:34:11 crc kubenswrapper[4971]: E1127 07:34:11.551427 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:34:24 crc kubenswrapper[4971]: I1127 07:34:24.551232 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:34:24 crc kubenswrapper[4971]: E1127 07:34:24.552010 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:34:36 crc kubenswrapper[4971]: I1127 07:34:36.550876 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:34:36 crc kubenswrapper[4971]: E1127 07:34:36.551749 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:34:48 crc kubenswrapper[4971]: I1127 07:34:48.550500 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:34:48 crc kubenswrapper[4971]: E1127 07:34:48.552995 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:35:00 crc kubenswrapper[4971]: I1127 07:35:00.554149 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:35:01 crc kubenswrapper[4971]: I1127 07:35:01.952152 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"8a20f41bfce62977b3f58e351a0fb72fcc23df5b5d10de3a2ad75a2995b30ba9"} Nov 27 07:37:26 crc kubenswrapper[4971]: I1127 07:37:26.413578 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:37:26 crc kubenswrapper[4971]: I1127 07:37:26.414691 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:37:56 crc kubenswrapper[4971]: I1127 07:37:56.412863 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:37:56 crc kubenswrapper[4971]: I1127 07:37:56.415090 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:38:26 crc kubenswrapper[4971]: I1127 07:38:26.413880 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:38:26 crc kubenswrapper[4971]: I1127 07:38:26.414370 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:38:26 crc kubenswrapper[4971]: I1127 07:38:26.414407 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 07:38:26 crc kubenswrapper[4971]: I1127 07:38:26.414815 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8a20f41bfce62977b3f58e351a0fb72fcc23df5b5d10de3a2ad75a2995b30ba9"} 
pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 07:38:26 crc kubenswrapper[4971]: I1127 07:38:26.414868 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://8a20f41bfce62977b3f58e351a0fb72fcc23df5b5d10de3a2ad75a2995b30ba9" gracePeriod=600 Nov 27 07:38:26 crc kubenswrapper[4971]: I1127 07:38:26.779576 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="8a20f41bfce62977b3f58e351a0fb72fcc23df5b5d10de3a2ad75a2995b30ba9" exitCode=0 Nov 27 07:38:26 crc kubenswrapper[4971]: I1127 07:38:26.779648 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"8a20f41bfce62977b3f58e351a0fb72fcc23df5b5d10de3a2ad75a2995b30ba9"} Nov 27 07:38:26 crc kubenswrapper[4971]: I1127 07:38:26.780009 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba"} Nov 27 07:38:26 crc kubenswrapper[4971]: I1127 07:38:26.780038 4971 scope.go:117] "RemoveContainer" containerID="04550a7c8f01b448540cbf469ed7c45ccb50f18f8471fca2a80a53490bede4db" Nov 27 07:38:49 crc kubenswrapper[4971]: I1127 07:38:49.713886 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-p4x2b"] Nov 27 07:38:49 crc kubenswrapper[4971]: E1127 07:38:49.715143 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16c5281c-08c7-4023-8d82-64ffa321b419" containerName="collect-profiles" Nov 27 07:38:49 crc kubenswrapper[4971]: I1127 07:38:49.715159 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="16c5281c-08c7-4023-8d82-64ffa321b419" containerName="collect-profiles" Nov 27 07:38:49 crc kubenswrapper[4971]: I1127 07:38:49.715358 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="16c5281c-08c7-4023-8d82-64ffa321b419" containerName="collect-profiles" Nov 27 07:38:49 crc kubenswrapper[4971]: I1127 07:38:49.720673 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p4x2b" Nov 27 07:38:49 crc kubenswrapper[4971]: I1127 07:38:49.729490 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p4x2b"] Nov 27 07:38:49 crc kubenswrapper[4971]: I1127 07:38:49.853725 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1bb27b2c-c1b4-4b92-a05f-fd073f84c541-catalog-content\") pod \"redhat-operators-p4x2b\" (UID: \"1bb27b2c-c1b4-4b92-a05f-fd073f84c541\") " pod="openshift-marketplace/redhat-operators-p4x2b" Nov 27 07:38:49 crc kubenswrapper[4971]: I1127 07:38:49.853830 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlh8n\" (UniqueName: \"kubernetes.io/projected/1bb27b2c-c1b4-4b92-a05f-fd073f84c541-kube-api-access-mlh8n\") pod \"redhat-operators-p4x2b\" (UID: \"1bb27b2c-c1b4-4b92-a05f-fd073f84c541\") " pod="openshift-marketplace/redhat-operators-p4x2b" Nov 27 07:38:49 crc kubenswrapper[4971]: I1127 07:38:49.853858 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1bb27b2c-c1b4-4b92-a05f-fd073f84c541-utilities\") pod \"redhat-operators-p4x2b\" (UID: \"1bb27b2c-c1b4-4b92-a05f-fd073f84c541\") " pod="openshift-marketplace/redhat-operators-p4x2b" Nov 27 07:38:49 crc kubenswrapper[4971]: I1127 07:38:49.958252 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlh8n\" (UniqueName: \"kubernetes.io/projected/1bb27b2c-c1b4-4b92-a05f-fd073f84c541-kube-api-access-mlh8n\") pod \"redhat-operators-p4x2b\" (UID: \"1bb27b2c-c1b4-4b92-a05f-fd073f84c541\") " pod="openshift-marketplace/redhat-operators-p4x2b" Nov 27 07:38:49 crc kubenswrapper[4971]: I1127 07:38:49.958332 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1bb27b2c-c1b4-4b92-a05f-fd073f84c541-utilities\") pod \"redhat-operators-p4x2b\" (UID: \"1bb27b2c-c1b4-4b92-a05f-fd073f84c541\") " pod="openshift-marketplace/redhat-operators-p4x2b" Nov 27 07:38:49 crc kubenswrapper[4971]: I1127 07:38:49.958451 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1bb27b2c-c1b4-4b92-a05f-fd073f84c541-catalog-content\") pod \"redhat-operators-p4x2b\" (UID: \"1bb27b2c-c1b4-4b92-a05f-fd073f84c541\") " pod="openshift-marketplace/redhat-operators-p4x2b" Nov 27 07:38:49 crc kubenswrapper[4971]: I1127 07:38:49.959022 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1bb27b2c-c1b4-4b92-a05f-fd073f84c541-utilities\") pod \"redhat-operators-p4x2b\" (UID: \"1bb27b2c-c1b4-4b92-a05f-fd073f84c541\") " pod="openshift-marketplace/redhat-operators-p4x2b" Nov 27 07:38:49 crc kubenswrapper[4971]: I1127 07:38:49.959103 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1bb27b2c-c1b4-4b92-a05f-fd073f84c541-catalog-content\") pod \"redhat-operators-p4x2b\" (UID: \"1bb27b2c-c1b4-4b92-a05f-fd073f84c541\") " pod="openshift-marketplace/redhat-operators-p4x2b" Nov 27 07:38:50 crc kubenswrapper[4971]: I1127 07:38:50.002261 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-mlh8n\" (UniqueName: \"kubernetes.io/projected/1bb27b2c-c1b4-4b92-a05f-fd073f84c541-kube-api-access-mlh8n\") pod \"redhat-operators-p4x2b\" (UID: \"1bb27b2c-c1b4-4b92-a05f-fd073f84c541\") " pod="openshift-marketplace/redhat-operators-p4x2b" Nov 27 07:38:50 crc kubenswrapper[4971]: I1127 07:38:50.054390 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p4x2b" Nov 27 07:38:50 crc kubenswrapper[4971]: I1127 07:38:50.573978 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p4x2b"] Nov 27 07:38:50 crc kubenswrapper[4971]: I1127 07:38:50.970467 4971 generic.go:334] "Generic (PLEG): container finished" podID="1bb27b2c-c1b4-4b92-a05f-fd073f84c541" containerID="6e7e87f89abbdcd66c553248ba7e81afdeaee683155b72cc364f46beac1ec09e" exitCode=0 Nov 27 07:38:50 crc kubenswrapper[4971]: I1127 07:38:50.970560 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p4x2b" event={"ID":"1bb27b2c-c1b4-4b92-a05f-fd073f84c541","Type":"ContainerDied","Data":"6e7e87f89abbdcd66c553248ba7e81afdeaee683155b72cc364f46beac1ec09e"} Nov 27 07:38:50 crc kubenswrapper[4971]: I1127 07:38:50.970855 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p4x2b" event={"ID":"1bb27b2c-c1b4-4b92-a05f-fd073f84c541","Type":"ContainerStarted","Data":"fe740f5098e09cab82ce891b60f20b502dd4e1b4e639ac43dfb37029e98f7edf"} Nov 27 07:38:50 crc kubenswrapper[4971]: I1127 07:38:50.972894 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 27 07:38:51 crc kubenswrapper[4971]: I1127 07:38:51.980698 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p4x2b" event={"ID":"1bb27b2c-c1b4-4b92-a05f-fd073f84c541","Type":"ContainerStarted","Data":"d9824c00d6cb59e01757a03565aeda7cb57d8f715f25df4682bb99b86927eb39"} Nov 27 07:38:52 crc kubenswrapper[4971]: I1127 07:38:52.989147 4971 generic.go:334] "Generic (PLEG): container finished" podID="1bb27b2c-c1b4-4b92-a05f-fd073f84c541" containerID="d9824c00d6cb59e01757a03565aeda7cb57d8f715f25df4682bb99b86927eb39" exitCode=0 Nov 27 07:38:52 crc kubenswrapper[4971]: I1127 07:38:52.989655 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p4x2b" event={"ID":"1bb27b2c-c1b4-4b92-a05f-fd073f84c541","Type":"ContainerDied","Data":"d9824c00d6cb59e01757a03565aeda7cb57d8f715f25df4682bb99b86927eb39"} Nov 27 07:38:53 crc kubenswrapper[4971]: I1127 07:38:53.998146 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p4x2b" event={"ID":"1bb27b2c-c1b4-4b92-a05f-fd073f84c541","Type":"ContainerStarted","Data":"f1712b9f2f9c5facda3c6cd3b1488ff69709b94919ab491257c0bdd0f86da25a"} Nov 27 07:38:54 crc kubenswrapper[4971]: I1127 07:38:54.030033 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-p4x2b" podStartSLOduration=2.391395133 podStartE2EDuration="5.030010217s" podCreationTimestamp="2025-11-27 07:38:49 +0000 UTC" firstStartedPulling="2025-11-27 07:38:50.972590761 +0000 UTC m=+2769.164634679" lastFinishedPulling="2025-11-27 07:38:53.611205845 +0000 UTC m=+2771.803249763" observedRunningTime="2025-11-27 07:38:54.024815617 +0000 UTC m=+2772.216859535" watchObservedRunningTime="2025-11-27 07:38:54.030010217 +0000 UTC m=+2772.222054135" Nov 27 07:39:00 crc 
kubenswrapper[4971]: I1127 07:39:00.054585 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-p4x2b" Nov 27 07:39:00 crc kubenswrapper[4971]: I1127 07:39:00.055402 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-p4x2b" Nov 27 07:39:00 crc kubenswrapper[4971]: I1127 07:39:00.107824 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-p4x2b" Nov 27 07:39:01 crc kubenswrapper[4971]: I1127 07:39:01.086088 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-p4x2b" Nov 27 07:39:01 crc kubenswrapper[4971]: I1127 07:39:01.139662 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p4x2b"] Nov 27 07:39:03 crc kubenswrapper[4971]: I1127 07:39:03.058552 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-p4x2b" podUID="1bb27b2c-c1b4-4b92-a05f-fd073f84c541" containerName="registry-server" containerID="cri-o://f1712b9f2f9c5facda3c6cd3b1488ff69709b94919ab491257c0bdd0f86da25a" gracePeriod=2 Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.016882 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p4x2b" Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.067352 4971 generic.go:334] "Generic (PLEG): container finished" podID="1bb27b2c-c1b4-4b92-a05f-fd073f84c541" containerID="f1712b9f2f9c5facda3c6cd3b1488ff69709b94919ab491257c0bdd0f86da25a" exitCode=0 Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.067380 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p4x2b" Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.067397 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p4x2b" event={"ID":"1bb27b2c-c1b4-4b92-a05f-fd073f84c541","Type":"ContainerDied","Data":"f1712b9f2f9c5facda3c6cd3b1488ff69709b94919ab491257c0bdd0f86da25a"} Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.068596 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p4x2b" event={"ID":"1bb27b2c-c1b4-4b92-a05f-fd073f84c541","Type":"ContainerDied","Data":"fe740f5098e09cab82ce891b60f20b502dd4e1b4e639ac43dfb37029e98f7edf"} Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.068637 4971 scope.go:117] "RemoveContainer" containerID="f1712b9f2f9c5facda3c6cd3b1488ff69709b94919ab491257c0bdd0f86da25a" Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.092008 4971 scope.go:117] "RemoveContainer" containerID="d9824c00d6cb59e01757a03565aeda7cb57d8f715f25df4682bb99b86927eb39" Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.107953 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1bb27b2c-c1b4-4b92-a05f-fd073f84c541-catalog-content\") pod \"1bb27b2c-c1b4-4b92-a05f-fd073f84c541\" (UID: \"1bb27b2c-c1b4-4b92-a05f-fd073f84c541\") " Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.108111 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1bb27b2c-c1b4-4b92-a05f-fd073f84c541-utilities\") pod \"1bb27b2c-c1b4-4b92-a05f-fd073f84c541\" (UID: \"1bb27b2c-c1b4-4b92-a05f-fd073f84c541\") " Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.108149 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlh8n\" (UniqueName: \"kubernetes.io/projected/1bb27b2c-c1b4-4b92-a05f-fd073f84c541-kube-api-access-mlh8n\") pod \"1bb27b2c-c1b4-4b92-a05f-fd073f84c541\" (UID: \"1bb27b2c-c1b4-4b92-a05f-fd073f84c541\") " Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.109214 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1bb27b2c-c1b4-4b92-a05f-fd073f84c541-utilities" (OuterVolumeSpecName: "utilities") pod "1bb27b2c-c1b4-4b92-a05f-fd073f84c541" (UID: "1bb27b2c-c1b4-4b92-a05f-fd073f84c541"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.113963 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bb27b2c-c1b4-4b92-a05f-fd073f84c541-kube-api-access-mlh8n" (OuterVolumeSpecName: "kube-api-access-mlh8n") pod "1bb27b2c-c1b4-4b92-a05f-fd073f84c541" (UID: "1bb27b2c-c1b4-4b92-a05f-fd073f84c541"). InnerVolumeSpecName "kube-api-access-mlh8n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.117621 4971 scope.go:117] "RemoveContainer" containerID="6e7e87f89abbdcd66c553248ba7e81afdeaee683155b72cc364f46beac1ec09e" Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.159451 4971 scope.go:117] "RemoveContainer" containerID="f1712b9f2f9c5facda3c6cd3b1488ff69709b94919ab491257c0bdd0f86da25a" Nov 27 07:39:04 crc kubenswrapper[4971]: E1127 07:39:04.160018 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1712b9f2f9c5facda3c6cd3b1488ff69709b94919ab491257c0bdd0f86da25a\": container with ID starting with f1712b9f2f9c5facda3c6cd3b1488ff69709b94919ab491257c0bdd0f86da25a not found: ID does not exist" containerID="f1712b9f2f9c5facda3c6cd3b1488ff69709b94919ab491257c0bdd0f86da25a" Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.160057 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1712b9f2f9c5facda3c6cd3b1488ff69709b94919ab491257c0bdd0f86da25a"} err="failed to get container status \"f1712b9f2f9c5facda3c6cd3b1488ff69709b94919ab491257c0bdd0f86da25a\": rpc error: code = NotFound desc = could not find container \"f1712b9f2f9c5facda3c6cd3b1488ff69709b94919ab491257c0bdd0f86da25a\": container with ID starting with f1712b9f2f9c5facda3c6cd3b1488ff69709b94919ab491257c0bdd0f86da25a not found: ID does not exist" Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.160085 4971 scope.go:117] "RemoveContainer" containerID="d9824c00d6cb59e01757a03565aeda7cb57d8f715f25df4682bb99b86927eb39" Nov 27 07:39:04 crc kubenswrapper[4971]: E1127 07:39:04.160403 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9824c00d6cb59e01757a03565aeda7cb57d8f715f25df4682bb99b86927eb39\": container with ID starting with d9824c00d6cb59e01757a03565aeda7cb57d8f715f25df4682bb99b86927eb39 not found: ID does not exist" containerID="d9824c00d6cb59e01757a03565aeda7cb57d8f715f25df4682bb99b86927eb39" Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.160445 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9824c00d6cb59e01757a03565aeda7cb57d8f715f25df4682bb99b86927eb39"} err="failed to get container status \"d9824c00d6cb59e01757a03565aeda7cb57d8f715f25df4682bb99b86927eb39\": rpc error: code = NotFound desc = could not find container \"d9824c00d6cb59e01757a03565aeda7cb57d8f715f25df4682bb99b86927eb39\": container with ID starting with d9824c00d6cb59e01757a03565aeda7cb57d8f715f25df4682bb99b86927eb39 not found: ID does not exist" Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.160462 4971 scope.go:117] "RemoveContainer" containerID="6e7e87f89abbdcd66c553248ba7e81afdeaee683155b72cc364f46beac1ec09e" Nov 27 07:39:04 crc kubenswrapper[4971]: E1127 07:39:04.160746 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e7e87f89abbdcd66c553248ba7e81afdeaee683155b72cc364f46beac1ec09e\": container with ID starting with 6e7e87f89abbdcd66c553248ba7e81afdeaee683155b72cc364f46beac1ec09e not found: ID does not exist" containerID="6e7e87f89abbdcd66c553248ba7e81afdeaee683155b72cc364f46beac1ec09e" Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.160762 4971 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"6e7e87f89abbdcd66c553248ba7e81afdeaee683155b72cc364f46beac1ec09e"} err="failed to get container status \"6e7e87f89abbdcd66c553248ba7e81afdeaee683155b72cc364f46beac1ec09e\": rpc error: code = NotFound desc = could not find container \"6e7e87f89abbdcd66c553248ba7e81afdeaee683155b72cc364f46beac1ec09e\": container with ID starting with 6e7e87f89abbdcd66c553248ba7e81afdeaee683155b72cc364f46beac1ec09e not found: ID does not exist" Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.204196 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1bb27b2c-c1b4-4b92-a05f-fd073f84c541-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1bb27b2c-c1b4-4b92-a05f-fd073f84c541" (UID: "1bb27b2c-c1b4-4b92-a05f-fd073f84c541"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.209809 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1bb27b2c-c1b4-4b92-a05f-fd073f84c541-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.209848 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlh8n\" (UniqueName: \"kubernetes.io/projected/1bb27b2c-c1b4-4b92-a05f-fd073f84c541-kube-api-access-mlh8n\") on node \"crc\" DevicePath \"\"" Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.209860 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1bb27b2c-c1b4-4b92-a05f-fd073f84c541-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.403251 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p4x2b"] Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.408767 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-p4x2b"] Nov 27 07:39:04 crc kubenswrapper[4971]: I1127 07:39:04.559427 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bb27b2c-c1b4-4b92-a05f-fd073f84c541" path="/var/lib/kubelet/pods/1bb27b2c-c1b4-4b92-a05f-fd073f84c541/volumes" Nov 27 07:40:09 crc kubenswrapper[4971]: I1127 07:40:09.735640 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-s2p5t"] Nov 27 07:40:09 crc kubenswrapper[4971]: E1127 07:40:09.736787 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bb27b2c-c1b4-4b92-a05f-fd073f84c541" containerName="registry-server" Nov 27 07:40:09 crc kubenswrapper[4971]: I1127 07:40:09.736811 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bb27b2c-c1b4-4b92-a05f-fd073f84c541" containerName="registry-server" Nov 27 07:40:09 crc kubenswrapper[4971]: E1127 07:40:09.736863 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bb27b2c-c1b4-4b92-a05f-fd073f84c541" containerName="extract-content" Nov 27 07:40:09 crc kubenswrapper[4971]: I1127 07:40:09.736875 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bb27b2c-c1b4-4b92-a05f-fd073f84c541" containerName="extract-content" Nov 27 07:40:09 crc kubenswrapper[4971]: E1127 07:40:09.736890 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bb27b2c-c1b4-4b92-a05f-fd073f84c541" containerName="extract-utilities" Nov 27 07:40:09 crc kubenswrapper[4971]: I1127 07:40:09.736904 4971 
state_mem.go:107] "Deleted CPUSet assignment" podUID="1bb27b2c-c1b4-4b92-a05f-fd073f84c541" containerName="extract-utilities" Nov 27 07:40:09 crc kubenswrapper[4971]: I1127 07:40:09.737130 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bb27b2c-c1b4-4b92-a05f-fd073f84c541" containerName="registry-server" Nov 27 07:40:09 crc kubenswrapper[4971]: I1127 07:40:09.738806 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s2p5t" Nov 27 07:40:09 crc kubenswrapper[4971]: I1127 07:40:09.744955 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s2p5t"] Nov 27 07:40:09 crc kubenswrapper[4971]: I1127 07:40:09.844781 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c65eba2-f4fc-4356-8511-410304785cd0-catalog-content\") pod \"certified-operators-s2p5t\" (UID: \"5c65eba2-f4fc-4356-8511-410304785cd0\") " pod="openshift-marketplace/certified-operators-s2p5t" Nov 27 07:40:09 crc kubenswrapper[4971]: I1127 07:40:09.844847 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v48tv\" (UniqueName: \"kubernetes.io/projected/5c65eba2-f4fc-4356-8511-410304785cd0-kube-api-access-v48tv\") pod \"certified-operators-s2p5t\" (UID: \"5c65eba2-f4fc-4356-8511-410304785cd0\") " pod="openshift-marketplace/certified-operators-s2p5t" Nov 27 07:40:09 crc kubenswrapper[4971]: I1127 07:40:09.844881 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c65eba2-f4fc-4356-8511-410304785cd0-utilities\") pod \"certified-operators-s2p5t\" (UID: \"5c65eba2-f4fc-4356-8511-410304785cd0\") " pod="openshift-marketplace/certified-operators-s2p5t" Nov 27 07:40:09 crc kubenswrapper[4971]: I1127 07:40:09.946022 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c65eba2-f4fc-4356-8511-410304785cd0-catalog-content\") pod \"certified-operators-s2p5t\" (UID: \"5c65eba2-f4fc-4356-8511-410304785cd0\") " pod="openshift-marketplace/certified-operators-s2p5t" Nov 27 07:40:09 crc kubenswrapper[4971]: I1127 07:40:09.946092 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v48tv\" (UniqueName: \"kubernetes.io/projected/5c65eba2-f4fc-4356-8511-410304785cd0-kube-api-access-v48tv\") pod \"certified-operators-s2p5t\" (UID: \"5c65eba2-f4fc-4356-8511-410304785cd0\") " pod="openshift-marketplace/certified-operators-s2p5t" Nov 27 07:40:09 crc kubenswrapper[4971]: I1127 07:40:09.946123 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c65eba2-f4fc-4356-8511-410304785cd0-utilities\") pod \"certified-operators-s2p5t\" (UID: \"5c65eba2-f4fc-4356-8511-410304785cd0\") " pod="openshift-marketplace/certified-operators-s2p5t" Nov 27 07:40:09 crc kubenswrapper[4971]: I1127 07:40:09.946781 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c65eba2-f4fc-4356-8511-410304785cd0-catalog-content\") pod \"certified-operators-s2p5t\" (UID: \"5c65eba2-f4fc-4356-8511-410304785cd0\") " pod="openshift-marketplace/certified-operators-s2p5t" Nov 27 07:40:09 crc 
kubenswrapper[4971]: I1127 07:40:09.946789 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c65eba2-f4fc-4356-8511-410304785cd0-utilities\") pod \"certified-operators-s2p5t\" (UID: \"5c65eba2-f4fc-4356-8511-410304785cd0\") " pod="openshift-marketplace/certified-operators-s2p5t" Nov 27 07:40:09 crc kubenswrapper[4971]: I1127 07:40:09.966318 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v48tv\" (UniqueName: \"kubernetes.io/projected/5c65eba2-f4fc-4356-8511-410304785cd0-kube-api-access-v48tv\") pod \"certified-operators-s2p5t\" (UID: \"5c65eba2-f4fc-4356-8511-410304785cd0\") " pod="openshift-marketplace/certified-operators-s2p5t" Nov 27 07:40:10 crc kubenswrapper[4971]: I1127 07:40:10.116018 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s2p5t" Nov 27 07:40:10 crc kubenswrapper[4971]: I1127 07:40:10.400853 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s2p5t"] Nov 27 07:40:10 crc kubenswrapper[4971]: I1127 07:40:10.672097 4971 generic.go:334] "Generic (PLEG): container finished" podID="5c65eba2-f4fc-4356-8511-410304785cd0" containerID="04e78fdd3a18d87b95119f286fa6814709e75d345e5ad78aae3e899bbf40152d" exitCode=0 Nov 27 07:40:10 crc kubenswrapper[4971]: I1127 07:40:10.672145 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s2p5t" event={"ID":"5c65eba2-f4fc-4356-8511-410304785cd0","Type":"ContainerDied","Data":"04e78fdd3a18d87b95119f286fa6814709e75d345e5ad78aae3e899bbf40152d"} Nov 27 07:40:10 crc kubenswrapper[4971]: I1127 07:40:10.672448 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s2p5t" event={"ID":"5c65eba2-f4fc-4356-8511-410304785cd0","Type":"ContainerStarted","Data":"439600324a871536a72d96ece340ee9941f5b920f8d21f78bc181646348602e0"} Nov 27 07:40:12 crc kubenswrapper[4971]: I1127 07:40:12.688661 4971 generic.go:334] "Generic (PLEG): container finished" podID="5c65eba2-f4fc-4356-8511-410304785cd0" containerID="c26dd04e46ba614c712686e642afab8dde3a43ce6477dbb8a081ec24ea76726a" exitCode=0 Nov 27 07:40:12 crc kubenswrapper[4971]: I1127 07:40:12.689168 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s2p5t" event={"ID":"5c65eba2-f4fc-4356-8511-410304785cd0","Type":"ContainerDied","Data":"c26dd04e46ba614c712686e642afab8dde3a43ce6477dbb8a081ec24ea76726a"} Nov 27 07:40:13 crc kubenswrapper[4971]: I1127 07:40:13.700285 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s2p5t" event={"ID":"5c65eba2-f4fc-4356-8511-410304785cd0","Type":"ContainerStarted","Data":"a934f62374f16b0caeeefb395be62ec24d9096c4404ef01e2ca56ecae0c4f2f0"} Nov 27 07:40:13 crc kubenswrapper[4971]: I1127 07:40:13.728118 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-s2p5t" podStartSLOduration=2.29516423 podStartE2EDuration="4.728097291s" podCreationTimestamp="2025-11-27 07:40:09 +0000 UTC" firstStartedPulling="2025-11-27 07:40:10.67325231 +0000 UTC m=+2848.865296228" lastFinishedPulling="2025-11-27 07:40:13.106185371 +0000 UTC m=+2851.298229289" observedRunningTime="2025-11-27 07:40:13.726168176 +0000 UTC m=+2851.918212114" watchObservedRunningTime="2025-11-27 07:40:13.728097291 +0000 UTC 
m=+2851.920141209" Nov 27 07:40:20 crc kubenswrapper[4971]: I1127 07:40:20.116231 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-s2p5t" Nov 27 07:40:20 crc kubenswrapper[4971]: I1127 07:40:20.116594 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-s2p5t" Nov 27 07:40:20 crc kubenswrapper[4971]: I1127 07:40:20.161231 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-s2p5t" Nov 27 07:40:20 crc kubenswrapper[4971]: I1127 07:40:20.819680 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-s2p5t" Nov 27 07:40:20 crc kubenswrapper[4971]: I1127 07:40:20.947546 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s2p5t"] Nov 27 07:40:22 crc kubenswrapper[4971]: I1127 07:40:22.781044 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-s2p5t" podUID="5c65eba2-f4fc-4356-8511-410304785cd0" containerName="registry-server" containerID="cri-o://a934f62374f16b0caeeefb395be62ec24d9096c4404ef01e2ca56ecae0c4f2f0" gracePeriod=2 Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.180649 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s2p5t" Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.366520 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c65eba2-f4fc-4356-8511-410304785cd0-utilities\") pod \"5c65eba2-f4fc-4356-8511-410304785cd0\" (UID: \"5c65eba2-f4fc-4356-8511-410304785cd0\") " Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.367498 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c65eba2-f4fc-4356-8511-410304785cd0-utilities" (OuterVolumeSpecName: "utilities") pod "5c65eba2-f4fc-4356-8511-410304785cd0" (UID: "5c65eba2-f4fc-4356-8511-410304785cd0"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.368009 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v48tv\" (UniqueName: \"kubernetes.io/projected/5c65eba2-f4fc-4356-8511-410304785cd0-kube-api-access-v48tv\") pod \"5c65eba2-f4fc-4356-8511-410304785cd0\" (UID: \"5c65eba2-f4fc-4356-8511-410304785cd0\") " Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.368084 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c65eba2-f4fc-4356-8511-410304785cd0-catalog-content\") pod \"5c65eba2-f4fc-4356-8511-410304785cd0\" (UID: \"5c65eba2-f4fc-4356-8511-410304785cd0\") " Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.372247 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c65eba2-f4fc-4356-8511-410304785cd0-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.374604 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c65eba2-f4fc-4356-8511-410304785cd0-kube-api-access-v48tv" (OuterVolumeSpecName: "kube-api-access-v48tv") pod "5c65eba2-f4fc-4356-8511-410304785cd0" (UID: "5c65eba2-f4fc-4356-8511-410304785cd0"). InnerVolumeSpecName "kube-api-access-v48tv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.417987 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c65eba2-f4fc-4356-8511-410304785cd0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5c65eba2-f4fc-4356-8511-410304785cd0" (UID: "5c65eba2-f4fc-4356-8511-410304785cd0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.473624 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v48tv\" (UniqueName: \"kubernetes.io/projected/5c65eba2-f4fc-4356-8511-410304785cd0-kube-api-access-v48tv\") on node \"crc\" DevicePath \"\"" Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.473660 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c65eba2-f4fc-4356-8511-410304785cd0-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.800214 4971 generic.go:334] "Generic (PLEG): container finished" podID="5c65eba2-f4fc-4356-8511-410304785cd0" containerID="a934f62374f16b0caeeefb395be62ec24d9096c4404ef01e2ca56ecae0c4f2f0" exitCode=0 Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.800297 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s2p5t" event={"ID":"5c65eba2-f4fc-4356-8511-410304785cd0","Type":"ContainerDied","Data":"a934f62374f16b0caeeefb395be62ec24d9096c4404ef01e2ca56ecae0c4f2f0"} Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.800347 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s2p5t" event={"ID":"5c65eba2-f4fc-4356-8511-410304785cd0","Type":"ContainerDied","Data":"439600324a871536a72d96ece340ee9941f5b920f8d21f78bc181646348602e0"} Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.800384 4971 scope.go:117] "RemoveContainer" containerID="a934f62374f16b0caeeefb395be62ec24d9096c4404ef01e2ca56ecae0c4f2f0" Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.800650 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s2p5t" Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.824629 4971 scope.go:117] "RemoveContainer" containerID="c26dd04e46ba614c712686e642afab8dde3a43ce6477dbb8a081ec24ea76726a" Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.849544 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s2p5t"] Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.856173 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-s2p5t"] Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.869061 4971 scope.go:117] "RemoveContainer" containerID="04e78fdd3a18d87b95119f286fa6814709e75d345e5ad78aae3e899bbf40152d" Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.890662 4971 scope.go:117] "RemoveContainer" containerID="a934f62374f16b0caeeefb395be62ec24d9096c4404ef01e2ca56ecae0c4f2f0" Nov 27 07:40:23 crc kubenswrapper[4971]: E1127 07:40:23.891473 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a934f62374f16b0caeeefb395be62ec24d9096c4404ef01e2ca56ecae0c4f2f0\": container with ID starting with a934f62374f16b0caeeefb395be62ec24d9096c4404ef01e2ca56ecae0c4f2f0 not found: ID does not exist" containerID="a934f62374f16b0caeeefb395be62ec24d9096c4404ef01e2ca56ecae0c4f2f0" Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.891583 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a934f62374f16b0caeeefb395be62ec24d9096c4404ef01e2ca56ecae0c4f2f0"} err="failed to get container status \"a934f62374f16b0caeeefb395be62ec24d9096c4404ef01e2ca56ecae0c4f2f0\": rpc error: code = NotFound desc = could not find container \"a934f62374f16b0caeeefb395be62ec24d9096c4404ef01e2ca56ecae0c4f2f0\": container with ID starting with a934f62374f16b0caeeefb395be62ec24d9096c4404ef01e2ca56ecae0c4f2f0 not found: ID does not exist" Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.891662 4971 scope.go:117] "RemoveContainer" containerID="c26dd04e46ba614c712686e642afab8dde3a43ce6477dbb8a081ec24ea76726a" Nov 27 07:40:23 crc kubenswrapper[4971]: E1127 07:40:23.892299 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c26dd04e46ba614c712686e642afab8dde3a43ce6477dbb8a081ec24ea76726a\": container with ID starting with c26dd04e46ba614c712686e642afab8dde3a43ce6477dbb8a081ec24ea76726a not found: ID does not exist" containerID="c26dd04e46ba614c712686e642afab8dde3a43ce6477dbb8a081ec24ea76726a" Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.892378 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c26dd04e46ba614c712686e642afab8dde3a43ce6477dbb8a081ec24ea76726a"} err="failed to get container status \"c26dd04e46ba614c712686e642afab8dde3a43ce6477dbb8a081ec24ea76726a\": rpc error: code = NotFound desc = could not find container \"c26dd04e46ba614c712686e642afab8dde3a43ce6477dbb8a081ec24ea76726a\": container with ID starting with c26dd04e46ba614c712686e642afab8dde3a43ce6477dbb8a081ec24ea76726a not found: ID does not exist" Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.892450 4971 scope.go:117] "RemoveContainer" containerID="04e78fdd3a18d87b95119f286fa6814709e75d345e5ad78aae3e899bbf40152d" Nov 27 07:40:23 crc kubenswrapper[4971]: E1127 07:40:23.892784 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"04e78fdd3a18d87b95119f286fa6814709e75d345e5ad78aae3e899bbf40152d\": container with ID starting with 04e78fdd3a18d87b95119f286fa6814709e75d345e5ad78aae3e899bbf40152d not found: ID does not exist" containerID="04e78fdd3a18d87b95119f286fa6814709e75d345e5ad78aae3e899bbf40152d" Nov 27 07:40:23 crc kubenswrapper[4971]: I1127 07:40:23.892875 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04e78fdd3a18d87b95119f286fa6814709e75d345e5ad78aae3e899bbf40152d"} err="failed to get container status \"04e78fdd3a18d87b95119f286fa6814709e75d345e5ad78aae3e899bbf40152d\": rpc error: code = NotFound desc = could not find container \"04e78fdd3a18d87b95119f286fa6814709e75d345e5ad78aae3e899bbf40152d\": container with ID starting with 04e78fdd3a18d87b95119f286fa6814709e75d345e5ad78aae3e899bbf40152d not found: ID does not exist" Nov 27 07:40:24 crc kubenswrapper[4971]: I1127 07:40:24.559111 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c65eba2-f4fc-4356-8511-410304785cd0" path="/var/lib/kubelet/pods/5c65eba2-f4fc-4356-8511-410304785cd0/volumes" Nov 27 07:40:26 crc kubenswrapper[4971]: I1127 07:40:26.413047 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:40:26 crc kubenswrapper[4971]: I1127 07:40:26.413118 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:40:56 crc kubenswrapper[4971]: I1127 07:40:56.413400 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:40:56 crc kubenswrapper[4971]: I1127 07:40:56.414227 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:41:26 crc kubenswrapper[4971]: I1127 07:41:26.413339 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:41:26 crc kubenswrapper[4971]: I1127 07:41:26.414855 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:41:26 crc kubenswrapper[4971]: I1127 07:41:26.414946 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 07:41:26 crc kubenswrapper[4971]: I1127 07:41:26.415775 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 07:41:26 crc kubenswrapper[4971]: I1127 07:41:26.415851 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" gracePeriod=600 Nov 27 07:41:26 crc kubenswrapper[4971]: E1127 07:41:26.540934 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:41:27 crc kubenswrapper[4971]: I1127 07:41:27.359524 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" exitCode=0 Nov 27 07:41:27 crc kubenswrapper[4971]: I1127 07:41:27.359833 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba"} Nov 27 07:41:27 crc kubenswrapper[4971]: I1127 07:41:27.359939 4971 scope.go:117] "RemoveContainer" containerID="8a20f41bfce62977b3f58e351a0fb72fcc23df5b5d10de3a2ad75a2995b30ba9" Nov 27 07:41:27 crc kubenswrapper[4971]: I1127 07:41:27.360661 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:41:27 crc kubenswrapper[4971]: E1127 07:41:27.360954 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:41:40 crc kubenswrapper[4971]: I1127 07:41:40.550753 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:41:40 crc kubenswrapper[4971]: E1127 07:41:40.551925 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:41:53 crc 
kubenswrapper[4971]: I1127 07:41:53.550834 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:41:53 crc kubenswrapper[4971]: E1127 07:41:53.551982 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:42:06 crc kubenswrapper[4971]: I1127 07:42:06.550691 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:42:06 crc kubenswrapper[4971]: E1127 07:42:06.551510 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:42:17 crc kubenswrapper[4971]: I1127 07:42:17.551041 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:42:17 crc kubenswrapper[4971]: E1127 07:42:17.552551 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:42:28 crc kubenswrapper[4971]: I1127 07:42:28.550707 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:42:28 crc kubenswrapper[4971]: E1127 07:42:28.551416 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:42:35 crc kubenswrapper[4971]: I1127 07:42:35.313519 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ql4wr"] Nov 27 07:42:35 crc kubenswrapper[4971]: E1127 07:42:35.314561 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c65eba2-f4fc-4356-8511-410304785cd0" containerName="extract-utilities" Nov 27 07:42:35 crc kubenswrapper[4971]: I1127 07:42:35.314587 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c65eba2-f4fc-4356-8511-410304785cd0" containerName="extract-utilities" Nov 27 07:42:35 crc kubenswrapper[4971]: E1127 07:42:35.314602 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c65eba2-f4fc-4356-8511-410304785cd0" containerName="extract-content" Nov 27 07:42:35 crc kubenswrapper[4971]: I1127 07:42:35.314611 4971 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="5c65eba2-f4fc-4356-8511-410304785cd0" containerName="extract-content" Nov 27 07:42:35 crc kubenswrapper[4971]: E1127 07:42:35.314634 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c65eba2-f4fc-4356-8511-410304785cd0" containerName="registry-server" Nov 27 07:42:35 crc kubenswrapper[4971]: I1127 07:42:35.314642 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c65eba2-f4fc-4356-8511-410304785cd0" containerName="registry-server" Nov 27 07:42:35 crc kubenswrapper[4971]: I1127 07:42:35.314865 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c65eba2-f4fc-4356-8511-410304785cd0" containerName="registry-server" Nov 27 07:42:35 crc kubenswrapper[4971]: I1127 07:42:35.316221 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ql4wr" Nov 27 07:42:35 crc kubenswrapper[4971]: I1127 07:42:35.328258 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ql4wr"] Nov 27 07:42:35 crc kubenswrapper[4971]: I1127 07:42:35.483115 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pd6k8\" (UniqueName: \"kubernetes.io/projected/d3118496-96ac-4f8c-af9f-7ab25e3ec5e9-kube-api-access-pd6k8\") pod \"community-operators-ql4wr\" (UID: \"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9\") " pod="openshift-marketplace/community-operators-ql4wr" Nov 27 07:42:35 crc kubenswrapper[4971]: I1127 07:42:35.483294 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3118496-96ac-4f8c-af9f-7ab25e3ec5e9-utilities\") pod \"community-operators-ql4wr\" (UID: \"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9\") " pod="openshift-marketplace/community-operators-ql4wr" Nov 27 07:42:35 crc kubenswrapper[4971]: I1127 07:42:35.483340 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3118496-96ac-4f8c-af9f-7ab25e3ec5e9-catalog-content\") pod \"community-operators-ql4wr\" (UID: \"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9\") " pod="openshift-marketplace/community-operators-ql4wr" Nov 27 07:42:35 crc kubenswrapper[4971]: I1127 07:42:35.584419 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pd6k8\" (UniqueName: \"kubernetes.io/projected/d3118496-96ac-4f8c-af9f-7ab25e3ec5e9-kube-api-access-pd6k8\") pod \"community-operators-ql4wr\" (UID: \"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9\") " pod="openshift-marketplace/community-operators-ql4wr" Nov 27 07:42:35 crc kubenswrapper[4971]: I1127 07:42:35.584806 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3118496-96ac-4f8c-af9f-7ab25e3ec5e9-utilities\") pod \"community-operators-ql4wr\" (UID: \"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9\") " pod="openshift-marketplace/community-operators-ql4wr" Nov 27 07:42:35 crc kubenswrapper[4971]: I1127 07:42:35.584856 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3118496-96ac-4f8c-af9f-7ab25e3ec5e9-catalog-content\") pod \"community-operators-ql4wr\" (UID: \"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9\") " pod="openshift-marketplace/community-operators-ql4wr" Nov 27 07:42:35 crc kubenswrapper[4971]: I1127 
07:42:35.585324 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3118496-96ac-4f8c-af9f-7ab25e3ec5e9-utilities\") pod \"community-operators-ql4wr\" (UID: \"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9\") " pod="openshift-marketplace/community-operators-ql4wr" Nov 27 07:42:35 crc kubenswrapper[4971]: I1127 07:42:35.585372 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3118496-96ac-4f8c-af9f-7ab25e3ec5e9-catalog-content\") pod \"community-operators-ql4wr\" (UID: \"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9\") " pod="openshift-marketplace/community-operators-ql4wr" Nov 27 07:42:35 crc kubenswrapper[4971]: I1127 07:42:35.609966 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pd6k8\" (UniqueName: \"kubernetes.io/projected/d3118496-96ac-4f8c-af9f-7ab25e3ec5e9-kube-api-access-pd6k8\") pod \"community-operators-ql4wr\" (UID: \"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9\") " pod="openshift-marketplace/community-operators-ql4wr" Nov 27 07:42:35 crc kubenswrapper[4971]: I1127 07:42:35.654945 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ql4wr" Nov 27 07:42:35 crc kubenswrapper[4971]: I1127 07:42:35.978175 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ql4wr"] Nov 27 07:42:36 crc kubenswrapper[4971]: I1127 07:42:36.956729 4971 generic.go:334] "Generic (PLEG): container finished" podID="d3118496-96ac-4f8c-af9f-7ab25e3ec5e9" containerID="d81b57440f086bd8ef33ef7bfe70c661824f85e6c000429d16e280e517b6d7e2" exitCode=0 Nov 27 07:42:36 crc kubenswrapper[4971]: I1127 07:42:36.956893 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ql4wr" event={"ID":"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9","Type":"ContainerDied","Data":"d81b57440f086bd8ef33ef7bfe70c661824f85e6c000429d16e280e517b6d7e2"} Nov 27 07:42:36 crc kubenswrapper[4971]: I1127 07:42:36.957087 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ql4wr" event={"ID":"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9","Type":"ContainerStarted","Data":"57b111d1f4ff08adde4f41bffefcd04d7f660a8219a30eb5b61f1b55c8542be0"} Nov 27 07:42:37 crc kubenswrapper[4971]: I1127 07:42:37.969369 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ql4wr" event={"ID":"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9","Type":"ContainerStarted","Data":"5d80de523077ab0ad82423ad60c59ade9fc0de09a39ec292e9fd3655ec43a89c"} Nov 27 07:42:38 crc kubenswrapper[4971]: I1127 07:42:38.116037 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-h4nfz"] Nov 27 07:42:38 crc kubenswrapper[4971]: I1127 07:42:38.118827 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h4nfz" Nov 27 07:42:38 crc kubenswrapper[4971]: I1127 07:42:38.125908 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gln72\" (UniqueName: \"kubernetes.io/projected/4e0870f9-abc4-4e9c-9ee6-28479c5b63d3-kube-api-access-gln72\") pod \"redhat-marketplace-h4nfz\" (UID: \"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3\") " pod="openshift-marketplace/redhat-marketplace-h4nfz" Nov 27 07:42:38 crc kubenswrapper[4971]: I1127 07:42:38.126034 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e0870f9-abc4-4e9c-9ee6-28479c5b63d3-catalog-content\") pod \"redhat-marketplace-h4nfz\" (UID: \"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3\") " pod="openshift-marketplace/redhat-marketplace-h4nfz" Nov 27 07:42:38 crc kubenswrapper[4971]: I1127 07:42:38.126093 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e0870f9-abc4-4e9c-9ee6-28479c5b63d3-utilities\") pod \"redhat-marketplace-h4nfz\" (UID: \"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3\") " pod="openshift-marketplace/redhat-marketplace-h4nfz" Nov 27 07:42:38 crc kubenswrapper[4971]: I1127 07:42:38.142232 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h4nfz"] Nov 27 07:42:38 crc kubenswrapper[4971]: I1127 07:42:38.228263 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e0870f9-abc4-4e9c-9ee6-28479c5b63d3-utilities\") pod \"redhat-marketplace-h4nfz\" (UID: \"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3\") " pod="openshift-marketplace/redhat-marketplace-h4nfz" Nov 27 07:42:38 crc kubenswrapper[4971]: I1127 07:42:38.228384 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gln72\" (UniqueName: \"kubernetes.io/projected/4e0870f9-abc4-4e9c-9ee6-28479c5b63d3-kube-api-access-gln72\") pod \"redhat-marketplace-h4nfz\" (UID: \"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3\") " pod="openshift-marketplace/redhat-marketplace-h4nfz" Nov 27 07:42:38 crc kubenswrapper[4971]: I1127 07:42:38.228610 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e0870f9-abc4-4e9c-9ee6-28479c5b63d3-catalog-content\") pod \"redhat-marketplace-h4nfz\" (UID: \"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3\") " pod="openshift-marketplace/redhat-marketplace-h4nfz" Nov 27 07:42:38 crc kubenswrapper[4971]: I1127 07:42:38.228817 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e0870f9-abc4-4e9c-9ee6-28479c5b63d3-utilities\") pod \"redhat-marketplace-h4nfz\" (UID: \"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3\") " pod="openshift-marketplace/redhat-marketplace-h4nfz" Nov 27 07:42:38 crc kubenswrapper[4971]: I1127 07:42:38.229199 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e0870f9-abc4-4e9c-9ee6-28479c5b63d3-catalog-content\") pod \"redhat-marketplace-h4nfz\" (UID: \"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3\") " pod="openshift-marketplace/redhat-marketplace-h4nfz" Nov 27 07:42:38 crc kubenswrapper[4971]: I1127 07:42:38.260187 4971 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-gln72\" (UniqueName: \"kubernetes.io/projected/4e0870f9-abc4-4e9c-9ee6-28479c5b63d3-kube-api-access-gln72\") pod \"redhat-marketplace-h4nfz\" (UID: \"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3\") " pod="openshift-marketplace/redhat-marketplace-h4nfz" Nov 27 07:42:38 crc kubenswrapper[4971]: I1127 07:42:38.501870 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h4nfz" Nov 27 07:42:38 crc kubenswrapper[4971]: I1127 07:42:38.733552 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h4nfz"] Nov 27 07:42:38 crc kubenswrapper[4971]: I1127 07:42:38.982794 4971 generic.go:334] "Generic (PLEG): container finished" podID="d3118496-96ac-4f8c-af9f-7ab25e3ec5e9" containerID="5d80de523077ab0ad82423ad60c59ade9fc0de09a39ec292e9fd3655ec43a89c" exitCode=0 Nov 27 07:42:38 crc kubenswrapper[4971]: I1127 07:42:38.982871 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ql4wr" event={"ID":"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9","Type":"ContainerDied","Data":"5d80de523077ab0ad82423ad60c59ade9fc0de09a39ec292e9fd3655ec43a89c"} Nov 27 07:42:38 crc kubenswrapper[4971]: I1127 07:42:38.985059 4971 generic.go:334] "Generic (PLEG): container finished" podID="4e0870f9-abc4-4e9c-9ee6-28479c5b63d3" containerID="fe280fa672b2f9a1ce42f8f10e2fbac03c98077af461b26f01cc658c75a6d579" exitCode=0 Nov 27 07:42:38 crc kubenswrapper[4971]: I1127 07:42:38.985085 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h4nfz" event={"ID":"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3","Type":"ContainerDied","Data":"fe280fa672b2f9a1ce42f8f10e2fbac03c98077af461b26f01cc658c75a6d579"} Nov 27 07:42:38 crc kubenswrapper[4971]: I1127 07:42:38.985102 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h4nfz" event={"ID":"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3","Type":"ContainerStarted","Data":"3c688c642aeb129b9ba981344f7701901ab793918ccfcced11c811a6f133b45d"} Nov 27 07:42:39 crc kubenswrapper[4971]: I1127 07:42:39.998961 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ql4wr" event={"ID":"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9","Type":"ContainerStarted","Data":"56265e0df91fbe47f3fd78f82bb6ee9293be7676b209b46b0d0a89919b0b8f96"} Nov 27 07:42:40 crc kubenswrapper[4971]: I1127 07:42:40.024686 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ql4wr" podStartSLOduration=2.508212443 podStartE2EDuration="5.024668162s" podCreationTimestamp="2025-11-27 07:42:35 +0000 UTC" firstStartedPulling="2025-11-27 07:42:36.958241409 +0000 UTC m=+2995.150285327" lastFinishedPulling="2025-11-27 07:42:39.474697108 +0000 UTC m=+2997.666741046" observedRunningTime="2025-11-27 07:42:40.022297523 +0000 UTC m=+2998.214341441" watchObservedRunningTime="2025-11-27 07:42:40.024668162 +0000 UTC m=+2998.216712080" Nov 27 07:42:41 crc kubenswrapper[4971]: I1127 07:42:41.010382 4971 generic.go:334] "Generic (PLEG): container finished" podID="4e0870f9-abc4-4e9c-9ee6-28479c5b63d3" containerID="5a00dd90f98dd5855ac63269a09fc1a1bcead6aebc71a095ae0304a787680879" exitCode=0 Nov 27 07:42:41 crc kubenswrapper[4971]: I1127 07:42:41.010491 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h4nfz" 
event={"ID":"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3","Type":"ContainerDied","Data":"5a00dd90f98dd5855ac63269a09fc1a1bcead6aebc71a095ae0304a787680879"} Nov 27 07:42:42 crc kubenswrapper[4971]: I1127 07:42:42.028126 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h4nfz" event={"ID":"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3","Type":"ContainerStarted","Data":"f29941f8a2fd27df9a47895727077c01f01ae268acf92d365107476fd88a8897"} Nov 27 07:42:42 crc kubenswrapper[4971]: I1127 07:42:42.057523 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-h4nfz" podStartSLOduration=1.4885850299999999 podStartE2EDuration="4.05749132s" podCreationTimestamp="2025-11-27 07:42:38 +0000 UTC" firstStartedPulling="2025-11-27 07:42:38.988086992 +0000 UTC m=+2997.180130910" lastFinishedPulling="2025-11-27 07:42:41.556993282 +0000 UTC m=+2999.749037200" observedRunningTime="2025-11-27 07:42:42.053816454 +0000 UTC m=+3000.245860412" watchObservedRunningTime="2025-11-27 07:42:42.05749132 +0000 UTC m=+3000.249535238" Nov 27 07:42:43 crc kubenswrapper[4971]: I1127 07:42:43.550450 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:42:43 crc kubenswrapper[4971]: E1127 07:42:43.551264 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:42:45 crc kubenswrapper[4971]: I1127 07:42:45.655782 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ql4wr" Nov 27 07:42:45 crc kubenswrapper[4971]: I1127 07:42:45.655848 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ql4wr" Nov 27 07:42:45 crc kubenswrapper[4971]: I1127 07:42:45.745656 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ql4wr" Nov 27 07:42:46 crc kubenswrapper[4971]: I1127 07:42:46.120311 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ql4wr" Nov 27 07:42:46 crc kubenswrapper[4971]: I1127 07:42:46.177311 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ql4wr"] Nov 27 07:42:48 crc kubenswrapper[4971]: I1127 07:42:48.088172 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ql4wr" podUID="d3118496-96ac-4f8c-af9f-7ab25e3ec5e9" containerName="registry-server" containerID="cri-o://56265e0df91fbe47f3fd78f82bb6ee9293be7676b209b46b0d0a89919b0b8f96" gracePeriod=2 Nov 27 07:42:48 crc kubenswrapper[4971]: I1127 07:42:48.502815 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-h4nfz" Nov 27 07:42:48 crc kubenswrapper[4971]: I1127 07:42:48.503827 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-h4nfz" Nov 27 07:42:48 crc kubenswrapper[4971]: I1127 07:42:48.559116 4971 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-h4nfz" Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.053588 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ql4wr" Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.099844 4971 generic.go:334] "Generic (PLEG): container finished" podID="d3118496-96ac-4f8c-af9f-7ab25e3ec5e9" containerID="56265e0df91fbe47f3fd78f82bb6ee9293be7676b209b46b0d0a89919b0b8f96" exitCode=0 Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.100826 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ql4wr" Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.101232 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ql4wr" event={"ID":"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9","Type":"ContainerDied","Data":"56265e0df91fbe47f3fd78f82bb6ee9293be7676b209b46b0d0a89919b0b8f96"} Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.101259 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ql4wr" event={"ID":"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9","Type":"ContainerDied","Data":"57b111d1f4ff08adde4f41bffefcd04d7f660a8219a30eb5b61f1b55c8542be0"} Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.101278 4971 scope.go:117] "RemoveContainer" containerID="56265e0df91fbe47f3fd78f82bb6ee9293be7676b209b46b0d0a89919b0b8f96" Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.153502 4971 scope.go:117] "RemoveContainer" containerID="5d80de523077ab0ad82423ad60c59ade9fc0de09a39ec292e9fd3655ec43a89c" Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.173011 4971 scope.go:117] "RemoveContainer" containerID="d81b57440f086bd8ef33ef7bfe70c661824f85e6c000429d16e280e517b6d7e2" Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.173200 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-h4nfz" Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.202302 4971 scope.go:117] "RemoveContainer" containerID="56265e0df91fbe47f3fd78f82bb6ee9293be7676b209b46b0d0a89919b0b8f96" Nov 27 07:42:49 crc kubenswrapper[4971]: E1127 07:42:49.210755 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56265e0df91fbe47f3fd78f82bb6ee9293be7676b209b46b0d0a89919b0b8f96\": container with ID starting with 56265e0df91fbe47f3fd78f82bb6ee9293be7676b209b46b0d0a89919b0b8f96 not found: ID does not exist" containerID="56265e0df91fbe47f3fd78f82bb6ee9293be7676b209b46b0d0a89919b0b8f96" Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.210802 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56265e0df91fbe47f3fd78f82bb6ee9293be7676b209b46b0d0a89919b0b8f96"} err="failed to get container status \"56265e0df91fbe47f3fd78f82bb6ee9293be7676b209b46b0d0a89919b0b8f96\": rpc error: code = NotFound desc = could not find container \"56265e0df91fbe47f3fd78f82bb6ee9293be7676b209b46b0d0a89919b0b8f96\": container with ID starting with 56265e0df91fbe47f3fd78f82bb6ee9293be7676b209b46b0d0a89919b0b8f96 not found: ID does not exist" Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.210833 4971 scope.go:117] "RemoveContainer" containerID="5d80de523077ab0ad82423ad60c59ade9fc0de09a39ec292e9fd3655ec43a89c" Nov 27 
07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.211300 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pd6k8\" (UniqueName: \"kubernetes.io/projected/d3118496-96ac-4f8c-af9f-7ab25e3ec5e9-kube-api-access-pd6k8\") pod \"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9\" (UID: \"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9\") " Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.211413 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3118496-96ac-4f8c-af9f-7ab25e3ec5e9-utilities\") pod \"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9\" (UID: \"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9\") " Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.211448 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3118496-96ac-4f8c-af9f-7ab25e3ec5e9-catalog-content\") pod \"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9\" (UID: \"d3118496-96ac-4f8c-af9f-7ab25e3ec5e9\") " Nov 27 07:42:49 crc kubenswrapper[4971]: E1127 07:42:49.211828 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d80de523077ab0ad82423ad60c59ade9fc0de09a39ec292e9fd3655ec43a89c\": container with ID starting with 5d80de523077ab0ad82423ad60c59ade9fc0de09a39ec292e9fd3655ec43a89c not found: ID does not exist" containerID="5d80de523077ab0ad82423ad60c59ade9fc0de09a39ec292e9fd3655ec43a89c" Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.211845 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d80de523077ab0ad82423ad60c59ade9fc0de09a39ec292e9fd3655ec43a89c"} err="failed to get container status \"5d80de523077ab0ad82423ad60c59ade9fc0de09a39ec292e9fd3655ec43a89c\": rpc error: code = NotFound desc = could not find container \"5d80de523077ab0ad82423ad60c59ade9fc0de09a39ec292e9fd3655ec43a89c\": container with ID starting with 5d80de523077ab0ad82423ad60c59ade9fc0de09a39ec292e9fd3655ec43a89c not found: ID does not exist" Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.211869 4971 scope.go:117] "RemoveContainer" containerID="d81b57440f086bd8ef33ef7bfe70c661824f85e6c000429d16e280e517b6d7e2" Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.212586 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3118496-96ac-4f8c-af9f-7ab25e3ec5e9-utilities" (OuterVolumeSpecName: "utilities") pod "d3118496-96ac-4f8c-af9f-7ab25e3ec5e9" (UID: "d3118496-96ac-4f8c-af9f-7ab25e3ec5e9"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:42:49 crc kubenswrapper[4971]: E1127 07:42:49.213112 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d81b57440f086bd8ef33ef7bfe70c661824f85e6c000429d16e280e517b6d7e2\": container with ID starting with d81b57440f086bd8ef33ef7bfe70c661824f85e6c000429d16e280e517b6d7e2 not found: ID does not exist" containerID="d81b57440f086bd8ef33ef7bfe70c661824f85e6c000429d16e280e517b6d7e2" Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.213130 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d81b57440f086bd8ef33ef7bfe70c661824f85e6c000429d16e280e517b6d7e2"} err="failed to get container status \"d81b57440f086bd8ef33ef7bfe70c661824f85e6c000429d16e280e517b6d7e2\": rpc error: code = NotFound desc = could not find container \"d81b57440f086bd8ef33ef7bfe70c661824f85e6c000429d16e280e517b6d7e2\": container with ID starting with d81b57440f086bd8ef33ef7bfe70c661824f85e6c000429d16e280e517b6d7e2 not found: ID does not exist" Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.218876 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3118496-96ac-4f8c-af9f-7ab25e3ec5e9-kube-api-access-pd6k8" (OuterVolumeSpecName: "kube-api-access-pd6k8") pod "d3118496-96ac-4f8c-af9f-7ab25e3ec5e9" (UID: "d3118496-96ac-4f8c-af9f-7ab25e3ec5e9"). InnerVolumeSpecName "kube-api-access-pd6k8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.271852 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3118496-96ac-4f8c-af9f-7ab25e3ec5e9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d3118496-96ac-4f8c-af9f-7ab25e3ec5e9" (UID: "d3118496-96ac-4f8c-af9f-7ab25e3ec5e9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.316397 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pd6k8\" (UniqueName: \"kubernetes.io/projected/d3118496-96ac-4f8c-af9f-7ab25e3ec5e9-kube-api-access-pd6k8\") on node \"crc\" DevicePath \"\"" Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.316443 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3118496-96ac-4f8c-af9f-7ab25e3ec5e9-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.316455 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3118496-96ac-4f8c-af9f-7ab25e3ec5e9-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.429109 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ql4wr"] Nov 27 07:42:49 crc kubenswrapper[4971]: I1127 07:42:49.434369 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ql4wr"] Nov 27 07:42:50 crc kubenswrapper[4971]: I1127 07:42:50.561266 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3118496-96ac-4f8c-af9f-7ab25e3ec5e9" path="/var/lib/kubelet/pods/d3118496-96ac-4f8c-af9f-7ab25e3ec5e9/volumes" Nov 27 07:42:51 crc kubenswrapper[4971]: I1127 07:42:51.192390 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h4nfz"] Nov 27 07:42:52 crc kubenswrapper[4971]: I1127 07:42:52.120180 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-h4nfz" podUID="4e0870f9-abc4-4e9c-9ee6-28479c5b63d3" containerName="registry-server" containerID="cri-o://f29941f8a2fd27df9a47895727077c01f01ae268acf92d365107476fd88a8897" gracePeriod=2 Nov 27 07:42:52 crc kubenswrapper[4971]: I1127 07:42:52.512252 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h4nfz" Nov 27 07:42:52 crc kubenswrapper[4971]: I1127 07:42:52.663099 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e0870f9-abc4-4e9c-9ee6-28479c5b63d3-catalog-content\") pod \"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3\" (UID: \"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3\") " Nov 27 07:42:52 crc kubenswrapper[4971]: I1127 07:42:52.663227 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gln72\" (UniqueName: \"kubernetes.io/projected/4e0870f9-abc4-4e9c-9ee6-28479c5b63d3-kube-api-access-gln72\") pod \"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3\" (UID: \"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3\") " Nov 27 07:42:52 crc kubenswrapper[4971]: I1127 07:42:52.663286 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e0870f9-abc4-4e9c-9ee6-28479c5b63d3-utilities\") pod \"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3\" (UID: \"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3\") " Nov 27 07:42:52 crc kubenswrapper[4971]: I1127 07:42:52.664289 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e0870f9-abc4-4e9c-9ee6-28479c5b63d3-utilities" (OuterVolumeSpecName: "utilities") pod "4e0870f9-abc4-4e9c-9ee6-28479c5b63d3" (UID: "4e0870f9-abc4-4e9c-9ee6-28479c5b63d3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:42:52 crc kubenswrapper[4971]: I1127 07:42:52.668517 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e0870f9-abc4-4e9c-9ee6-28479c5b63d3-kube-api-access-gln72" (OuterVolumeSpecName: "kube-api-access-gln72") pod "4e0870f9-abc4-4e9c-9ee6-28479c5b63d3" (UID: "4e0870f9-abc4-4e9c-9ee6-28479c5b63d3"). InnerVolumeSpecName "kube-api-access-gln72". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:42:52 crc kubenswrapper[4971]: I1127 07:42:52.682605 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e0870f9-abc4-4e9c-9ee6-28479c5b63d3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4e0870f9-abc4-4e9c-9ee6-28479c5b63d3" (UID: "4e0870f9-abc4-4e9c-9ee6-28479c5b63d3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:42:52 crc kubenswrapper[4971]: I1127 07:42:52.765248 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e0870f9-abc4-4e9c-9ee6-28479c5b63d3-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 07:42:52 crc kubenswrapper[4971]: I1127 07:42:52.765292 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e0870f9-abc4-4e9c-9ee6-28479c5b63d3-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 07:42:52 crc kubenswrapper[4971]: I1127 07:42:52.765314 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gln72\" (UniqueName: \"kubernetes.io/projected/4e0870f9-abc4-4e9c-9ee6-28479c5b63d3-kube-api-access-gln72\") on node \"crc\" DevicePath \"\"" Nov 27 07:42:53 crc kubenswrapper[4971]: I1127 07:42:53.131373 4971 generic.go:334] "Generic (PLEG): container finished" podID="4e0870f9-abc4-4e9c-9ee6-28479c5b63d3" containerID="f29941f8a2fd27df9a47895727077c01f01ae268acf92d365107476fd88a8897" exitCode=0 Nov 27 07:42:53 crc kubenswrapper[4971]: I1127 07:42:53.131432 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h4nfz" event={"ID":"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3","Type":"ContainerDied","Data":"f29941f8a2fd27df9a47895727077c01f01ae268acf92d365107476fd88a8897"} Nov 27 07:42:53 crc kubenswrapper[4971]: I1127 07:42:53.131477 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h4nfz" event={"ID":"4e0870f9-abc4-4e9c-9ee6-28479c5b63d3","Type":"ContainerDied","Data":"3c688c642aeb129b9ba981344f7701901ab793918ccfcced11c811a6f133b45d"} Nov 27 07:42:53 crc kubenswrapper[4971]: I1127 07:42:53.131522 4971 scope.go:117] "RemoveContainer" containerID="f29941f8a2fd27df9a47895727077c01f01ae268acf92d365107476fd88a8897" Nov 27 07:42:53 crc kubenswrapper[4971]: I1127 07:42:53.132278 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h4nfz" Nov 27 07:42:53 crc kubenswrapper[4971]: I1127 07:42:53.155178 4971 scope.go:117] "RemoveContainer" containerID="5a00dd90f98dd5855ac63269a09fc1a1bcead6aebc71a095ae0304a787680879" Nov 27 07:42:53 crc kubenswrapper[4971]: I1127 07:42:53.179454 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h4nfz"] Nov 27 07:42:53 crc kubenswrapper[4971]: I1127 07:42:53.182995 4971 scope.go:117] "RemoveContainer" containerID="fe280fa672b2f9a1ce42f8f10e2fbac03c98077af461b26f01cc658c75a6d579" Nov 27 07:42:53 crc kubenswrapper[4971]: I1127 07:42:53.187582 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-h4nfz"] Nov 27 07:42:53 crc kubenswrapper[4971]: I1127 07:42:53.260911 4971 scope.go:117] "RemoveContainer" containerID="f29941f8a2fd27df9a47895727077c01f01ae268acf92d365107476fd88a8897" Nov 27 07:42:53 crc kubenswrapper[4971]: E1127 07:42:53.262297 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f29941f8a2fd27df9a47895727077c01f01ae268acf92d365107476fd88a8897\": container with ID starting with f29941f8a2fd27df9a47895727077c01f01ae268acf92d365107476fd88a8897 not found: ID does not exist" containerID="f29941f8a2fd27df9a47895727077c01f01ae268acf92d365107476fd88a8897" Nov 27 07:42:53 crc kubenswrapper[4971]: I1127 07:42:53.262349 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f29941f8a2fd27df9a47895727077c01f01ae268acf92d365107476fd88a8897"} err="failed to get container status \"f29941f8a2fd27df9a47895727077c01f01ae268acf92d365107476fd88a8897\": rpc error: code = NotFound desc = could not find container \"f29941f8a2fd27df9a47895727077c01f01ae268acf92d365107476fd88a8897\": container with ID starting with f29941f8a2fd27df9a47895727077c01f01ae268acf92d365107476fd88a8897 not found: ID does not exist" Nov 27 07:42:53 crc kubenswrapper[4971]: I1127 07:42:53.262386 4971 scope.go:117] "RemoveContainer" containerID="5a00dd90f98dd5855ac63269a09fc1a1bcead6aebc71a095ae0304a787680879" Nov 27 07:42:53 crc kubenswrapper[4971]: E1127 07:42:53.263551 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a00dd90f98dd5855ac63269a09fc1a1bcead6aebc71a095ae0304a787680879\": container with ID starting with 5a00dd90f98dd5855ac63269a09fc1a1bcead6aebc71a095ae0304a787680879 not found: ID does not exist" containerID="5a00dd90f98dd5855ac63269a09fc1a1bcead6aebc71a095ae0304a787680879" Nov 27 07:42:53 crc kubenswrapper[4971]: I1127 07:42:53.263608 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a00dd90f98dd5855ac63269a09fc1a1bcead6aebc71a095ae0304a787680879"} err="failed to get container status \"5a00dd90f98dd5855ac63269a09fc1a1bcead6aebc71a095ae0304a787680879\": rpc error: code = NotFound desc = could not find container \"5a00dd90f98dd5855ac63269a09fc1a1bcead6aebc71a095ae0304a787680879\": container with ID starting with 5a00dd90f98dd5855ac63269a09fc1a1bcead6aebc71a095ae0304a787680879 not found: ID does not exist" Nov 27 07:42:53 crc kubenswrapper[4971]: I1127 07:42:53.263648 4971 scope.go:117] "RemoveContainer" containerID="fe280fa672b2f9a1ce42f8f10e2fbac03c98077af461b26f01cc658c75a6d579" Nov 27 07:42:53 crc kubenswrapper[4971]: E1127 07:42:53.269406 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"fe280fa672b2f9a1ce42f8f10e2fbac03c98077af461b26f01cc658c75a6d579\": container with ID starting with fe280fa672b2f9a1ce42f8f10e2fbac03c98077af461b26f01cc658c75a6d579 not found: ID does not exist" containerID="fe280fa672b2f9a1ce42f8f10e2fbac03c98077af461b26f01cc658c75a6d579" Nov 27 07:42:53 crc kubenswrapper[4971]: I1127 07:42:53.269634 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe280fa672b2f9a1ce42f8f10e2fbac03c98077af461b26f01cc658c75a6d579"} err="failed to get container status \"fe280fa672b2f9a1ce42f8f10e2fbac03c98077af461b26f01cc658c75a6d579\": rpc error: code = NotFound desc = could not find container \"fe280fa672b2f9a1ce42f8f10e2fbac03c98077af461b26f01cc658c75a6d579\": container with ID starting with fe280fa672b2f9a1ce42f8f10e2fbac03c98077af461b26f01cc658c75a6d579 not found: ID does not exist" Nov 27 07:42:54 crc kubenswrapper[4971]: I1127 07:42:54.550300 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:42:54 crc kubenswrapper[4971]: E1127 07:42:54.550871 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:42:54 crc kubenswrapper[4971]: I1127 07:42:54.560420 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e0870f9-abc4-4e9c-9ee6-28479c5b63d3" path="/var/lib/kubelet/pods/4e0870f9-abc4-4e9c-9ee6-28479c5b63d3/volumes" Nov 27 07:43:09 crc kubenswrapper[4971]: I1127 07:43:09.550583 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:43:09 crc kubenswrapper[4971]: E1127 07:43:09.551246 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:43:24 crc kubenswrapper[4971]: I1127 07:43:24.550791 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:43:24 crc kubenswrapper[4971]: E1127 07:43:24.552111 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:43:36 crc kubenswrapper[4971]: I1127 07:43:36.551274 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:43:36 crc kubenswrapper[4971]: E1127 07:43:36.552021 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:43:51 crc kubenswrapper[4971]: I1127 07:43:51.550250 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:43:51 crc kubenswrapper[4971]: E1127 07:43:51.550868 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:44:06 crc kubenswrapper[4971]: I1127 07:44:06.550901 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:44:06 crc kubenswrapper[4971]: E1127 07:44:06.551631 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:44:20 crc kubenswrapper[4971]: I1127 07:44:20.550634 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:44:20 crc kubenswrapper[4971]: E1127 07:44:20.553289 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:44:33 crc kubenswrapper[4971]: I1127 07:44:33.551621 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:44:33 crc kubenswrapper[4971]: E1127 07:44:33.567499 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:44:44 crc kubenswrapper[4971]: I1127 07:44:44.551013 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:44:44 crc kubenswrapper[4971]: E1127 07:44:44.551721 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:44:56 crc kubenswrapper[4971]: I1127 07:44:56.551077 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:44:56 crc kubenswrapper[4971]: E1127 07:44:56.552597 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.156905 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b"] Nov 27 07:45:00 crc kubenswrapper[4971]: E1127 07:45:00.157591 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3118496-96ac-4f8c-af9f-7ab25e3ec5e9" containerName="extract-utilities" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.157605 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3118496-96ac-4f8c-af9f-7ab25e3ec5e9" containerName="extract-utilities" Nov 27 07:45:00 crc kubenswrapper[4971]: E1127 07:45:00.157619 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e0870f9-abc4-4e9c-9ee6-28479c5b63d3" containerName="extract-content" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.157625 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e0870f9-abc4-4e9c-9ee6-28479c5b63d3" containerName="extract-content" Nov 27 07:45:00 crc kubenswrapper[4971]: E1127 07:45:00.157648 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e0870f9-abc4-4e9c-9ee6-28479c5b63d3" containerName="registry-server" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.157655 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e0870f9-abc4-4e9c-9ee6-28479c5b63d3" containerName="registry-server" Nov 27 07:45:00 crc kubenswrapper[4971]: E1127 07:45:00.157666 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3118496-96ac-4f8c-af9f-7ab25e3ec5e9" containerName="registry-server" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.157671 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3118496-96ac-4f8c-af9f-7ab25e3ec5e9" containerName="registry-server" Nov 27 07:45:00 crc kubenswrapper[4971]: E1127 07:45:00.157678 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e0870f9-abc4-4e9c-9ee6-28479c5b63d3" containerName="extract-utilities" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.157684 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e0870f9-abc4-4e9c-9ee6-28479c5b63d3" containerName="extract-utilities" Nov 27 07:45:00 crc kubenswrapper[4971]: E1127 07:45:00.157695 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3118496-96ac-4f8c-af9f-7ab25e3ec5e9" containerName="extract-content" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.157701 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3118496-96ac-4f8c-af9f-7ab25e3ec5e9" containerName="extract-content" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.157855 4971 
memory_manager.go:354] "RemoveStaleState removing state" podUID="d3118496-96ac-4f8c-af9f-7ab25e3ec5e9" containerName="registry-server" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.157873 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e0870f9-abc4-4e9c-9ee6-28479c5b63d3" containerName="registry-server" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.158436 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.161187 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.161957 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.164501 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b"] Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.270850 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8562a4b0-3958-4e0d-bb1b-f98624c8e0a0-secret-volume\") pod \"collect-profiles-29403825-8f48b\" (UID: \"8562a4b0-3958-4e0d-bb1b-f98624c8e0a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.270960 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jwv9\" (UniqueName: \"kubernetes.io/projected/8562a4b0-3958-4e0d-bb1b-f98624c8e0a0-kube-api-access-2jwv9\") pod \"collect-profiles-29403825-8f48b\" (UID: \"8562a4b0-3958-4e0d-bb1b-f98624c8e0a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.270991 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8562a4b0-3958-4e0d-bb1b-f98624c8e0a0-config-volume\") pod \"collect-profiles-29403825-8f48b\" (UID: \"8562a4b0-3958-4e0d-bb1b-f98624c8e0a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.372325 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8562a4b0-3958-4e0d-bb1b-f98624c8e0a0-secret-volume\") pod \"collect-profiles-29403825-8f48b\" (UID: \"8562a4b0-3958-4e0d-bb1b-f98624c8e0a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.372403 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jwv9\" (UniqueName: \"kubernetes.io/projected/8562a4b0-3958-4e0d-bb1b-f98624c8e0a0-kube-api-access-2jwv9\") pod \"collect-profiles-29403825-8f48b\" (UID: \"8562a4b0-3958-4e0d-bb1b-f98624c8e0a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.372445 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/8562a4b0-3958-4e0d-bb1b-f98624c8e0a0-config-volume\") pod \"collect-profiles-29403825-8f48b\" (UID: \"8562a4b0-3958-4e0d-bb1b-f98624c8e0a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.374003 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8562a4b0-3958-4e0d-bb1b-f98624c8e0a0-config-volume\") pod \"collect-profiles-29403825-8f48b\" (UID: \"8562a4b0-3958-4e0d-bb1b-f98624c8e0a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.380443 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8562a4b0-3958-4e0d-bb1b-f98624c8e0a0-secret-volume\") pod \"collect-profiles-29403825-8f48b\" (UID: \"8562a4b0-3958-4e0d-bb1b-f98624c8e0a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.406478 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jwv9\" (UniqueName: \"kubernetes.io/projected/8562a4b0-3958-4e0d-bb1b-f98624c8e0a0-kube-api-access-2jwv9\") pod \"collect-profiles-29403825-8f48b\" (UID: \"8562a4b0-3958-4e0d-bb1b-f98624c8e0a0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.478467 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b" Nov 27 07:45:00 crc kubenswrapper[4971]: I1127 07:45:00.906408 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b"] Nov 27 07:45:01 crc kubenswrapper[4971]: I1127 07:45:01.015460 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b" event={"ID":"8562a4b0-3958-4e0d-bb1b-f98624c8e0a0","Type":"ContainerStarted","Data":"3f064f17e2dc92ab0c032d6ef54002e1d23a3c4b932ded214ddb55c5b6bdd65e"} Nov 27 07:45:02 crc kubenswrapper[4971]: I1127 07:45:02.025153 4971 generic.go:334] "Generic (PLEG): container finished" podID="8562a4b0-3958-4e0d-bb1b-f98624c8e0a0" containerID="c28e1eb4431d10612d0132b96a91d3622d0af46c80f61fcc774274ce1fc4a912" exitCode=0 Nov 27 07:45:02 crc kubenswrapper[4971]: I1127 07:45:02.025199 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b" event={"ID":"8562a4b0-3958-4e0d-bb1b-f98624c8e0a0","Type":"ContainerDied","Data":"c28e1eb4431d10612d0132b96a91d3622d0af46c80f61fcc774274ce1fc4a912"} Nov 27 07:45:03 crc kubenswrapper[4971]: I1127 07:45:03.309272 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b" Nov 27 07:45:03 crc kubenswrapper[4971]: I1127 07:45:03.430358 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8562a4b0-3958-4e0d-bb1b-f98624c8e0a0-config-volume\") pod \"8562a4b0-3958-4e0d-bb1b-f98624c8e0a0\" (UID: \"8562a4b0-3958-4e0d-bb1b-f98624c8e0a0\") " Nov 27 07:45:03 crc kubenswrapper[4971]: I1127 07:45:03.430629 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8562a4b0-3958-4e0d-bb1b-f98624c8e0a0-secret-volume\") pod \"8562a4b0-3958-4e0d-bb1b-f98624c8e0a0\" (UID: \"8562a4b0-3958-4e0d-bb1b-f98624c8e0a0\") " Nov 27 07:45:03 crc kubenswrapper[4971]: I1127 07:45:03.430704 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2jwv9\" (UniqueName: \"kubernetes.io/projected/8562a4b0-3958-4e0d-bb1b-f98624c8e0a0-kube-api-access-2jwv9\") pod \"8562a4b0-3958-4e0d-bb1b-f98624c8e0a0\" (UID: \"8562a4b0-3958-4e0d-bb1b-f98624c8e0a0\") " Nov 27 07:45:03 crc kubenswrapper[4971]: I1127 07:45:03.431734 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8562a4b0-3958-4e0d-bb1b-f98624c8e0a0-config-volume" (OuterVolumeSpecName: "config-volume") pod "8562a4b0-3958-4e0d-bb1b-f98624c8e0a0" (UID: "8562a4b0-3958-4e0d-bb1b-f98624c8e0a0"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 07:45:03 crc kubenswrapper[4971]: I1127 07:45:03.437902 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8562a4b0-3958-4e0d-bb1b-f98624c8e0a0-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8562a4b0-3958-4e0d-bb1b-f98624c8e0a0" (UID: "8562a4b0-3958-4e0d-bb1b-f98624c8e0a0"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 07:45:03 crc kubenswrapper[4971]: I1127 07:45:03.438799 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8562a4b0-3958-4e0d-bb1b-f98624c8e0a0-kube-api-access-2jwv9" (OuterVolumeSpecName: "kube-api-access-2jwv9") pod "8562a4b0-3958-4e0d-bb1b-f98624c8e0a0" (UID: "8562a4b0-3958-4e0d-bb1b-f98624c8e0a0"). InnerVolumeSpecName "kube-api-access-2jwv9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:45:03 crc kubenswrapper[4971]: I1127 07:45:03.532016 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2jwv9\" (UniqueName: \"kubernetes.io/projected/8562a4b0-3958-4e0d-bb1b-f98624c8e0a0-kube-api-access-2jwv9\") on node \"crc\" DevicePath \"\"" Nov 27 07:45:03 crc kubenswrapper[4971]: I1127 07:45:03.532059 4971 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8562a4b0-3958-4e0d-bb1b-f98624c8e0a0-config-volume\") on node \"crc\" DevicePath \"\"" Nov 27 07:45:03 crc kubenswrapper[4971]: I1127 07:45:03.532067 4971 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8562a4b0-3958-4e0d-bb1b-f98624c8e0a0-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 27 07:45:04 crc kubenswrapper[4971]: I1127 07:45:04.042755 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b" event={"ID":"8562a4b0-3958-4e0d-bb1b-f98624c8e0a0","Type":"ContainerDied","Data":"3f064f17e2dc92ab0c032d6ef54002e1d23a3c4b932ded214ddb55c5b6bdd65e"} Nov 27 07:45:04 crc kubenswrapper[4971]: I1127 07:45:04.042805 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f064f17e2dc92ab0c032d6ef54002e1d23a3c4b932ded214ddb55c5b6bdd65e" Nov 27 07:45:04 crc kubenswrapper[4971]: I1127 07:45:04.042822 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b" Nov 27 07:45:04 crc kubenswrapper[4971]: I1127 07:45:04.396922 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr"] Nov 27 07:45:04 crc kubenswrapper[4971]: I1127 07:45:04.404698 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403780-v8rfr"] Nov 27 07:45:04 crc kubenswrapper[4971]: I1127 07:45:04.572111 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b8e584a-c179-4555-a366-1be20fe8ceff" path="/var/lib/kubelet/pods/1b8e584a-c179-4555-a366-1be20fe8ceff/volumes" Nov 27 07:45:11 crc kubenswrapper[4971]: I1127 07:45:11.550188 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:45:11 crc kubenswrapper[4971]: E1127 07:45:11.550944 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:45:26 crc kubenswrapper[4971]: I1127 07:45:26.550331 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:45:26 crc kubenswrapper[4971]: E1127 07:45:26.551071 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:45:39 crc kubenswrapper[4971]: I1127 07:45:39.550840 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:45:39 crc kubenswrapper[4971]: E1127 07:45:39.551749 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:45:50 crc kubenswrapper[4971]: I1127 07:45:50.898362 4971 scope.go:117] "RemoveContainer" containerID="73d80397be3db23d8723608eb13ba4e74e54d2bf24b60748b7c9caebfff69f29" Nov 27 07:45:52 crc kubenswrapper[4971]: I1127 07:45:52.559668 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:45:52 crc kubenswrapper[4971]: E1127 07:45:52.561585 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:46:07 crc kubenswrapper[4971]: I1127 07:46:07.551500 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:46:07 crc kubenswrapper[4971]: E1127 07:46:07.553147 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:46:19 crc kubenswrapper[4971]: I1127 07:46:19.550511 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:46:19 crc kubenswrapper[4971]: E1127 07:46:19.551214 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:46:34 crc kubenswrapper[4971]: I1127 07:46:34.551214 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:46:34 crc kubenswrapper[4971]: I1127 07:46:34.861522 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"6997d0f598181cb1d1e5b044e2cbe23beb9ee620d82bd962670348cd0c073626"} Nov 27 07:48:53 crc kubenswrapper[4971]: I1127 
07:48:53.656379 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m452v"] Nov 27 07:48:53 crc kubenswrapper[4971]: E1127 07:48:53.657350 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8562a4b0-3958-4e0d-bb1b-f98624c8e0a0" containerName="collect-profiles" Nov 27 07:48:53 crc kubenswrapper[4971]: I1127 07:48:53.657365 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="8562a4b0-3958-4e0d-bb1b-f98624c8e0a0" containerName="collect-profiles" Nov 27 07:48:53 crc kubenswrapper[4971]: I1127 07:48:53.657555 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="8562a4b0-3958-4e0d-bb1b-f98624c8e0a0" containerName="collect-profiles" Nov 27 07:48:53 crc kubenswrapper[4971]: I1127 07:48:53.659526 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m452v" Nov 27 07:48:53 crc kubenswrapper[4971]: I1127 07:48:53.680995 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m452v"] Nov 27 07:48:53 crc kubenswrapper[4971]: I1127 07:48:53.835626 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e-utilities\") pod \"redhat-operators-m452v\" (UID: \"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e\") " pod="openshift-marketplace/redhat-operators-m452v" Nov 27 07:48:53 crc kubenswrapper[4971]: I1127 07:48:53.835717 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tx4h2\" (UniqueName: \"kubernetes.io/projected/7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e-kube-api-access-tx4h2\") pod \"redhat-operators-m452v\" (UID: \"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e\") " pod="openshift-marketplace/redhat-operators-m452v" Nov 27 07:48:53 crc kubenswrapper[4971]: I1127 07:48:53.835765 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e-catalog-content\") pod \"redhat-operators-m452v\" (UID: \"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e\") " pod="openshift-marketplace/redhat-operators-m452v" Nov 27 07:48:53 crc kubenswrapper[4971]: I1127 07:48:53.938061 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e-utilities\") pod \"redhat-operators-m452v\" (UID: \"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e\") " pod="openshift-marketplace/redhat-operators-m452v" Nov 27 07:48:53 crc kubenswrapper[4971]: I1127 07:48:53.938166 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tx4h2\" (UniqueName: \"kubernetes.io/projected/7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e-kube-api-access-tx4h2\") pod \"redhat-operators-m452v\" (UID: \"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e\") " pod="openshift-marketplace/redhat-operators-m452v" Nov 27 07:48:53 crc kubenswrapper[4971]: I1127 07:48:53.938216 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e-catalog-content\") pod \"redhat-operators-m452v\" (UID: \"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e\") " pod="openshift-marketplace/redhat-operators-m452v" Nov 27 07:48:53 crc kubenswrapper[4971]: I1127 07:48:53.939269 4971 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e-utilities\") pod \"redhat-operators-m452v\" (UID: \"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e\") " pod="openshift-marketplace/redhat-operators-m452v" Nov 27 07:48:53 crc kubenswrapper[4971]: I1127 07:48:53.939287 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e-catalog-content\") pod \"redhat-operators-m452v\" (UID: \"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e\") " pod="openshift-marketplace/redhat-operators-m452v" Nov 27 07:48:53 crc kubenswrapper[4971]: I1127 07:48:53.963582 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tx4h2\" (UniqueName: \"kubernetes.io/projected/7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e-kube-api-access-tx4h2\") pod \"redhat-operators-m452v\" (UID: \"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e\") " pod="openshift-marketplace/redhat-operators-m452v" Nov 27 07:48:53 crc kubenswrapper[4971]: I1127 07:48:53.984934 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m452v" Nov 27 07:48:54 crc kubenswrapper[4971]: I1127 07:48:54.520093 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m452v"] Nov 27 07:48:55 crc kubenswrapper[4971]: I1127 07:48:55.126141 4971 generic.go:334] "Generic (PLEG): container finished" podID="7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e" containerID="4f95cf4b7d98f5379230d19027add7e135e9030ae2f6221ba712f5474413e69c" exitCode=0 Nov 27 07:48:55 crc kubenswrapper[4971]: I1127 07:48:55.126274 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m452v" event={"ID":"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e","Type":"ContainerDied","Data":"4f95cf4b7d98f5379230d19027add7e135e9030ae2f6221ba712f5474413e69c"} Nov 27 07:48:55 crc kubenswrapper[4971]: I1127 07:48:55.126656 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m452v" event={"ID":"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e","Type":"ContainerStarted","Data":"f789b92214589caf47fb1dc3e3cc228541728ffb2fa17e7811fb750268d49026"} Nov 27 07:48:55 crc kubenswrapper[4971]: I1127 07:48:55.128102 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 27 07:48:56 crc kubenswrapper[4971]: I1127 07:48:56.140497 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m452v" event={"ID":"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e","Type":"ContainerStarted","Data":"f82ad9867de80a912aca2641007b04fe97a0a4c932f023bb8ce2b60080e5a66b"} Nov 27 07:48:56 crc kubenswrapper[4971]: I1127 07:48:56.413457 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:48:56 crc kubenswrapper[4971]: I1127 07:48:56.413580 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" Nov 27 07:48:57 crc kubenswrapper[4971]: I1127 07:48:57.149073 4971 generic.go:334] "Generic (PLEG): container finished" podID="7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e" containerID="f82ad9867de80a912aca2641007b04fe97a0a4c932f023bb8ce2b60080e5a66b" exitCode=0 Nov 27 07:48:57 crc kubenswrapper[4971]: I1127 07:48:57.149127 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m452v" event={"ID":"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e","Type":"ContainerDied","Data":"f82ad9867de80a912aca2641007b04fe97a0a4c932f023bb8ce2b60080e5a66b"} Nov 27 07:48:58 crc kubenswrapper[4971]: I1127 07:48:58.160515 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m452v" event={"ID":"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e","Type":"ContainerStarted","Data":"746ea410a8b562b11ac81823f3a6fd02d0e5b7da829fe2bac5bebf5e4eb90888"} Nov 27 07:48:58 crc kubenswrapper[4971]: I1127 07:48:58.184178 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m452v" podStartSLOduration=2.70885301 podStartE2EDuration="5.184149854s" podCreationTimestamp="2025-11-27 07:48:53 +0000 UTC" firstStartedPulling="2025-11-27 07:48:55.127841266 +0000 UTC m=+3373.319885184" lastFinishedPulling="2025-11-27 07:48:57.60313811 +0000 UTC m=+3375.795182028" observedRunningTime="2025-11-27 07:48:58.181413885 +0000 UTC m=+3376.373457803" watchObservedRunningTime="2025-11-27 07:48:58.184149854 +0000 UTC m=+3376.376193772" Nov 27 07:49:03 crc kubenswrapper[4971]: I1127 07:49:03.986001 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m452v" Nov 27 07:49:03 crc kubenswrapper[4971]: I1127 07:49:03.986306 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-m452v" Nov 27 07:49:04 crc kubenswrapper[4971]: I1127 07:49:04.031376 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-m452v" Nov 27 07:49:04 crc kubenswrapper[4971]: I1127 07:49:04.257830 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m452v" Nov 27 07:49:04 crc kubenswrapper[4971]: I1127 07:49:04.322071 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m452v"] Nov 27 07:49:06 crc kubenswrapper[4971]: I1127 07:49:06.218474 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m452v" podUID="7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e" containerName="registry-server" containerID="cri-o://746ea410a8b562b11ac81823f3a6fd02d0e5b7da829fe2bac5bebf5e4eb90888" gracePeriod=2 Nov 27 07:49:06 crc kubenswrapper[4971]: I1127 07:49:06.671723 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m452v" Nov 27 07:49:06 crc kubenswrapper[4971]: I1127 07:49:06.776383 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e-utilities\") pod \"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e\" (UID: \"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e\") " Nov 27 07:49:06 crc kubenswrapper[4971]: I1127 07:49:06.776497 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tx4h2\" (UniqueName: \"kubernetes.io/projected/7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e-kube-api-access-tx4h2\") pod \"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e\" (UID: \"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e\") " Nov 27 07:49:06 crc kubenswrapper[4971]: I1127 07:49:06.776658 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e-catalog-content\") pod \"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e\" (UID: \"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e\") " Nov 27 07:49:06 crc kubenswrapper[4971]: I1127 07:49:06.777439 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e-utilities" (OuterVolumeSpecName: "utilities") pod "7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e" (UID: "7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:49:06 crc kubenswrapper[4971]: I1127 07:49:06.784121 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e-kube-api-access-tx4h2" (OuterVolumeSpecName: "kube-api-access-tx4h2") pod "7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e" (UID: "7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e"). InnerVolumeSpecName "kube-api-access-tx4h2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:49:06 crc kubenswrapper[4971]: I1127 07:49:06.881630 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 07:49:06 crc kubenswrapper[4971]: I1127 07:49:06.881707 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tx4h2\" (UniqueName: \"kubernetes.io/projected/7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e-kube-api-access-tx4h2\") on node \"crc\" DevicePath \"\"" Nov 27 07:49:07 crc kubenswrapper[4971]: I1127 07:49:07.229428 4971 generic.go:334] "Generic (PLEG): container finished" podID="7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e" containerID="746ea410a8b562b11ac81823f3a6fd02d0e5b7da829fe2bac5bebf5e4eb90888" exitCode=0 Nov 27 07:49:07 crc kubenswrapper[4971]: I1127 07:49:07.230633 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m452v" event={"ID":"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e","Type":"ContainerDied","Data":"746ea410a8b562b11ac81823f3a6fd02d0e5b7da829fe2bac5bebf5e4eb90888"} Nov 27 07:49:07 crc kubenswrapper[4971]: I1127 07:49:07.230824 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m452v" event={"ID":"7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e","Type":"ContainerDied","Data":"f789b92214589caf47fb1dc3e3cc228541728ffb2fa17e7811fb750268d49026"} Nov 27 07:49:07 crc kubenswrapper[4971]: I1127 07:49:07.230766 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m452v" Nov 27 07:49:07 crc kubenswrapper[4971]: I1127 07:49:07.230907 4971 scope.go:117] "RemoveContainer" containerID="746ea410a8b562b11ac81823f3a6fd02d0e5b7da829fe2bac5bebf5e4eb90888" Nov 27 07:49:07 crc kubenswrapper[4971]: I1127 07:49:07.250423 4971 scope.go:117] "RemoveContainer" containerID="f82ad9867de80a912aca2641007b04fe97a0a4c932f023bb8ce2b60080e5a66b" Nov 27 07:49:07 crc kubenswrapper[4971]: I1127 07:49:07.301640 4971 scope.go:117] "RemoveContainer" containerID="4f95cf4b7d98f5379230d19027add7e135e9030ae2f6221ba712f5474413e69c" Nov 27 07:49:07 crc kubenswrapper[4971]: I1127 07:49:07.318401 4971 scope.go:117] "RemoveContainer" containerID="746ea410a8b562b11ac81823f3a6fd02d0e5b7da829fe2bac5bebf5e4eb90888" Nov 27 07:49:07 crc kubenswrapper[4971]: E1127 07:49:07.318882 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"746ea410a8b562b11ac81823f3a6fd02d0e5b7da829fe2bac5bebf5e4eb90888\": container with ID starting with 746ea410a8b562b11ac81823f3a6fd02d0e5b7da829fe2bac5bebf5e4eb90888 not found: ID does not exist" containerID="746ea410a8b562b11ac81823f3a6fd02d0e5b7da829fe2bac5bebf5e4eb90888" Nov 27 07:49:07 crc kubenswrapper[4971]: I1127 07:49:07.318925 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"746ea410a8b562b11ac81823f3a6fd02d0e5b7da829fe2bac5bebf5e4eb90888"} err="failed to get container status \"746ea410a8b562b11ac81823f3a6fd02d0e5b7da829fe2bac5bebf5e4eb90888\": rpc error: code = NotFound desc = could not find container \"746ea410a8b562b11ac81823f3a6fd02d0e5b7da829fe2bac5bebf5e4eb90888\": container with ID starting with 746ea410a8b562b11ac81823f3a6fd02d0e5b7da829fe2bac5bebf5e4eb90888 not found: ID does not exist" Nov 27 07:49:07 crc kubenswrapper[4971]: I1127 07:49:07.318956 4971 scope.go:117] 
"RemoveContainer" containerID="f82ad9867de80a912aca2641007b04fe97a0a4c932f023bb8ce2b60080e5a66b" Nov 27 07:49:07 crc kubenswrapper[4971]: E1127 07:49:07.319383 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f82ad9867de80a912aca2641007b04fe97a0a4c932f023bb8ce2b60080e5a66b\": container with ID starting with f82ad9867de80a912aca2641007b04fe97a0a4c932f023bb8ce2b60080e5a66b not found: ID does not exist" containerID="f82ad9867de80a912aca2641007b04fe97a0a4c932f023bb8ce2b60080e5a66b" Nov 27 07:49:07 crc kubenswrapper[4971]: I1127 07:49:07.319419 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f82ad9867de80a912aca2641007b04fe97a0a4c932f023bb8ce2b60080e5a66b"} err="failed to get container status \"f82ad9867de80a912aca2641007b04fe97a0a4c932f023bb8ce2b60080e5a66b\": rpc error: code = NotFound desc = could not find container \"f82ad9867de80a912aca2641007b04fe97a0a4c932f023bb8ce2b60080e5a66b\": container with ID starting with f82ad9867de80a912aca2641007b04fe97a0a4c932f023bb8ce2b60080e5a66b not found: ID does not exist" Nov 27 07:49:07 crc kubenswrapper[4971]: I1127 07:49:07.319440 4971 scope.go:117] "RemoveContainer" containerID="4f95cf4b7d98f5379230d19027add7e135e9030ae2f6221ba712f5474413e69c" Nov 27 07:49:07 crc kubenswrapper[4971]: E1127 07:49:07.319923 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f95cf4b7d98f5379230d19027add7e135e9030ae2f6221ba712f5474413e69c\": container with ID starting with 4f95cf4b7d98f5379230d19027add7e135e9030ae2f6221ba712f5474413e69c not found: ID does not exist" containerID="4f95cf4b7d98f5379230d19027add7e135e9030ae2f6221ba712f5474413e69c" Nov 27 07:49:07 crc kubenswrapper[4971]: I1127 07:49:07.319956 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f95cf4b7d98f5379230d19027add7e135e9030ae2f6221ba712f5474413e69c"} err="failed to get container status \"4f95cf4b7d98f5379230d19027add7e135e9030ae2f6221ba712f5474413e69c\": rpc error: code = NotFound desc = could not find container \"4f95cf4b7d98f5379230d19027add7e135e9030ae2f6221ba712f5474413e69c\": container with ID starting with 4f95cf4b7d98f5379230d19027add7e135e9030ae2f6221ba712f5474413e69c not found: ID does not exist" Nov 27 07:49:07 crc kubenswrapper[4971]: I1127 07:49:07.822907 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e" (UID: "7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:49:07 crc kubenswrapper[4971]: I1127 07:49:07.863212 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m452v"] Nov 27 07:49:07 crc kubenswrapper[4971]: I1127 07:49:07.869289 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-m452v"] Nov 27 07:49:07 crc kubenswrapper[4971]: I1127 07:49:07.898642 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 07:49:08 crc kubenswrapper[4971]: I1127 07:49:08.565418 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e" path="/var/lib/kubelet/pods/7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e/volumes" Nov 27 07:49:26 crc kubenswrapper[4971]: I1127 07:49:26.413136 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:49:26 crc kubenswrapper[4971]: I1127 07:49:26.413824 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:49:56 crc kubenswrapper[4971]: I1127 07:49:56.413136 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:49:56 crc kubenswrapper[4971]: I1127 07:49:56.414684 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:49:56 crc kubenswrapper[4971]: I1127 07:49:56.414746 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 07:49:56 crc kubenswrapper[4971]: I1127 07:49:56.415593 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6997d0f598181cb1d1e5b044e2cbe23beb9ee620d82bd962670348cd0c073626"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 07:49:56 crc kubenswrapper[4971]: I1127 07:49:56.415669 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://6997d0f598181cb1d1e5b044e2cbe23beb9ee620d82bd962670348cd0c073626" gracePeriod=600 Nov 27 07:49:56 crc kubenswrapper[4971]: I1127 07:49:56.682615 4971 generic.go:334] "Generic (PLEG): container 
finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="6997d0f598181cb1d1e5b044e2cbe23beb9ee620d82bd962670348cd0c073626" exitCode=0 Nov 27 07:49:56 crc kubenswrapper[4971]: I1127 07:49:56.682662 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"6997d0f598181cb1d1e5b044e2cbe23beb9ee620d82bd962670348cd0c073626"} Nov 27 07:49:56 crc kubenswrapper[4971]: I1127 07:49:56.684103 4971 scope.go:117] "RemoveContainer" containerID="57f5aba5ce0ca112914b3a5119727b4d09c3fed417999b800c0ae9c7a63553ba" Nov 27 07:49:57 crc kubenswrapper[4971]: I1127 07:49:57.696715 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863"} Nov 27 07:51:33 crc kubenswrapper[4971]: I1127 07:51:33.858428 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-s7jvc"] Nov 27 07:51:33 crc kubenswrapper[4971]: E1127 07:51:33.859639 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e" containerName="extract-utilities" Nov 27 07:51:33 crc kubenswrapper[4971]: I1127 07:51:33.859656 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e" containerName="extract-utilities" Nov 27 07:51:33 crc kubenswrapper[4971]: E1127 07:51:33.859680 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e" containerName="extract-content" Nov 27 07:51:33 crc kubenswrapper[4971]: I1127 07:51:33.859689 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e" containerName="extract-content" Nov 27 07:51:33 crc kubenswrapper[4971]: E1127 07:51:33.859711 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e" containerName="registry-server" Nov 27 07:51:33 crc kubenswrapper[4971]: I1127 07:51:33.859720 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e" containerName="registry-server" Nov 27 07:51:33 crc kubenswrapper[4971]: I1127 07:51:33.859944 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="7810d63a-3dfd-41b7-a7b9-5b21e96dfd1e" containerName="registry-server" Nov 27 07:51:33 crc kubenswrapper[4971]: I1127 07:51:33.861343 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s7jvc" Nov 27 07:51:33 crc kubenswrapper[4971]: I1127 07:51:33.881441 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s7jvc"] Nov 27 07:51:33 crc kubenswrapper[4971]: I1127 07:51:33.980824 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c79ef807-77e8-472c-aa5f-d92ce5648992-utilities\") pod \"certified-operators-s7jvc\" (UID: \"c79ef807-77e8-472c-aa5f-d92ce5648992\") " pod="openshift-marketplace/certified-operators-s7jvc" Nov 27 07:51:33 crc kubenswrapper[4971]: I1127 07:51:33.980980 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c79ef807-77e8-472c-aa5f-d92ce5648992-catalog-content\") pod \"certified-operators-s7jvc\" (UID: \"c79ef807-77e8-472c-aa5f-d92ce5648992\") " pod="openshift-marketplace/certified-operators-s7jvc" Nov 27 07:51:33 crc kubenswrapper[4971]: I1127 07:51:33.981024 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4n56v\" (UniqueName: \"kubernetes.io/projected/c79ef807-77e8-472c-aa5f-d92ce5648992-kube-api-access-4n56v\") pod \"certified-operators-s7jvc\" (UID: \"c79ef807-77e8-472c-aa5f-d92ce5648992\") " pod="openshift-marketplace/certified-operators-s7jvc" Nov 27 07:51:34 crc kubenswrapper[4971]: I1127 07:51:34.082478 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c79ef807-77e8-472c-aa5f-d92ce5648992-utilities\") pod \"certified-operators-s7jvc\" (UID: \"c79ef807-77e8-472c-aa5f-d92ce5648992\") " pod="openshift-marketplace/certified-operators-s7jvc" Nov 27 07:51:34 crc kubenswrapper[4971]: I1127 07:51:34.082567 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c79ef807-77e8-472c-aa5f-d92ce5648992-catalog-content\") pod \"certified-operators-s7jvc\" (UID: \"c79ef807-77e8-472c-aa5f-d92ce5648992\") " pod="openshift-marketplace/certified-operators-s7jvc" Nov 27 07:51:34 crc kubenswrapper[4971]: I1127 07:51:34.082629 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4n56v\" (UniqueName: \"kubernetes.io/projected/c79ef807-77e8-472c-aa5f-d92ce5648992-kube-api-access-4n56v\") pod \"certified-operators-s7jvc\" (UID: \"c79ef807-77e8-472c-aa5f-d92ce5648992\") " pod="openshift-marketplace/certified-operators-s7jvc" Nov 27 07:51:34 crc kubenswrapper[4971]: I1127 07:51:34.083460 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c79ef807-77e8-472c-aa5f-d92ce5648992-utilities\") pod \"certified-operators-s7jvc\" (UID: \"c79ef807-77e8-472c-aa5f-d92ce5648992\") " pod="openshift-marketplace/certified-operators-s7jvc" Nov 27 07:51:34 crc kubenswrapper[4971]: I1127 07:51:34.083646 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c79ef807-77e8-472c-aa5f-d92ce5648992-catalog-content\") pod \"certified-operators-s7jvc\" (UID: \"c79ef807-77e8-472c-aa5f-d92ce5648992\") " pod="openshift-marketplace/certified-operators-s7jvc" Nov 27 07:51:34 crc kubenswrapper[4971]: I1127 07:51:34.109044 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4n56v\" (UniqueName: \"kubernetes.io/projected/c79ef807-77e8-472c-aa5f-d92ce5648992-kube-api-access-4n56v\") pod \"certified-operators-s7jvc\" (UID: \"c79ef807-77e8-472c-aa5f-d92ce5648992\") " pod="openshift-marketplace/certified-operators-s7jvc" Nov 27 07:51:34 crc kubenswrapper[4971]: I1127 07:51:34.226143 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s7jvc" Nov 27 07:51:34 crc kubenswrapper[4971]: I1127 07:51:34.760451 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s7jvc"] Nov 27 07:51:35 crc kubenswrapper[4971]: I1127 07:51:35.606277 4971 generic.go:334] "Generic (PLEG): container finished" podID="c79ef807-77e8-472c-aa5f-d92ce5648992" containerID="7fcc71dc11f27c6dab33923f3a0c86388273fc0434eedc0e6627639bab6962c0" exitCode=0 Nov 27 07:51:35 crc kubenswrapper[4971]: I1127 07:51:35.606362 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7jvc" event={"ID":"c79ef807-77e8-472c-aa5f-d92ce5648992","Type":"ContainerDied","Data":"7fcc71dc11f27c6dab33923f3a0c86388273fc0434eedc0e6627639bab6962c0"} Nov 27 07:51:35 crc kubenswrapper[4971]: I1127 07:51:35.606819 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7jvc" event={"ID":"c79ef807-77e8-472c-aa5f-d92ce5648992","Type":"ContainerStarted","Data":"ad950b859bbb4a7b13f1aef3b36209581bee0811c6cb50afa7e3e70a36c9cdcb"} Nov 27 07:51:36 crc kubenswrapper[4971]: I1127 07:51:36.615507 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7jvc" event={"ID":"c79ef807-77e8-472c-aa5f-d92ce5648992","Type":"ContainerStarted","Data":"12fbfff4e6042d4e7176df444cf659adffe05ea034b2e7ce7299749b0493c007"} Nov 27 07:51:37 crc kubenswrapper[4971]: I1127 07:51:37.633440 4971 generic.go:334] "Generic (PLEG): container finished" podID="c79ef807-77e8-472c-aa5f-d92ce5648992" containerID="12fbfff4e6042d4e7176df444cf659adffe05ea034b2e7ce7299749b0493c007" exitCode=0 Nov 27 07:51:37 crc kubenswrapper[4971]: I1127 07:51:37.633506 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7jvc" event={"ID":"c79ef807-77e8-472c-aa5f-d92ce5648992","Type":"ContainerDied","Data":"12fbfff4e6042d4e7176df444cf659adffe05ea034b2e7ce7299749b0493c007"} Nov 27 07:51:38 crc kubenswrapper[4971]: I1127 07:51:38.645078 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7jvc" event={"ID":"c79ef807-77e8-472c-aa5f-d92ce5648992","Type":"ContainerStarted","Data":"f3cce2d0b3ac19bf6e54db5bbcecda8b74cb7e74dec3dd5b10b18dc073fd39ea"} Nov 27 07:51:38 crc kubenswrapper[4971]: I1127 07:51:38.674352 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-s7jvc" podStartSLOduration=3.191062247 podStartE2EDuration="5.674312959s" podCreationTimestamp="2025-11-27 07:51:33 +0000 UTC" firstStartedPulling="2025-11-27 07:51:35.609551332 +0000 UTC m=+3533.801595250" lastFinishedPulling="2025-11-27 07:51:38.092801994 +0000 UTC m=+3536.284845962" observedRunningTime="2025-11-27 07:51:38.668457162 +0000 UTC m=+3536.860501090" watchObservedRunningTime="2025-11-27 07:51:38.674312959 +0000 UTC m=+3536.866356877" Nov 27 07:51:44 crc kubenswrapper[4971]: I1127 07:51:44.226377 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-s7jvc" Nov 27 07:51:44 crc kubenswrapper[4971]: I1127 07:51:44.228840 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-s7jvc" Nov 27 07:51:44 crc kubenswrapper[4971]: I1127 07:51:44.284295 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-s7jvc" Nov 27 07:51:44 crc kubenswrapper[4971]: I1127 07:51:44.777457 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-s7jvc" Nov 27 07:51:44 crc kubenswrapper[4971]: I1127 07:51:44.853445 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s7jvc"] Nov 27 07:51:46 crc kubenswrapper[4971]: I1127 07:51:46.738663 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-s7jvc" podUID="c79ef807-77e8-472c-aa5f-d92ce5648992" containerName="registry-server" containerID="cri-o://f3cce2d0b3ac19bf6e54db5bbcecda8b74cb7e74dec3dd5b10b18dc073fd39ea" gracePeriod=2 Nov 27 07:51:47 crc kubenswrapper[4971]: I1127 07:51:47.747846 4971 generic.go:334] "Generic (PLEG): container finished" podID="c79ef807-77e8-472c-aa5f-d92ce5648992" containerID="f3cce2d0b3ac19bf6e54db5bbcecda8b74cb7e74dec3dd5b10b18dc073fd39ea" exitCode=0 Nov 27 07:51:47 crc kubenswrapper[4971]: I1127 07:51:47.747959 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7jvc" event={"ID":"c79ef807-77e8-472c-aa5f-d92ce5648992","Type":"ContainerDied","Data":"f3cce2d0b3ac19bf6e54db5bbcecda8b74cb7e74dec3dd5b10b18dc073fd39ea"} Nov 27 07:51:47 crc kubenswrapper[4971]: I1127 07:51:47.748323 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7jvc" event={"ID":"c79ef807-77e8-472c-aa5f-d92ce5648992","Type":"ContainerDied","Data":"ad950b859bbb4a7b13f1aef3b36209581bee0811c6cb50afa7e3e70a36c9cdcb"} Nov 27 07:51:47 crc kubenswrapper[4971]: I1127 07:51:47.748347 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad950b859bbb4a7b13f1aef3b36209581bee0811c6cb50afa7e3e70a36c9cdcb" Nov 27 07:51:47 crc kubenswrapper[4971]: I1127 07:51:47.751208 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s7jvc" Nov 27 07:51:47 crc kubenswrapper[4971]: I1127 07:51:47.914713 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c79ef807-77e8-472c-aa5f-d92ce5648992-utilities\") pod \"c79ef807-77e8-472c-aa5f-d92ce5648992\" (UID: \"c79ef807-77e8-472c-aa5f-d92ce5648992\") " Nov 27 07:51:47 crc kubenswrapper[4971]: I1127 07:51:47.914847 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c79ef807-77e8-472c-aa5f-d92ce5648992-catalog-content\") pod \"c79ef807-77e8-472c-aa5f-d92ce5648992\" (UID: \"c79ef807-77e8-472c-aa5f-d92ce5648992\") " Nov 27 07:51:47 crc kubenswrapper[4971]: I1127 07:51:47.914891 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4n56v\" (UniqueName: \"kubernetes.io/projected/c79ef807-77e8-472c-aa5f-d92ce5648992-kube-api-access-4n56v\") pod \"c79ef807-77e8-472c-aa5f-d92ce5648992\" (UID: \"c79ef807-77e8-472c-aa5f-d92ce5648992\") " Nov 27 07:51:47 crc kubenswrapper[4971]: I1127 07:51:47.915782 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c79ef807-77e8-472c-aa5f-d92ce5648992-utilities" (OuterVolumeSpecName: "utilities") pod "c79ef807-77e8-472c-aa5f-d92ce5648992" (UID: "c79ef807-77e8-472c-aa5f-d92ce5648992"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:51:47 crc kubenswrapper[4971]: I1127 07:51:47.920762 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c79ef807-77e8-472c-aa5f-d92ce5648992-kube-api-access-4n56v" (OuterVolumeSpecName: "kube-api-access-4n56v") pod "c79ef807-77e8-472c-aa5f-d92ce5648992" (UID: "c79ef807-77e8-472c-aa5f-d92ce5648992"). InnerVolumeSpecName "kube-api-access-4n56v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:51:47 crc kubenswrapper[4971]: I1127 07:51:47.962763 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c79ef807-77e8-472c-aa5f-d92ce5648992-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c79ef807-77e8-472c-aa5f-d92ce5648992" (UID: "c79ef807-77e8-472c-aa5f-d92ce5648992"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:51:48 crc kubenswrapper[4971]: I1127 07:51:48.016491 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c79ef807-77e8-472c-aa5f-d92ce5648992-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 07:51:48 crc kubenswrapper[4971]: I1127 07:51:48.016546 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c79ef807-77e8-472c-aa5f-d92ce5648992-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 07:51:48 crc kubenswrapper[4971]: I1127 07:51:48.016559 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4n56v\" (UniqueName: \"kubernetes.io/projected/c79ef807-77e8-472c-aa5f-d92ce5648992-kube-api-access-4n56v\") on node \"crc\" DevicePath \"\"" Nov 27 07:51:48 crc kubenswrapper[4971]: I1127 07:51:48.760064 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s7jvc" Nov 27 07:51:48 crc kubenswrapper[4971]: I1127 07:51:48.793439 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s7jvc"] Nov 27 07:51:48 crc kubenswrapper[4971]: I1127 07:51:48.806357 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-s7jvc"] Nov 27 07:51:50 crc kubenswrapper[4971]: I1127 07:51:50.566526 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c79ef807-77e8-472c-aa5f-d92ce5648992" path="/var/lib/kubelet/pods/c79ef807-77e8-472c-aa5f-d92ce5648992/volumes" Nov 27 07:51:56 crc kubenswrapper[4971]: I1127 07:51:56.413485 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:51:56 crc kubenswrapper[4971]: I1127 07:51:56.414500 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:52:26 crc kubenswrapper[4971]: I1127 07:52:26.414159 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:52:26 crc kubenswrapper[4971]: I1127 07:52:26.415446 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:52:56 crc kubenswrapper[4971]: I1127 07:52:56.413264 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:52:56 crc kubenswrapper[4971]: I1127 07:52:56.414052 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 07:52:56 crc kubenswrapper[4971]: I1127 07:52:56.414132 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 07:52:56 crc kubenswrapper[4971]: I1127 07:52:56.415375 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be 
restarted" Nov 27 07:52:56 crc kubenswrapper[4971]: I1127 07:52:56.415491 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" gracePeriod=600 Nov 27 07:52:56 crc kubenswrapper[4971]: E1127 07:52:56.562644 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:52:57 crc kubenswrapper[4971]: I1127 07:52:57.392871 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" exitCode=0 Nov 27 07:52:57 crc kubenswrapper[4971]: I1127 07:52:57.392926 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863"} Nov 27 07:52:57 crc kubenswrapper[4971]: I1127 07:52:57.392981 4971 scope.go:117] "RemoveContainer" containerID="6997d0f598181cb1d1e5b044e2cbe23beb9ee620d82bd962670348cd0c073626" Nov 27 07:52:57 crc kubenswrapper[4971]: I1127 07:52:57.393611 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:52:57 crc kubenswrapper[4971]: E1127 07:52:57.393969 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:53:08 crc kubenswrapper[4971]: I1127 07:53:08.550009 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:53:08 crc kubenswrapper[4971]: E1127 07:53:08.550881 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:53:19 crc kubenswrapper[4971]: I1127 07:53:19.551276 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:53:19 crc kubenswrapper[4971]: E1127 07:53:19.552181 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:53:28 crc kubenswrapper[4971]: I1127 07:53:28.495615 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-62mbs"] Nov 27 07:53:28 crc kubenswrapper[4971]: E1127 07:53:28.496778 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c79ef807-77e8-472c-aa5f-d92ce5648992" containerName="extract-content" Nov 27 07:53:28 crc kubenswrapper[4971]: I1127 07:53:28.496794 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c79ef807-77e8-472c-aa5f-d92ce5648992" containerName="extract-content" Nov 27 07:53:28 crc kubenswrapper[4971]: E1127 07:53:28.496810 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c79ef807-77e8-472c-aa5f-d92ce5648992" containerName="registry-server" Nov 27 07:53:28 crc kubenswrapper[4971]: I1127 07:53:28.496817 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c79ef807-77e8-472c-aa5f-d92ce5648992" containerName="registry-server" Nov 27 07:53:28 crc kubenswrapper[4971]: E1127 07:53:28.496835 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c79ef807-77e8-472c-aa5f-d92ce5648992" containerName="extract-utilities" Nov 27 07:53:28 crc kubenswrapper[4971]: I1127 07:53:28.496842 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c79ef807-77e8-472c-aa5f-d92ce5648992" containerName="extract-utilities" Nov 27 07:53:28 crc kubenswrapper[4971]: I1127 07:53:28.497011 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c79ef807-77e8-472c-aa5f-d92ce5648992" containerName="registry-server" Nov 27 07:53:28 crc kubenswrapper[4971]: I1127 07:53:28.498092 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-62mbs" Nov 27 07:53:28 crc kubenswrapper[4971]: I1127 07:53:28.510136 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-62mbs"] Nov 27 07:53:28 crc kubenswrapper[4971]: I1127 07:53:28.525274 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9zw6\" (UniqueName: \"kubernetes.io/projected/a1b77f9d-dd8e-4574-a652-f99d4dfad8ed-kube-api-access-w9zw6\") pod \"redhat-marketplace-62mbs\" (UID: \"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed\") " pod="openshift-marketplace/redhat-marketplace-62mbs" Nov 27 07:53:28 crc kubenswrapper[4971]: I1127 07:53:28.525343 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1b77f9d-dd8e-4574-a652-f99d4dfad8ed-catalog-content\") pod \"redhat-marketplace-62mbs\" (UID: \"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed\") " pod="openshift-marketplace/redhat-marketplace-62mbs" Nov 27 07:53:28 crc kubenswrapper[4971]: I1127 07:53:28.525473 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1b77f9d-dd8e-4574-a652-f99d4dfad8ed-utilities\") pod \"redhat-marketplace-62mbs\" (UID: \"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed\") " pod="openshift-marketplace/redhat-marketplace-62mbs" Nov 27 07:53:28 crc kubenswrapper[4971]: I1127 07:53:28.626865 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1b77f9d-dd8e-4574-a652-f99d4dfad8ed-utilities\") pod \"redhat-marketplace-62mbs\" (UID: \"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed\") " pod="openshift-marketplace/redhat-marketplace-62mbs" Nov 27 07:53:28 crc kubenswrapper[4971]: I1127 07:53:28.627067 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9zw6\" (UniqueName: \"kubernetes.io/projected/a1b77f9d-dd8e-4574-a652-f99d4dfad8ed-kube-api-access-w9zw6\") pod \"redhat-marketplace-62mbs\" (UID: \"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed\") " pod="openshift-marketplace/redhat-marketplace-62mbs" Nov 27 07:53:28 crc kubenswrapper[4971]: I1127 07:53:28.627102 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1b77f9d-dd8e-4574-a652-f99d4dfad8ed-catalog-content\") pod \"redhat-marketplace-62mbs\" (UID: \"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed\") " pod="openshift-marketplace/redhat-marketplace-62mbs" Nov 27 07:53:28 crc kubenswrapper[4971]: I1127 07:53:28.627496 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1b77f9d-dd8e-4574-a652-f99d4dfad8ed-utilities\") pod \"redhat-marketplace-62mbs\" (UID: \"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed\") " pod="openshift-marketplace/redhat-marketplace-62mbs" Nov 27 07:53:28 crc kubenswrapper[4971]: I1127 07:53:28.628606 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1b77f9d-dd8e-4574-a652-f99d4dfad8ed-catalog-content\") pod \"redhat-marketplace-62mbs\" (UID: \"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed\") " pod="openshift-marketplace/redhat-marketplace-62mbs" Nov 27 07:53:28 crc kubenswrapper[4971]: I1127 07:53:28.651171 4971 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-w9zw6\" (UniqueName: \"kubernetes.io/projected/a1b77f9d-dd8e-4574-a652-f99d4dfad8ed-kube-api-access-w9zw6\") pod \"redhat-marketplace-62mbs\" (UID: \"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed\") " pod="openshift-marketplace/redhat-marketplace-62mbs" Nov 27 07:53:28 crc kubenswrapper[4971]: I1127 07:53:28.826926 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-62mbs" Nov 27 07:53:29 crc kubenswrapper[4971]: I1127 07:53:29.331137 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-62mbs"] Nov 27 07:53:29 crc kubenswrapper[4971]: I1127 07:53:29.708040 4971 generic.go:334] "Generic (PLEG): container finished" podID="a1b77f9d-dd8e-4574-a652-f99d4dfad8ed" containerID="4e378692279c3e406be0071d4f8c724ace057dea654bbe58b9d4966f3ea8e677" exitCode=0 Nov 27 07:53:29 crc kubenswrapper[4971]: I1127 07:53:29.708092 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-62mbs" event={"ID":"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed","Type":"ContainerDied","Data":"4e378692279c3e406be0071d4f8c724ace057dea654bbe58b9d4966f3ea8e677"} Nov 27 07:53:29 crc kubenswrapper[4971]: I1127 07:53:29.708122 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-62mbs" event={"ID":"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed","Type":"ContainerStarted","Data":"7da17075dd139ac806b3c61cf0ff9eb9effa9620ed2a76f228ccc05e05542097"} Nov 27 07:53:30 crc kubenswrapper[4971]: I1127 07:53:30.550668 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:53:30 crc kubenswrapper[4971]: E1127 07:53:30.550957 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:53:30 crc kubenswrapper[4971]: I1127 07:53:30.718184 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-62mbs" event={"ID":"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed","Type":"ContainerStarted","Data":"31e36bb2c7858b6ff0c21c91c53eac3bc618f75163f866f385d9a5ea5576025c"} Nov 27 07:53:31 crc kubenswrapper[4971]: I1127 07:53:31.727421 4971 generic.go:334] "Generic (PLEG): container finished" podID="a1b77f9d-dd8e-4574-a652-f99d4dfad8ed" containerID="31e36bb2c7858b6ff0c21c91c53eac3bc618f75163f866f385d9a5ea5576025c" exitCode=0 Nov 27 07:53:31 crc kubenswrapper[4971]: I1127 07:53:31.727518 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-62mbs" event={"ID":"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed","Type":"ContainerDied","Data":"31e36bb2c7858b6ff0c21c91c53eac3bc618f75163f866f385d9a5ea5576025c"} Nov 27 07:53:32 crc kubenswrapper[4971]: I1127 07:53:32.737979 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-62mbs" event={"ID":"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed","Type":"ContainerStarted","Data":"e72794a576244652b9acc541caf367155d51fa3f48380dd66d1172dc39ba7664"} Nov 27 07:53:32 crc kubenswrapper[4971]: I1127 07:53:32.765211 4971 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openshift-marketplace/redhat-marketplace-62mbs" podStartSLOduration=1.940152335 podStartE2EDuration="4.765187259s" podCreationTimestamp="2025-11-27 07:53:28 +0000 UTC" firstStartedPulling="2025-11-27 07:53:29.710342725 +0000 UTC m=+3647.902386643" lastFinishedPulling="2025-11-27 07:53:32.535377649 +0000 UTC m=+3650.727421567" observedRunningTime="2025-11-27 07:53:32.758849277 +0000 UTC m=+3650.950893195" watchObservedRunningTime="2025-11-27 07:53:32.765187259 +0000 UTC m=+3650.957231177" Nov 27 07:53:34 crc kubenswrapper[4971]: I1127 07:53:34.871829 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9cbl8"] Nov 27 07:53:34 crc kubenswrapper[4971]: I1127 07:53:34.878209 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9cbl8" Nov 27 07:53:34 crc kubenswrapper[4971]: I1127 07:53:34.883276 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9cbl8"] Nov 27 07:53:35 crc kubenswrapper[4971]: I1127 07:53:35.031315 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtqcv\" (UniqueName: \"kubernetes.io/projected/83c05e9d-37de-4f86-82e3-9b7a20c70314-kube-api-access-xtqcv\") pod \"community-operators-9cbl8\" (UID: \"83c05e9d-37de-4f86-82e3-9b7a20c70314\") " pod="openshift-marketplace/community-operators-9cbl8" Nov 27 07:53:35 crc kubenswrapper[4971]: I1127 07:53:35.031610 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83c05e9d-37de-4f86-82e3-9b7a20c70314-utilities\") pod \"community-operators-9cbl8\" (UID: \"83c05e9d-37de-4f86-82e3-9b7a20c70314\") " pod="openshift-marketplace/community-operators-9cbl8" Nov 27 07:53:35 crc kubenswrapper[4971]: I1127 07:53:35.031922 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83c05e9d-37de-4f86-82e3-9b7a20c70314-catalog-content\") pod \"community-operators-9cbl8\" (UID: \"83c05e9d-37de-4f86-82e3-9b7a20c70314\") " pod="openshift-marketplace/community-operators-9cbl8" Nov 27 07:53:35 crc kubenswrapper[4971]: I1127 07:53:35.132885 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83c05e9d-37de-4f86-82e3-9b7a20c70314-utilities\") pod \"community-operators-9cbl8\" (UID: \"83c05e9d-37de-4f86-82e3-9b7a20c70314\") " pod="openshift-marketplace/community-operators-9cbl8" Nov 27 07:53:35 crc kubenswrapper[4971]: I1127 07:53:35.132990 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83c05e9d-37de-4f86-82e3-9b7a20c70314-catalog-content\") pod \"community-operators-9cbl8\" (UID: \"83c05e9d-37de-4f86-82e3-9b7a20c70314\") " pod="openshift-marketplace/community-operators-9cbl8" Nov 27 07:53:35 crc kubenswrapper[4971]: I1127 07:53:35.133043 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtqcv\" (UniqueName: \"kubernetes.io/projected/83c05e9d-37de-4f86-82e3-9b7a20c70314-kube-api-access-xtqcv\") pod \"community-operators-9cbl8\" (UID: \"83c05e9d-37de-4f86-82e3-9b7a20c70314\") " pod="openshift-marketplace/community-operators-9cbl8" Nov 27 07:53:35 crc kubenswrapper[4971]: I1127 
07:53:35.133955 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83c05e9d-37de-4f86-82e3-9b7a20c70314-utilities\") pod \"community-operators-9cbl8\" (UID: \"83c05e9d-37de-4f86-82e3-9b7a20c70314\") " pod="openshift-marketplace/community-operators-9cbl8" Nov 27 07:53:35 crc kubenswrapper[4971]: I1127 07:53:35.134113 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83c05e9d-37de-4f86-82e3-9b7a20c70314-catalog-content\") pod \"community-operators-9cbl8\" (UID: \"83c05e9d-37de-4f86-82e3-9b7a20c70314\") " pod="openshift-marketplace/community-operators-9cbl8" Nov 27 07:53:35 crc kubenswrapper[4971]: I1127 07:53:35.152750 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtqcv\" (UniqueName: \"kubernetes.io/projected/83c05e9d-37de-4f86-82e3-9b7a20c70314-kube-api-access-xtqcv\") pod \"community-operators-9cbl8\" (UID: \"83c05e9d-37de-4f86-82e3-9b7a20c70314\") " pod="openshift-marketplace/community-operators-9cbl8" Nov 27 07:53:35 crc kubenswrapper[4971]: I1127 07:53:35.200855 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9cbl8" Nov 27 07:53:35 crc kubenswrapper[4971]: I1127 07:53:35.730220 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9cbl8"] Nov 27 07:53:35 crc kubenswrapper[4971]: I1127 07:53:35.764377 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9cbl8" event={"ID":"83c05e9d-37de-4f86-82e3-9b7a20c70314","Type":"ContainerStarted","Data":"f4003a546ab7157f1cdcfd90ebc78991cca57e165c9e64830e042bcf2da944d3"} Nov 27 07:53:36 crc kubenswrapper[4971]: I1127 07:53:36.772924 4971 generic.go:334] "Generic (PLEG): container finished" podID="83c05e9d-37de-4f86-82e3-9b7a20c70314" containerID="76effd6c5633610f3785c13c9d02725b7f498ea783f8ecb9a66f5f9a574e4b79" exitCode=0 Nov 27 07:53:36 crc kubenswrapper[4971]: I1127 07:53:36.773019 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9cbl8" event={"ID":"83c05e9d-37de-4f86-82e3-9b7a20c70314","Type":"ContainerDied","Data":"76effd6c5633610f3785c13c9d02725b7f498ea783f8ecb9a66f5f9a574e4b79"} Nov 27 07:53:37 crc kubenswrapper[4971]: I1127 07:53:37.786131 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9cbl8" event={"ID":"83c05e9d-37de-4f86-82e3-9b7a20c70314","Type":"ContainerStarted","Data":"3537061fd83f5696d14c14800e785c64fc58c954491c25e89f3ec46e4d25a359"} Nov 27 07:53:38 crc kubenswrapper[4971]: I1127 07:53:38.797570 4971 generic.go:334] "Generic (PLEG): container finished" podID="83c05e9d-37de-4f86-82e3-9b7a20c70314" containerID="3537061fd83f5696d14c14800e785c64fc58c954491c25e89f3ec46e4d25a359" exitCode=0 Nov 27 07:53:38 crc kubenswrapper[4971]: I1127 07:53:38.797667 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9cbl8" event={"ID":"83c05e9d-37de-4f86-82e3-9b7a20c70314","Type":"ContainerDied","Data":"3537061fd83f5696d14c14800e785c64fc58c954491c25e89f3ec46e4d25a359"} Nov 27 07:53:38 crc kubenswrapper[4971]: I1127 07:53:38.829306 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-62mbs" Nov 27 07:53:38 crc kubenswrapper[4971]: I1127 07:53:38.829383 4971 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-62mbs" Nov 27 07:53:38 crc kubenswrapper[4971]: I1127 07:53:38.883963 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-62mbs" Nov 27 07:53:39 crc kubenswrapper[4971]: I1127 07:53:39.809575 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9cbl8" event={"ID":"83c05e9d-37de-4f86-82e3-9b7a20c70314","Type":"ContainerStarted","Data":"3f8e39219ba65b7b591d208ad27c9d04ebc40aa912ef30b48ae15c08cee897d0"} Nov 27 07:53:39 crc kubenswrapper[4971]: I1127 07:53:39.841459 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9cbl8" podStartSLOduration=3.314957651 podStartE2EDuration="5.841433959s" podCreationTimestamp="2025-11-27 07:53:34 +0000 UTC" firstStartedPulling="2025-11-27 07:53:36.77488231 +0000 UTC m=+3654.966926228" lastFinishedPulling="2025-11-27 07:53:39.301358628 +0000 UTC m=+3657.493402536" observedRunningTime="2025-11-27 07:53:39.832237286 +0000 UTC m=+3658.024281214" watchObservedRunningTime="2025-11-27 07:53:39.841433959 +0000 UTC m=+3658.033477907" Nov 27 07:53:39 crc kubenswrapper[4971]: I1127 07:53:39.865201 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-62mbs" Nov 27 07:53:41 crc kubenswrapper[4971]: I1127 07:53:41.260676 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-62mbs"] Nov 27 07:53:41 crc kubenswrapper[4971]: I1127 07:53:41.834131 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-62mbs" podUID="a1b77f9d-dd8e-4574-a652-f99d4dfad8ed" containerName="registry-server" containerID="cri-o://e72794a576244652b9acc541caf367155d51fa3f48380dd66d1172dc39ba7664" gracePeriod=2 Nov 27 07:53:41 crc kubenswrapper[4971]: E1127 07:53:41.872986 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1b77f9d_dd8e_4574_a652_f99d4dfad8ed.slice/crio-e72794a576244652b9acc541caf367155d51fa3f48380dd66d1172dc39ba7664.scope\": RecentStats: unable to find data in memory cache]" Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.269051 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-62mbs" Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.370288 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9zw6\" (UniqueName: \"kubernetes.io/projected/a1b77f9d-dd8e-4574-a652-f99d4dfad8ed-kube-api-access-w9zw6\") pod \"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed\" (UID: \"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed\") " Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.370833 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1b77f9d-dd8e-4574-a652-f99d4dfad8ed-utilities\") pod \"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed\" (UID: \"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed\") " Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.370867 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1b77f9d-dd8e-4574-a652-f99d4dfad8ed-catalog-content\") pod \"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed\" (UID: \"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed\") " Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.373947 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1b77f9d-dd8e-4574-a652-f99d4dfad8ed-utilities" (OuterVolumeSpecName: "utilities") pod "a1b77f9d-dd8e-4574-a652-f99d4dfad8ed" (UID: "a1b77f9d-dd8e-4574-a652-f99d4dfad8ed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.406898 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1b77f9d-dd8e-4574-a652-f99d4dfad8ed-kube-api-access-w9zw6" (OuterVolumeSpecName: "kube-api-access-w9zw6") pod "a1b77f9d-dd8e-4574-a652-f99d4dfad8ed" (UID: "a1b77f9d-dd8e-4574-a652-f99d4dfad8ed"). InnerVolumeSpecName "kube-api-access-w9zw6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.431708 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1b77f9d-dd8e-4574-a652-f99d4dfad8ed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a1b77f9d-dd8e-4574-a652-f99d4dfad8ed" (UID: "a1b77f9d-dd8e-4574-a652-f99d4dfad8ed"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.473770 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9zw6\" (UniqueName: \"kubernetes.io/projected/a1b77f9d-dd8e-4574-a652-f99d4dfad8ed-kube-api-access-w9zw6\") on node \"crc\" DevicePath \"\"" Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.473838 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1b77f9d-dd8e-4574-a652-f99d4dfad8ed-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.473856 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1b77f9d-dd8e-4574-a652-f99d4dfad8ed-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.844475 4971 generic.go:334] "Generic (PLEG): container finished" podID="a1b77f9d-dd8e-4574-a652-f99d4dfad8ed" containerID="e72794a576244652b9acc541caf367155d51fa3f48380dd66d1172dc39ba7664" exitCode=0 Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.844536 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-62mbs" event={"ID":"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed","Type":"ContainerDied","Data":"e72794a576244652b9acc541caf367155d51fa3f48380dd66d1172dc39ba7664"} Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.844598 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-62mbs" event={"ID":"a1b77f9d-dd8e-4574-a652-f99d4dfad8ed","Type":"ContainerDied","Data":"7da17075dd139ac806b3c61cf0ff9eb9effa9620ed2a76f228ccc05e05542097"} Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.844629 4971 scope.go:117] "RemoveContainer" containerID="e72794a576244652b9acc541caf367155d51fa3f48380dd66d1172dc39ba7664" Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.844867 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-62mbs" Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.867679 4971 scope.go:117] "RemoveContainer" containerID="31e36bb2c7858b6ff0c21c91c53eac3bc618f75163f866f385d9a5ea5576025c" Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.892156 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-62mbs"] Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.905138 4971 scope.go:117] "RemoveContainer" containerID="4e378692279c3e406be0071d4f8c724ace057dea654bbe58b9d4966f3ea8e677" Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.911490 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-62mbs"] Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.924860 4971 scope.go:117] "RemoveContainer" containerID="e72794a576244652b9acc541caf367155d51fa3f48380dd66d1172dc39ba7664" Nov 27 07:53:42 crc kubenswrapper[4971]: E1127 07:53:42.930945 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e72794a576244652b9acc541caf367155d51fa3f48380dd66d1172dc39ba7664\": container with ID starting with e72794a576244652b9acc541caf367155d51fa3f48380dd66d1172dc39ba7664 not found: ID does not exist" containerID="e72794a576244652b9acc541caf367155d51fa3f48380dd66d1172dc39ba7664" Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.930981 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e72794a576244652b9acc541caf367155d51fa3f48380dd66d1172dc39ba7664"} err="failed to get container status \"e72794a576244652b9acc541caf367155d51fa3f48380dd66d1172dc39ba7664\": rpc error: code = NotFound desc = could not find container \"e72794a576244652b9acc541caf367155d51fa3f48380dd66d1172dc39ba7664\": container with ID starting with e72794a576244652b9acc541caf367155d51fa3f48380dd66d1172dc39ba7664 not found: ID does not exist" Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.931005 4971 scope.go:117] "RemoveContainer" containerID="31e36bb2c7858b6ff0c21c91c53eac3bc618f75163f866f385d9a5ea5576025c" Nov 27 07:53:42 crc kubenswrapper[4971]: E1127 07:53:42.931543 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31e36bb2c7858b6ff0c21c91c53eac3bc618f75163f866f385d9a5ea5576025c\": container with ID starting with 31e36bb2c7858b6ff0c21c91c53eac3bc618f75163f866f385d9a5ea5576025c not found: ID does not exist" containerID="31e36bb2c7858b6ff0c21c91c53eac3bc618f75163f866f385d9a5ea5576025c" Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.931569 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31e36bb2c7858b6ff0c21c91c53eac3bc618f75163f866f385d9a5ea5576025c"} err="failed to get container status \"31e36bb2c7858b6ff0c21c91c53eac3bc618f75163f866f385d9a5ea5576025c\": rpc error: code = NotFound desc = could not find container \"31e36bb2c7858b6ff0c21c91c53eac3bc618f75163f866f385d9a5ea5576025c\": container with ID starting with 31e36bb2c7858b6ff0c21c91c53eac3bc618f75163f866f385d9a5ea5576025c not found: ID does not exist" Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.931583 4971 scope.go:117] "RemoveContainer" containerID="4e378692279c3e406be0071d4f8c724ace057dea654bbe58b9d4966f3ea8e677" Nov 27 07:53:42 crc kubenswrapper[4971]: E1127 07:53:42.931983 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"4e378692279c3e406be0071d4f8c724ace057dea654bbe58b9d4966f3ea8e677\": container with ID starting with 4e378692279c3e406be0071d4f8c724ace057dea654bbe58b9d4966f3ea8e677 not found: ID does not exist" containerID="4e378692279c3e406be0071d4f8c724ace057dea654bbe58b9d4966f3ea8e677" Nov 27 07:53:42 crc kubenswrapper[4971]: I1127 07:53:42.932108 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e378692279c3e406be0071d4f8c724ace057dea654bbe58b9d4966f3ea8e677"} err="failed to get container status \"4e378692279c3e406be0071d4f8c724ace057dea654bbe58b9d4966f3ea8e677\": rpc error: code = NotFound desc = could not find container \"4e378692279c3e406be0071d4f8c724ace057dea654bbe58b9d4966f3ea8e677\": container with ID starting with 4e378692279c3e406be0071d4f8c724ace057dea654bbe58b9d4966f3ea8e677 not found: ID does not exist" Nov 27 07:53:43 crc kubenswrapper[4971]: I1127 07:53:43.550795 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:53:43 crc kubenswrapper[4971]: E1127 07:53:43.551363 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:53:44 crc kubenswrapper[4971]: I1127 07:53:44.564339 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1b77f9d-dd8e-4574-a652-f99d4dfad8ed" path="/var/lib/kubelet/pods/a1b77f9d-dd8e-4574-a652-f99d4dfad8ed/volumes" Nov 27 07:53:45 crc kubenswrapper[4971]: I1127 07:53:45.202909 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9cbl8" Nov 27 07:53:45 crc kubenswrapper[4971]: I1127 07:53:45.202973 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9cbl8" Nov 27 07:53:45 crc kubenswrapper[4971]: I1127 07:53:45.252756 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9cbl8" Nov 27 07:53:45 crc kubenswrapper[4971]: I1127 07:53:45.939197 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9cbl8" Nov 27 07:53:46 crc kubenswrapper[4971]: I1127 07:53:46.257590 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9cbl8"] Nov 27 07:53:47 crc kubenswrapper[4971]: I1127 07:53:47.892232 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9cbl8" podUID="83c05e9d-37de-4f86-82e3-9b7a20c70314" containerName="registry-server" containerID="cri-o://3f8e39219ba65b7b591d208ad27c9d04ebc40aa912ef30b48ae15c08cee897d0" gracePeriod=2 Nov 27 07:53:48 crc kubenswrapper[4971]: I1127 07:53:48.363594 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9cbl8" Nov 27 07:53:48 crc kubenswrapper[4971]: I1127 07:53:48.474613 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83c05e9d-37de-4f86-82e3-9b7a20c70314-catalog-content\") pod \"83c05e9d-37de-4f86-82e3-9b7a20c70314\" (UID: \"83c05e9d-37de-4f86-82e3-9b7a20c70314\") " Nov 27 07:53:48 crc kubenswrapper[4971]: I1127 07:53:48.474796 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83c05e9d-37de-4f86-82e3-9b7a20c70314-utilities\") pod \"83c05e9d-37de-4f86-82e3-9b7a20c70314\" (UID: \"83c05e9d-37de-4f86-82e3-9b7a20c70314\") " Nov 27 07:53:48 crc kubenswrapper[4971]: I1127 07:53:48.474898 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtqcv\" (UniqueName: \"kubernetes.io/projected/83c05e9d-37de-4f86-82e3-9b7a20c70314-kube-api-access-xtqcv\") pod \"83c05e9d-37de-4f86-82e3-9b7a20c70314\" (UID: \"83c05e9d-37de-4f86-82e3-9b7a20c70314\") " Nov 27 07:53:48 crc kubenswrapper[4971]: I1127 07:53:48.476065 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83c05e9d-37de-4f86-82e3-9b7a20c70314-utilities" (OuterVolumeSpecName: "utilities") pod "83c05e9d-37de-4f86-82e3-9b7a20c70314" (UID: "83c05e9d-37de-4f86-82e3-9b7a20c70314"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:53:48 crc kubenswrapper[4971]: I1127 07:53:48.482678 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83c05e9d-37de-4f86-82e3-9b7a20c70314-kube-api-access-xtqcv" (OuterVolumeSpecName: "kube-api-access-xtqcv") pod "83c05e9d-37de-4f86-82e3-9b7a20c70314" (UID: "83c05e9d-37de-4f86-82e3-9b7a20c70314"). InnerVolumeSpecName "kube-api-access-xtqcv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 07:53:48 crc kubenswrapper[4971]: I1127 07:53:48.535266 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83c05e9d-37de-4f86-82e3-9b7a20c70314-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "83c05e9d-37de-4f86-82e3-9b7a20c70314" (UID: "83c05e9d-37de-4f86-82e3-9b7a20c70314"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 07:53:48 crc kubenswrapper[4971]: I1127 07:53:48.577193 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83c05e9d-37de-4f86-82e3-9b7a20c70314-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 07:53:48 crc kubenswrapper[4971]: I1127 07:53:48.577230 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83c05e9d-37de-4f86-82e3-9b7a20c70314-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 07:53:48 crc kubenswrapper[4971]: I1127 07:53:48.577242 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtqcv\" (UniqueName: \"kubernetes.io/projected/83c05e9d-37de-4f86-82e3-9b7a20c70314-kube-api-access-xtqcv\") on node \"crc\" DevicePath \"\"" Nov 27 07:53:48 crc kubenswrapper[4971]: I1127 07:53:48.906601 4971 generic.go:334] "Generic (PLEG): container finished" podID="83c05e9d-37de-4f86-82e3-9b7a20c70314" containerID="3f8e39219ba65b7b591d208ad27c9d04ebc40aa912ef30b48ae15c08cee897d0" exitCode=0 Nov 27 07:53:48 crc kubenswrapper[4971]: I1127 07:53:48.906695 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9cbl8" event={"ID":"83c05e9d-37de-4f86-82e3-9b7a20c70314","Type":"ContainerDied","Data":"3f8e39219ba65b7b591d208ad27c9d04ebc40aa912ef30b48ae15c08cee897d0"} Nov 27 07:53:48 crc kubenswrapper[4971]: I1127 07:53:48.906732 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9cbl8" event={"ID":"83c05e9d-37de-4f86-82e3-9b7a20c70314","Type":"ContainerDied","Data":"f4003a546ab7157f1cdcfd90ebc78991cca57e165c9e64830e042bcf2da944d3"} Nov 27 07:53:48 crc kubenswrapper[4971]: I1127 07:53:48.906788 4971 scope.go:117] "RemoveContainer" containerID="3f8e39219ba65b7b591d208ad27c9d04ebc40aa912ef30b48ae15c08cee897d0" Nov 27 07:53:48 crc kubenswrapper[4971]: I1127 07:53:48.906980 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9cbl8" Nov 27 07:53:48 crc kubenswrapper[4971]: I1127 07:53:48.935569 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9cbl8"] Nov 27 07:53:48 crc kubenswrapper[4971]: I1127 07:53:48.941855 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9cbl8"] Nov 27 07:53:48 crc kubenswrapper[4971]: I1127 07:53:48.945425 4971 scope.go:117] "RemoveContainer" containerID="3537061fd83f5696d14c14800e785c64fc58c954491c25e89f3ec46e4d25a359" Nov 27 07:53:48 crc kubenswrapper[4971]: I1127 07:53:48.982261 4971 scope.go:117] "RemoveContainer" containerID="76effd6c5633610f3785c13c9d02725b7f498ea783f8ecb9a66f5f9a574e4b79" Nov 27 07:53:49 crc kubenswrapper[4971]: I1127 07:53:49.006199 4971 scope.go:117] "RemoveContainer" containerID="3f8e39219ba65b7b591d208ad27c9d04ebc40aa912ef30b48ae15c08cee897d0" Nov 27 07:53:49 crc kubenswrapper[4971]: E1127 07:53:49.007221 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f8e39219ba65b7b591d208ad27c9d04ebc40aa912ef30b48ae15c08cee897d0\": container with ID starting with 3f8e39219ba65b7b591d208ad27c9d04ebc40aa912ef30b48ae15c08cee897d0 not found: ID does not exist" containerID="3f8e39219ba65b7b591d208ad27c9d04ebc40aa912ef30b48ae15c08cee897d0" Nov 27 07:53:49 crc kubenswrapper[4971]: I1127 07:53:49.007279 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f8e39219ba65b7b591d208ad27c9d04ebc40aa912ef30b48ae15c08cee897d0"} err="failed to get container status \"3f8e39219ba65b7b591d208ad27c9d04ebc40aa912ef30b48ae15c08cee897d0\": rpc error: code = NotFound desc = could not find container \"3f8e39219ba65b7b591d208ad27c9d04ebc40aa912ef30b48ae15c08cee897d0\": container with ID starting with 3f8e39219ba65b7b591d208ad27c9d04ebc40aa912ef30b48ae15c08cee897d0 not found: ID does not exist" Nov 27 07:53:49 crc kubenswrapper[4971]: I1127 07:53:49.007323 4971 scope.go:117] "RemoveContainer" containerID="3537061fd83f5696d14c14800e785c64fc58c954491c25e89f3ec46e4d25a359" Nov 27 07:53:49 crc kubenswrapper[4971]: E1127 07:53:49.008575 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3537061fd83f5696d14c14800e785c64fc58c954491c25e89f3ec46e4d25a359\": container with ID starting with 3537061fd83f5696d14c14800e785c64fc58c954491c25e89f3ec46e4d25a359 not found: ID does not exist" containerID="3537061fd83f5696d14c14800e785c64fc58c954491c25e89f3ec46e4d25a359" Nov 27 07:53:49 crc kubenswrapper[4971]: I1127 07:53:49.008621 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3537061fd83f5696d14c14800e785c64fc58c954491c25e89f3ec46e4d25a359"} err="failed to get container status \"3537061fd83f5696d14c14800e785c64fc58c954491c25e89f3ec46e4d25a359\": rpc error: code = NotFound desc = could not find container \"3537061fd83f5696d14c14800e785c64fc58c954491c25e89f3ec46e4d25a359\": container with ID starting with 3537061fd83f5696d14c14800e785c64fc58c954491c25e89f3ec46e4d25a359 not found: ID does not exist" Nov 27 07:53:49 crc kubenswrapper[4971]: I1127 07:53:49.008648 4971 scope.go:117] "RemoveContainer" containerID="76effd6c5633610f3785c13c9d02725b7f498ea783f8ecb9a66f5f9a574e4b79" Nov 27 07:53:49 crc kubenswrapper[4971]: E1127 07:53:49.009484 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"76effd6c5633610f3785c13c9d02725b7f498ea783f8ecb9a66f5f9a574e4b79\": container with ID starting with 76effd6c5633610f3785c13c9d02725b7f498ea783f8ecb9a66f5f9a574e4b79 not found: ID does not exist" containerID="76effd6c5633610f3785c13c9d02725b7f498ea783f8ecb9a66f5f9a574e4b79" Nov 27 07:53:49 crc kubenswrapper[4971]: I1127 07:53:49.009548 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76effd6c5633610f3785c13c9d02725b7f498ea783f8ecb9a66f5f9a574e4b79"} err="failed to get container status \"76effd6c5633610f3785c13c9d02725b7f498ea783f8ecb9a66f5f9a574e4b79\": rpc error: code = NotFound desc = could not find container \"76effd6c5633610f3785c13c9d02725b7f498ea783f8ecb9a66f5f9a574e4b79\": container with ID starting with 76effd6c5633610f3785c13c9d02725b7f498ea783f8ecb9a66f5f9a574e4b79 not found: ID does not exist" Nov 27 07:53:50 crc kubenswrapper[4971]: I1127 07:53:50.559985 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83c05e9d-37de-4f86-82e3-9b7a20c70314" path="/var/lib/kubelet/pods/83c05e9d-37de-4f86-82e3-9b7a20c70314/volumes" Nov 27 07:53:52 crc kubenswrapper[4971]: E1127 07:53:52.066328 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1b77f9d_dd8e_4574_a652_f99d4dfad8ed.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1b77f9d_dd8e_4574_a652_f99d4dfad8ed.slice/crio-7da17075dd139ac806b3c61cf0ff9eb9effa9620ed2a76f228ccc05e05542097\": RecentStats: unable to find data in memory cache]" Nov 27 07:53:55 crc kubenswrapper[4971]: I1127 07:53:55.550255 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:53:55 crc kubenswrapper[4971]: E1127 07:53:55.550815 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:54:02 crc kubenswrapper[4971]: E1127 07:54:02.304303 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1b77f9d_dd8e_4574_a652_f99d4dfad8ed.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1b77f9d_dd8e_4574_a652_f99d4dfad8ed.slice/crio-7da17075dd139ac806b3c61cf0ff9eb9effa9620ed2a76f228ccc05e05542097\": RecentStats: unable to find data in memory cache]" Nov 27 07:54:09 crc kubenswrapper[4971]: I1127 07:54:09.550596 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:54:09 crc kubenswrapper[4971]: E1127 07:54:09.551299 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:54:12 crc kubenswrapper[4971]: E1127 07:54:12.524955 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1b77f9d_dd8e_4574_a652_f99d4dfad8ed.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1b77f9d_dd8e_4574_a652_f99d4dfad8ed.slice/crio-7da17075dd139ac806b3c61cf0ff9eb9effa9620ed2a76f228ccc05e05542097\": RecentStats: unable to find data in memory cache]" Nov 27 07:54:22 crc kubenswrapper[4971]: E1127 07:54:22.733414 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1b77f9d_dd8e_4574_a652_f99d4dfad8ed.slice/crio-7da17075dd139ac806b3c61cf0ff9eb9effa9620ed2a76f228ccc05e05542097\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1b77f9d_dd8e_4574_a652_f99d4dfad8ed.slice\": RecentStats: unable to find data in memory cache]" Nov 27 07:54:24 crc kubenswrapper[4971]: I1127 07:54:24.551438 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:54:24 crc kubenswrapper[4971]: E1127 07:54:24.552121 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:54:32 crc kubenswrapper[4971]: E1127 07:54:32.973096 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1b77f9d_dd8e_4574_a652_f99d4dfad8ed.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1b77f9d_dd8e_4574_a652_f99d4dfad8ed.slice/crio-7da17075dd139ac806b3c61cf0ff9eb9effa9620ed2a76f228ccc05e05542097\": RecentStats: unable to find data in memory cache]" Nov 27 07:54:35 crc kubenswrapper[4971]: I1127 07:54:35.550502 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:54:35 crc kubenswrapper[4971]: E1127 07:54:35.551316 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:54:49 crc kubenswrapper[4971]: I1127 07:54:49.550948 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:54:49 crc kubenswrapper[4971]: E1127 07:54:49.551811 4971 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:55:03 crc kubenswrapper[4971]: I1127 07:55:03.550467 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:55:03 crc kubenswrapper[4971]: E1127 07:55:03.551998 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:55:16 crc kubenswrapper[4971]: I1127 07:55:16.550927 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:55:16 crc kubenswrapper[4971]: E1127 07:55:16.552763 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:55:27 crc kubenswrapper[4971]: I1127 07:55:27.549897 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:55:27 crc kubenswrapper[4971]: E1127 07:55:27.550697 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:55:42 crc kubenswrapper[4971]: I1127 07:55:42.554572 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:55:42 crc kubenswrapper[4971]: E1127 07:55:42.567606 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:55:55 crc kubenswrapper[4971]: I1127 07:55:55.550531 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:55:55 crc kubenswrapper[4971]: E1127 07:55:55.551512 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:56:09 crc kubenswrapper[4971]: I1127 07:56:09.551318 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:56:09 crc kubenswrapper[4971]: E1127 07:56:09.552179 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:56:24 crc kubenswrapper[4971]: I1127 07:56:24.551059 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:56:24 crc kubenswrapper[4971]: E1127 07:56:24.552238 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:56:37 crc kubenswrapper[4971]: I1127 07:56:37.550192 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:56:37 crc kubenswrapper[4971]: E1127 07:56:37.550848 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:56:49 crc kubenswrapper[4971]: I1127 07:56:49.550387 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:56:49 crc kubenswrapper[4971]: E1127 07:56:49.551361 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:57:01 crc kubenswrapper[4971]: I1127 07:57:01.550395 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:57:01 crc kubenswrapper[4971]: E1127 07:57:01.553143 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" 
podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:57:15 crc kubenswrapper[4971]: I1127 07:57:15.551911 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:57:15 crc kubenswrapper[4971]: E1127 07:57:15.553430 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:57:27 crc kubenswrapper[4971]: I1127 07:57:27.550884 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:57:27 crc kubenswrapper[4971]: E1127 07:57:27.552092 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:57:42 crc kubenswrapper[4971]: I1127 07:57:42.554225 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:57:42 crc kubenswrapper[4971]: E1127 07:57:42.555732 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 07:57:51 crc kubenswrapper[4971]: I1127 07:57:51.236683 4971 scope.go:117] "RemoveContainer" containerID="7fcc71dc11f27c6dab33923f3a0c86388273fc0434eedc0e6627639bab6962c0" Nov 27 07:57:51 crc kubenswrapper[4971]: I1127 07:57:51.260141 4971 scope.go:117] "RemoveContainer" containerID="f3cce2d0b3ac19bf6e54db5bbcecda8b74cb7e74dec3dd5b10b18dc073fd39ea" Nov 27 07:57:51 crc kubenswrapper[4971]: I1127 07:57:51.291324 4971 scope.go:117] "RemoveContainer" containerID="12fbfff4e6042d4e7176df444cf659adffe05ea034b2e7ce7299749b0493c007" Nov 27 07:57:56 crc kubenswrapper[4971]: I1127 07:57:56.551031 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 07:57:57 crc kubenswrapper[4971]: I1127 07:57:57.245584 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"2f816c0bb2be43107714feedadec18053ad453801121cb429b2fc5ed0df8ec95"} Nov 27 07:59:56 crc kubenswrapper[4971]: I1127 07:59:56.413471 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 07:59:56 crc kubenswrapper[4971]: I1127 07:59:56.414254 4971 prober.go:107] 
"Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.185174 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc"] Nov 27 08:00:00 crc kubenswrapper[4971]: E1127 08:00:00.186096 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83c05e9d-37de-4f86-82e3-9b7a20c70314" containerName="registry-server" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.186119 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="83c05e9d-37de-4f86-82e3-9b7a20c70314" containerName="registry-server" Nov 27 08:00:00 crc kubenswrapper[4971]: E1127 08:00:00.186140 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83c05e9d-37de-4f86-82e3-9b7a20c70314" containerName="extract-utilities" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.186152 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="83c05e9d-37de-4f86-82e3-9b7a20c70314" containerName="extract-utilities" Nov 27 08:00:00 crc kubenswrapper[4971]: E1127 08:00:00.186169 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1b77f9d-dd8e-4574-a652-f99d4dfad8ed" containerName="extract-utilities" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.186178 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1b77f9d-dd8e-4574-a652-f99d4dfad8ed" containerName="extract-utilities" Nov 27 08:00:00 crc kubenswrapper[4971]: E1127 08:00:00.186200 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1b77f9d-dd8e-4574-a652-f99d4dfad8ed" containerName="registry-server" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.186208 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1b77f9d-dd8e-4574-a652-f99d4dfad8ed" containerName="registry-server" Nov 27 08:00:00 crc kubenswrapper[4971]: E1127 08:00:00.186224 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1b77f9d-dd8e-4574-a652-f99d4dfad8ed" containerName="extract-content" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.186231 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1b77f9d-dd8e-4574-a652-f99d4dfad8ed" containerName="extract-content" Nov 27 08:00:00 crc kubenswrapper[4971]: E1127 08:00:00.186249 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83c05e9d-37de-4f86-82e3-9b7a20c70314" containerName="extract-content" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.186256 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="83c05e9d-37de-4f86-82e3-9b7a20c70314" containerName="extract-content" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.186422 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1b77f9d-dd8e-4574-a652-f99d4dfad8ed" containerName="registry-server" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.186447 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="83c05e9d-37de-4f86-82e3-9b7a20c70314" containerName="registry-server" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.187245 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.189671 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.190417 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.199774 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc"] Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.311859 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqdkm\" (UniqueName: \"kubernetes.io/projected/19e3b6ec-43b6-4fa4-81f6-51c5385ae246-kube-api-access-gqdkm\") pod \"collect-profiles-29403840-rbjnc\" (UID: \"19e3b6ec-43b6-4fa4-81f6-51c5385ae246\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.312299 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/19e3b6ec-43b6-4fa4-81f6-51c5385ae246-config-volume\") pod \"collect-profiles-29403840-rbjnc\" (UID: \"19e3b6ec-43b6-4fa4-81f6-51c5385ae246\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.312404 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/19e3b6ec-43b6-4fa4-81f6-51c5385ae246-secret-volume\") pod \"collect-profiles-29403840-rbjnc\" (UID: \"19e3b6ec-43b6-4fa4-81f6-51c5385ae246\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.414320 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/19e3b6ec-43b6-4fa4-81f6-51c5385ae246-config-volume\") pod \"collect-profiles-29403840-rbjnc\" (UID: \"19e3b6ec-43b6-4fa4-81f6-51c5385ae246\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.414496 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/19e3b6ec-43b6-4fa4-81f6-51c5385ae246-secret-volume\") pod \"collect-profiles-29403840-rbjnc\" (UID: \"19e3b6ec-43b6-4fa4-81f6-51c5385ae246\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.414553 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqdkm\" (UniqueName: \"kubernetes.io/projected/19e3b6ec-43b6-4fa4-81f6-51c5385ae246-kube-api-access-gqdkm\") pod \"collect-profiles-29403840-rbjnc\" (UID: \"19e3b6ec-43b6-4fa4-81f6-51c5385ae246\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.415463 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/19e3b6ec-43b6-4fa4-81f6-51c5385ae246-config-volume\") pod 
\"collect-profiles-29403840-rbjnc\" (UID: \"19e3b6ec-43b6-4fa4-81f6-51c5385ae246\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.428725 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/19e3b6ec-43b6-4fa4-81f6-51c5385ae246-secret-volume\") pod \"collect-profiles-29403840-rbjnc\" (UID: \"19e3b6ec-43b6-4fa4-81f6-51c5385ae246\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.435238 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqdkm\" (UniqueName: \"kubernetes.io/projected/19e3b6ec-43b6-4fa4-81f6-51c5385ae246-kube-api-access-gqdkm\") pod \"collect-profiles-29403840-rbjnc\" (UID: \"19e3b6ec-43b6-4fa4-81f6-51c5385ae246\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.512283 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc" Nov 27 08:00:00 crc kubenswrapper[4971]: I1127 08:00:00.758609 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc"] Nov 27 08:00:01 crc kubenswrapper[4971]: I1127 08:00:01.255718 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc" event={"ID":"19e3b6ec-43b6-4fa4-81f6-51c5385ae246","Type":"ContainerStarted","Data":"2b53877f309230a7e50155c125aa08b61674704902686ec28b0f41db04dbc387"} Nov 27 08:00:01 crc kubenswrapper[4971]: I1127 08:00:01.255777 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc" event={"ID":"19e3b6ec-43b6-4fa4-81f6-51c5385ae246","Type":"ContainerStarted","Data":"919c6cec3e143a274f9982aca1d69c76b26bf6c280c4cf00ca5991ada033905a"} Nov 27 08:00:01 crc kubenswrapper[4971]: I1127 08:00:01.278802 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc" podStartSLOduration=1.278779221 podStartE2EDuration="1.278779221s" podCreationTimestamp="2025-11-27 08:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:00:01.274430387 +0000 UTC m=+4039.466474315" watchObservedRunningTime="2025-11-27 08:00:01.278779221 +0000 UTC m=+4039.470823139" Nov 27 08:00:02 crc kubenswrapper[4971]: I1127 08:00:02.271977 4971 generic.go:334] "Generic (PLEG): container finished" podID="19e3b6ec-43b6-4fa4-81f6-51c5385ae246" containerID="2b53877f309230a7e50155c125aa08b61674704902686ec28b0f41db04dbc387" exitCode=0 Nov 27 08:00:02 crc kubenswrapper[4971]: I1127 08:00:02.272423 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc" event={"ID":"19e3b6ec-43b6-4fa4-81f6-51c5385ae246","Type":"ContainerDied","Data":"2b53877f309230a7e50155c125aa08b61674704902686ec28b0f41db04dbc387"} Nov 27 08:00:03 crc kubenswrapper[4971]: I1127 08:00:03.550249 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc" Nov 27 08:00:03 crc kubenswrapper[4971]: I1127 08:00:03.685057 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/19e3b6ec-43b6-4fa4-81f6-51c5385ae246-config-volume\") pod \"19e3b6ec-43b6-4fa4-81f6-51c5385ae246\" (UID: \"19e3b6ec-43b6-4fa4-81f6-51c5385ae246\") " Nov 27 08:00:03 crc kubenswrapper[4971]: I1127 08:00:03.685186 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqdkm\" (UniqueName: \"kubernetes.io/projected/19e3b6ec-43b6-4fa4-81f6-51c5385ae246-kube-api-access-gqdkm\") pod \"19e3b6ec-43b6-4fa4-81f6-51c5385ae246\" (UID: \"19e3b6ec-43b6-4fa4-81f6-51c5385ae246\") " Nov 27 08:00:03 crc kubenswrapper[4971]: I1127 08:00:03.685802 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19e3b6ec-43b6-4fa4-81f6-51c5385ae246-config-volume" (OuterVolumeSpecName: "config-volume") pod "19e3b6ec-43b6-4fa4-81f6-51c5385ae246" (UID: "19e3b6ec-43b6-4fa4-81f6-51c5385ae246"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:00:03 crc kubenswrapper[4971]: I1127 08:00:03.685910 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/19e3b6ec-43b6-4fa4-81f6-51c5385ae246-secret-volume\") pod \"19e3b6ec-43b6-4fa4-81f6-51c5385ae246\" (UID: \"19e3b6ec-43b6-4fa4-81f6-51c5385ae246\") " Nov 27 08:00:03 crc kubenswrapper[4971]: I1127 08:00:03.686882 4971 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/19e3b6ec-43b6-4fa4-81f6-51c5385ae246-config-volume\") on node \"crc\" DevicePath \"\"" Nov 27 08:00:03 crc kubenswrapper[4971]: I1127 08:00:03.690150 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19e3b6ec-43b6-4fa4-81f6-51c5385ae246-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "19e3b6ec-43b6-4fa4-81f6-51c5385ae246" (UID: "19e3b6ec-43b6-4fa4-81f6-51c5385ae246"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:00:03 crc kubenswrapper[4971]: I1127 08:00:03.690691 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19e3b6ec-43b6-4fa4-81f6-51c5385ae246-kube-api-access-gqdkm" (OuterVolumeSpecName: "kube-api-access-gqdkm") pod "19e3b6ec-43b6-4fa4-81f6-51c5385ae246" (UID: "19e3b6ec-43b6-4fa4-81f6-51c5385ae246"). InnerVolumeSpecName "kube-api-access-gqdkm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:00:03 crc kubenswrapper[4971]: I1127 08:00:03.787899 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqdkm\" (UniqueName: \"kubernetes.io/projected/19e3b6ec-43b6-4fa4-81f6-51c5385ae246-kube-api-access-gqdkm\") on node \"crc\" DevicePath \"\"" Nov 27 08:00:03 crc kubenswrapper[4971]: I1127 08:00:03.787942 4971 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/19e3b6ec-43b6-4fa4-81f6-51c5385ae246-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 27 08:00:04 crc kubenswrapper[4971]: I1127 08:00:04.291158 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc" event={"ID":"19e3b6ec-43b6-4fa4-81f6-51c5385ae246","Type":"ContainerDied","Data":"919c6cec3e143a274f9982aca1d69c76b26bf6c280c4cf00ca5991ada033905a"} Nov 27 08:00:04 crc kubenswrapper[4971]: I1127 08:00:04.291628 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="919c6cec3e143a274f9982aca1d69c76b26bf6c280c4cf00ca5991ada033905a" Nov 27 08:00:04 crc kubenswrapper[4971]: I1127 08:00:04.291272 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc" Nov 27 08:00:04 crc kubenswrapper[4971]: I1127 08:00:04.370042 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts"] Nov 27 08:00:04 crc kubenswrapper[4971]: I1127 08:00:04.375171 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403795-sb5ts"] Nov 27 08:00:04 crc kubenswrapper[4971]: I1127 08:00:04.561469 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8111ff30-6af9-4f0c-95cf-6dc2460ff4bb" path="/var/lib/kubelet/pods/8111ff30-6af9-4f0c-95cf-6dc2460ff4bb/volumes" Nov 27 08:00:26 crc kubenswrapper[4971]: I1127 08:00:26.413008 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:00:26 crc kubenswrapper[4971]: I1127 08:00:26.413798 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:00:51 crc kubenswrapper[4971]: I1127 08:00:51.377431 4971 scope.go:117] "RemoveContainer" containerID="06e5c4798cbcadbeb23060dff3a163709767f5cc0d898def21a29e52aacb95b5" Nov 27 08:00:56 crc kubenswrapper[4971]: I1127 08:00:56.412975 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:00:56 crc kubenswrapper[4971]: I1127 08:00:56.414714 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:00:56 crc kubenswrapper[4971]: I1127 08:00:56.414848 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 08:00:56 crc kubenswrapper[4971]: I1127 08:00:56.415415 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2f816c0bb2be43107714feedadec18053ad453801121cb429b2fc5ed0df8ec95"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 08:00:56 crc kubenswrapper[4971]: I1127 08:00:56.415575 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://2f816c0bb2be43107714feedadec18053ad453801121cb429b2fc5ed0df8ec95" gracePeriod=600 Nov 27 08:00:56 crc kubenswrapper[4971]: I1127 08:00:56.785255 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="2f816c0bb2be43107714feedadec18053ad453801121cb429b2fc5ed0df8ec95" exitCode=0 Nov 27 08:00:56 crc kubenswrapper[4971]: I1127 08:00:56.785314 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"2f816c0bb2be43107714feedadec18053ad453801121cb429b2fc5ed0df8ec95"} Nov 27 08:00:56 crc kubenswrapper[4971]: I1127 08:00:56.785357 4971 scope.go:117] "RemoveContainer" containerID="3a8813f2f14eb2d16fd345841429ed6871b301a5aa14294cc225c677f1f6c863" Nov 27 08:00:57 crc kubenswrapper[4971]: I1127 08:00:57.797183 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f"} Nov 27 08:01:00 crc kubenswrapper[4971]: I1127 08:01:00.083030 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-b2gn9"] Nov 27 08:01:00 crc kubenswrapper[4971]: E1127 08:01:00.083824 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19e3b6ec-43b6-4fa4-81f6-51c5385ae246" containerName="collect-profiles" Nov 27 08:01:00 crc kubenswrapper[4971]: I1127 08:01:00.083840 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="19e3b6ec-43b6-4fa4-81f6-51c5385ae246" containerName="collect-profiles" Nov 27 08:01:00 crc kubenswrapper[4971]: I1127 08:01:00.084012 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="19e3b6ec-43b6-4fa4-81f6-51c5385ae246" containerName="collect-profiles" Nov 27 08:01:00 crc kubenswrapper[4971]: I1127 08:01:00.085224 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b2gn9" Nov 27 08:01:00 crc kubenswrapper[4971]: I1127 08:01:00.093689 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b2gn9"] Nov 27 08:01:00 crc kubenswrapper[4971]: I1127 08:01:00.190011 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab0c4abc-8296-4062-a49b-adc982915a0a-catalog-content\") pod \"redhat-operators-b2gn9\" (UID: \"ab0c4abc-8296-4062-a49b-adc982915a0a\") " pod="openshift-marketplace/redhat-operators-b2gn9" Nov 27 08:01:00 crc kubenswrapper[4971]: I1127 08:01:00.190063 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab0c4abc-8296-4062-a49b-adc982915a0a-utilities\") pod \"redhat-operators-b2gn9\" (UID: \"ab0c4abc-8296-4062-a49b-adc982915a0a\") " pod="openshift-marketplace/redhat-operators-b2gn9" Nov 27 08:01:00 crc kubenswrapper[4971]: I1127 08:01:00.190170 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6nqq\" (UniqueName: \"kubernetes.io/projected/ab0c4abc-8296-4062-a49b-adc982915a0a-kube-api-access-f6nqq\") pod \"redhat-operators-b2gn9\" (UID: \"ab0c4abc-8296-4062-a49b-adc982915a0a\") " pod="openshift-marketplace/redhat-operators-b2gn9" Nov 27 08:01:00 crc kubenswrapper[4971]: I1127 08:01:00.291441 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab0c4abc-8296-4062-a49b-adc982915a0a-catalog-content\") pod \"redhat-operators-b2gn9\" (UID: \"ab0c4abc-8296-4062-a49b-adc982915a0a\") " pod="openshift-marketplace/redhat-operators-b2gn9" Nov 27 08:01:00 crc kubenswrapper[4971]: I1127 08:01:00.291513 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab0c4abc-8296-4062-a49b-adc982915a0a-utilities\") pod \"redhat-operators-b2gn9\" (UID: \"ab0c4abc-8296-4062-a49b-adc982915a0a\") " pod="openshift-marketplace/redhat-operators-b2gn9" Nov 27 08:01:00 crc kubenswrapper[4971]: I1127 08:01:00.291628 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6nqq\" (UniqueName: \"kubernetes.io/projected/ab0c4abc-8296-4062-a49b-adc982915a0a-kube-api-access-f6nqq\") pod \"redhat-operators-b2gn9\" (UID: \"ab0c4abc-8296-4062-a49b-adc982915a0a\") " pod="openshift-marketplace/redhat-operators-b2gn9" Nov 27 08:01:00 crc kubenswrapper[4971]: I1127 08:01:00.292325 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab0c4abc-8296-4062-a49b-adc982915a0a-catalog-content\") pod \"redhat-operators-b2gn9\" (UID: \"ab0c4abc-8296-4062-a49b-adc982915a0a\") " pod="openshift-marketplace/redhat-operators-b2gn9" Nov 27 08:01:00 crc kubenswrapper[4971]: I1127 08:01:00.292406 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab0c4abc-8296-4062-a49b-adc982915a0a-utilities\") pod \"redhat-operators-b2gn9\" (UID: \"ab0c4abc-8296-4062-a49b-adc982915a0a\") " pod="openshift-marketplace/redhat-operators-b2gn9" Nov 27 08:01:00 crc kubenswrapper[4971]: I1127 08:01:00.324474 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-f6nqq\" (UniqueName: \"kubernetes.io/projected/ab0c4abc-8296-4062-a49b-adc982915a0a-kube-api-access-f6nqq\") pod \"redhat-operators-b2gn9\" (UID: \"ab0c4abc-8296-4062-a49b-adc982915a0a\") " pod="openshift-marketplace/redhat-operators-b2gn9" Nov 27 08:01:00 crc kubenswrapper[4971]: I1127 08:01:00.416479 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b2gn9" Nov 27 08:01:00 crc kubenswrapper[4971]: I1127 08:01:00.665831 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b2gn9"] Nov 27 08:01:00 crc kubenswrapper[4971]: W1127 08:01:00.675173 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podab0c4abc_8296_4062_a49b_adc982915a0a.slice/crio-6b0c8b2635a6fb36798fa8ffe04bbca97672bf69cab75dcbae4400d402d3591a WatchSource:0}: Error finding container 6b0c8b2635a6fb36798fa8ffe04bbca97672bf69cab75dcbae4400d402d3591a: Status 404 returned error can't find the container with id 6b0c8b2635a6fb36798fa8ffe04bbca97672bf69cab75dcbae4400d402d3591a Nov 27 08:01:00 crc kubenswrapper[4971]: I1127 08:01:00.840507 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b2gn9" event={"ID":"ab0c4abc-8296-4062-a49b-adc982915a0a","Type":"ContainerStarted","Data":"6b0c8b2635a6fb36798fa8ffe04bbca97672bf69cab75dcbae4400d402d3591a"} Nov 27 08:01:01 crc kubenswrapper[4971]: I1127 08:01:01.852455 4971 generic.go:334] "Generic (PLEG): container finished" podID="ab0c4abc-8296-4062-a49b-adc982915a0a" containerID="4a97fdb1d55f68ec2e4edf3d480619dbf41d0704d3c4fbc3375369c8099ba9bb" exitCode=0 Nov 27 08:01:01 crc kubenswrapper[4971]: I1127 08:01:01.852562 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b2gn9" event={"ID":"ab0c4abc-8296-4062-a49b-adc982915a0a","Type":"ContainerDied","Data":"4a97fdb1d55f68ec2e4edf3d480619dbf41d0704d3c4fbc3375369c8099ba9bb"} Nov 27 08:01:01 crc kubenswrapper[4971]: I1127 08:01:01.855193 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 27 08:01:03 crc kubenswrapper[4971]: I1127 08:01:03.874316 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b2gn9" event={"ID":"ab0c4abc-8296-4062-a49b-adc982915a0a","Type":"ContainerStarted","Data":"a088941b87521fdccb9d277c368098b340e4ab4619789d263f25d6cdb642070b"} Nov 27 08:01:04 crc kubenswrapper[4971]: I1127 08:01:04.884922 4971 generic.go:334] "Generic (PLEG): container finished" podID="ab0c4abc-8296-4062-a49b-adc982915a0a" containerID="a088941b87521fdccb9d277c368098b340e4ab4619789d263f25d6cdb642070b" exitCode=0 Nov 27 08:01:04 crc kubenswrapper[4971]: I1127 08:01:04.885020 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b2gn9" event={"ID":"ab0c4abc-8296-4062-a49b-adc982915a0a","Type":"ContainerDied","Data":"a088941b87521fdccb9d277c368098b340e4ab4619789d263f25d6cdb642070b"} Nov 27 08:01:05 crc kubenswrapper[4971]: I1127 08:01:05.900959 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b2gn9" event={"ID":"ab0c4abc-8296-4062-a49b-adc982915a0a","Type":"ContainerStarted","Data":"4434a3df3a46245ee74032326a68d82d660eb6c6e417dfc7b6707707cbdca4dc"} Nov 27 08:01:10 crc kubenswrapper[4971]: I1127 08:01:10.416700 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/redhat-operators-b2gn9" Nov 27 08:01:10 crc kubenswrapper[4971]: I1127 08:01:10.417433 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-b2gn9" Nov 27 08:01:11 crc kubenswrapper[4971]: I1127 08:01:11.468624 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-b2gn9" podUID="ab0c4abc-8296-4062-a49b-adc982915a0a" containerName="registry-server" probeResult="failure" output=< Nov 27 08:01:11 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 08:01:11 crc kubenswrapper[4971]: > Nov 27 08:01:20 crc kubenswrapper[4971]: I1127 08:01:20.503159 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-b2gn9" Nov 27 08:01:20 crc kubenswrapper[4971]: I1127 08:01:20.527188 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-b2gn9" podStartSLOduration=17.003730087 podStartE2EDuration="20.527164067s" podCreationTimestamp="2025-11-27 08:01:00 +0000 UTC" firstStartedPulling="2025-11-27 08:01:01.854872022 +0000 UTC m=+4100.046915940" lastFinishedPulling="2025-11-27 08:01:05.378306002 +0000 UTC m=+4103.570349920" observedRunningTime="2025-11-27 08:01:05.925479722 +0000 UTC m=+4104.117523690" watchObservedRunningTime="2025-11-27 08:01:20.527164067 +0000 UTC m=+4118.719207985" Nov 27 08:01:20 crc kubenswrapper[4971]: I1127 08:01:20.565557 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-b2gn9" Nov 27 08:01:20 crc kubenswrapper[4971]: I1127 08:01:20.751149 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b2gn9"] Nov 27 08:01:22 crc kubenswrapper[4971]: I1127 08:01:22.036894 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-b2gn9" podUID="ab0c4abc-8296-4062-a49b-adc982915a0a" containerName="registry-server" containerID="cri-o://4434a3df3a46245ee74032326a68d82d660eb6c6e417dfc7b6707707cbdca4dc" gracePeriod=2 Nov 27 08:01:22 crc kubenswrapper[4971]: I1127 08:01:22.418402 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b2gn9" Nov 27 08:01:22 crc kubenswrapper[4971]: I1127 08:01:22.483728 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6nqq\" (UniqueName: \"kubernetes.io/projected/ab0c4abc-8296-4062-a49b-adc982915a0a-kube-api-access-f6nqq\") pod \"ab0c4abc-8296-4062-a49b-adc982915a0a\" (UID: \"ab0c4abc-8296-4062-a49b-adc982915a0a\") " Nov 27 08:01:22 crc kubenswrapper[4971]: I1127 08:01:22.483787 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab0c4abc-8296-4062-a49b-adc982915a0a-catalog-content\") pod \"ab0c4abc-8296-4062-a49b-adc982915a0a\" (UID: \"ab0c4abc-8296-4062-a49b-adc982915a0a\") " Nov 27 08:01:22 crc kubenswrapper[4971]: I1127 08:01:22.483826 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab0c4abc-8296-4062-a49b-adc982915a0a-utilities\") pod \"ab0c4abc-8296-4062-a49b-adc982915a0a\" (UID: \"ab0c4abc-8296-4062-a49b-adc982915a0a\") " Nov 27 08:01:22 crc kubenswrapper[4971]: I1127 08:01:22.485103 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab0c4abc-8296-4062-a49b-adc982915a0a-utilities" (OuterVolumeSpecName: "utilities") pod "ab0c4abc-8296-4062-a49b-adc982915a0a" (UID: "ab0c4abc-8296-4062-a49b-adc982915a0a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:01:22 crc kubenswrapper[4971]: I1127 08:01:22.492610 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab0c4abc-8296-4062-a49b-adc982915a0a-kube-api-access-f6nqq" (OuterVolumeSpecName: "kube-api-access-f6nqq") pod "ab0c4abc-8296-4062-a49b-adc982915a0a" (UID: "ab0c4abc-8296-4062-a49b-adc982915a0a"). InnerVolumeSpecName "kube-api-access-f6nqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:01:22 crc kubenswrapper[4971]: I1127 08:01:22.588323 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6nqq\" (UniqueName: \"kubernetes.io/projected/ab0c4abc-8296-4062-a49b-adc982915a0a-kube-api-access-f6nqq\") on node \"crc\" DevicePath \"\"" Nov 27 08:01:22 crc kubenswrapper[4971]: I1127 08:01:22.588377 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab0c4abc-8296-4062-a49b-adc982915a0a-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 08:01:22 crc kubenswrapper[4971]: I1127 08:01:22.602454 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab0c4abc-8296-4062-a49b-adc982915a0a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ab0c4abc-8296-4062-a49b-adc982915a0a" (UID: "ab0c4abc-8296-4062-a49b-adc982915a0a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:01:22 crc kubenswrapper[4971]: I1127 08:01:22.689824 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab0c4abc-8296-4062-a49b-adc982915a0a-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 08:01:23 crc kubenswrapper[4971]: I1127 08:01:23.050691 4971 generic.go:334] "Generic (PLEG): container finished" podID="ab0c4abc-8296-4062-a49b-adc982915a0a" containerID="4434a3df3a46245ee74032326a68d82d660eb6c6e417dfc7b6707707cbdca4dc" exitCode=0 Nov 27 08:01:23 crc kubenswrapper[4971]: I1127 08:01:23.050808 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b2gn9" Nov 27 08:01:23 crc kubenswrapper[4971]: I1127 08:01:23.050822 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b2gn9" event={"ID":"ab0c4abc-8296-4062-a49b-adc982915a0a","Type":"ContainerDied","Data":"4434a3df3a46245ee74032326a68d82d660eb6c6e417dfc7b6707707cbdca4dc"} Nov 27 08:01:23 crc kubenswrapper[4971]: I1127 08:01:23.051338 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b2gn9" event={"ID":"ab0c4abc-8296-4062-a49b-adc982915a0a","Type":"ContainerDied","Data":"6b0c8b2635a6fb36798fa8ffe04bbca97672bf69cab75dcbae4400d402d3591a"} Nov 27 08:01:23 crc kubenswrapper[4971]: I1127 08:01:23.051375 4971 scope.go:117] "RemoveContainer" containerID="4434a3df3a46245ee74032326a68d82d660eb6c6e417dfc7b6707707cbdca4dc" Nov 27 08:01:23 crc kubenswrapper[4971]: I1127 08:01:23.096513 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b2gn9"] Nov 27 08:01:23 crc kubenswrapper[4971]: I1127 08:01:23.102185 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-b2gn9"] Nov 27 08:01:23 crc kubenswrapper[4971]: I1127 08:01:23.102969 4971 scope.go:117] "RemoveContainer" containerID="a088941b87521fdccb9d277c368098b340e4ab4619789d263f25d6cdb642070b" Nov 27 08:01:23 crc kubenswrapper[4971]: I1127 08:01:23.127116 4971 scope.go:117] "RemoveContainer" containerID="4a97fdb1d55f68ec2e4edf3d480619dbf41d0704d3c4fbc3375369c8099ba9bb" Nov 27 08:01:23 crc kubenswrapper[4971]: I1127 08:01:23.157776 4971 scope.go:117] "RemoveContainer" containerID="4434a3df3a46245ee74032326a68d82d660eb6c6e417dfc7b6707707cbdca4dc" Nov 27 08:01:23 crc kubenswrapper[4971]: E1127 08:01:23.158521 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4434a3df3a46245ee74032326a68d82d660eb6c6e417dfc7b6707707cbdca4dc\": container with ID starting with 4434a3df3a46245ee74032326a68d82d660eb6c6e417dfc7b6707707cbdca4dc not found: ID does not exist" containerID="4434a3df3a46245ee74032326a68d82d660eb6c6e417dfc7b6707707cbdca4dc" Nov 27 08:01:23 crc kubenswrapper[4971]: I1127 08:01:23.158600 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4434a3df3a46245ee74032326a68d82d660eb6c6e417dfc7b6707707cbdca4dc"} err="failed to get container status \"4434a3df3a46245ee74032326a68d82d660eb6c6e417dfc7b6707707cbdca4dc\": rpc error: code = NotFound desc = could not find container \"4434a3df3a46245ee74032326a68d82d660eb6c6e417dfc7b6707707cbdca4dc\": container with ID starting with 4434a3df3a46245ee74032326a68d82d660eb6c6e417dfc7b6707707cbdca4dc not found: ID does not exist" Nov 27 08:01:23 crc 
kubenswrapper[4971]: I1127 08:01:23.158639 4971 scope.go:117] "RemoveContainer" containerID="a088941b87521fdccb9d277c368098b340e4ab4619789d263f25d6cdb642070b" Nov 27 08:01:23 crc kubenswrapper[4971]: E1127 08:01:23.159232 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a088941b87521fdccb9d277c368098b340e4ab4619789d263f25d6cdb642070b\": container with ID starting with a088941b87521fdccb9d277c368098b340e4ab4619789d263f25d6cdb642070b not found: ID does not exist" containerID="a088941b87521fdccb9d277c368098b340e4ab4619789d263f25d6cdb642070b" Nov 27 08:01:23 crc kubenswrapper[4971]: I1127 08:01:23.159264 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a088941b87521fdccb9d277c368098b340e4ab4619789d263f25d6cdb642070b"} err="failed to get container status \"a088941b87521fdccb9d277c368098b340e4ab4619789d263f25d6cdb642070b\": rpc error: code = NotFound desc = could not find container \"a088941b87521fdccb9d277c368098b340e4ab4619789d263f25d6cdb642070b\": container with ID starting with a088941b87521fdccb9d277c368098b340e4ab4619789d263f25d6cdb642070b not found: ID does not exist" Nov 27 08:01:23 crc kubenswrapper[4971]: I1127 08:01:23.159283 4971 scope.go:117] "RemoveContainer" containerID="4a97fdb1d55f68ec2e4edf3d480619dbf41d0704d3c4fbc3375369c8099ba9bb" Nov 27 08:01:23 crc kubenswrapper[4971]: E1127 08:01:23.159961 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a97fdb1d55f68ec2e4edf3d480619dbf41d0704d3c4fbc3375369c8099ba9bb\": container with ID starting with 4a97fdb1d55f68ec2e4edf3d480619dbf41d0704d3c4fbc3375369c8099ba9bb not found: ID does not exist" containerID="4a97fdb1d55f68ec2e4edf3d480619dbf41d0704d3c4fbc3375369c8099ba9bb" Nov 27 08:01:23 crc kubenswrapper[4971]: I1127 08:01:23.160019 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a97fdb1d55f68ec2e4edf3d480619dbf41d0704d3c4fbc3375369c8099ba9bb"} err="failed to get container status \"4a97fdb1d55f68ec2e4edf3d480619dbf41d0704d3c4fbc3375369c8099ba9bb\": rpc error: code = NotFound desc = could not find container \"4a97fdb1d55f68ec2e4edf3d480619dbf41d0704d3c4fbc3375369c8099ba9bb\": container with ID starting with 4a97fdb1d55f68ec2e4edf3d480619dbf41d0704d3c4fbc3375369c8099ba9bb not found: ID does not exist" Nov 27 08:01:24 crc kubenswrapper[4971]: I1127 08:01:24.560832 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab0c4abc-8296-4062-a49b-adc982915a0a" path="/var/lib/kubelet/pods/ab0c4abc-8296-4062-a49b-adc982915a0a/volumes" Nov 27 08:02:36 crc kubenswrapper[4971]: I1127 08:02:36.214358 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-np4kj"] Nov 27 08:02:36 crc kubenswrapper[4971]: E1127 08:02:36.215771 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab0c4abc-8296-4062-a49b-adc982915a0a" containerName="extract-content" Nov 27 08:02:36 crc kubenswrapper[4971]: I1127 08:02:36.215798 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab0c4abc-8296-4062-a49b-adc982915a0a" containerName="extract-content" Nov 27 08:02:36 crc kubenswrapper[4971]: E1127 08:02:36.215848 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab0c4abc-8296-4062-a49b-adc982915a0a" containerName="extract-utilities" Nov 27 08:02:36 crc kubenswrapper[4971]: I1127 08:02:36.215857 4971 
state_mem.go:107] "Deleted CPUSet assignment" podUID="ab0c4abc-8296-4062-a49b-adc982915a0a" containerName="extract-utilities" Nov 27 08:02:36 crc kubenswrapper[4971]: E1127 08:02:36.215887 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab0c4abc-8296-4062-a49b-adc982915a0a" containerName="registry-server" Nov 27 08:02:36 crc kubenswrapper[4971]: I1127 08:02:36.215894 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab0c4abc-8296-4062-a49b-adc982915a0a" containerName="registry-server" Nov 27 08:02:36 crc kubenswrapper[4971]: I1127 08:02:36.216101 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab0c4abc-8296-4062-a49b-adc982915a0a" containerName="registry-server" Nov 27 08:02:36 crc kubenswrapper[4971]: I1127 08:02:36.217556 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-np4kj" Nov 27 08:02:36 crc kubenswrapper[4971]: I1127 08:02:36.229827 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-np4kj"] Nov 27 08:02:36 crc kubenswrapper[4971]: I1127 08:02:36.371802 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/047e5a2f-96f7-4368-a26b-9692b2d3419e-utilities\") pod \"certified-operators-np4kj\" (UID: \"047e5a2f-96f7-4368-a26b-9692b2d3419e\") " pod="openshift-marketplace/certified-operators-np4kj" Nov 27 08:02:36 crc kubenswrapper[4971]: I1127 08:02:36.372266 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/047e5a2f-96f7-4368-a26b-9692b2d3419e-catalog-content\") pod \"certified-operators-np4kj\" (UID: \"047e5a2f-96f7-4368-a26b-9692b2d3419e\") " pod="openshift-marketplace/certified-operators-np4kj" Nov 27 08:02:36 crc kubenswrapper[4971]: I1127 08:02:36.372331 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7snbl\" (UniqueName: \"kubernetes.io/projected/047e5a2f-96f7-4368-a26b-9692b2d3419e-kube-api-access-7snbl\") pod \"certified-operators-np4kj\" (UID: \"047e5a2f-96f7-4368-a26b-9692b2d3419e\") " pod="openshift-marketplace/certified-operators-np4kj" Nov 27 08:02:36 crc kubenswrapper[4971]: I1127 08:02:36.474245 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/047e5a2f-96f7-4368-a26b-9692b2d3419e-utilities\") pod \"certified-operators-np4kj\" (UID: \"047e5a2f-96f7-4368-a26b-9692b2d3419e\") " pod="openshift-marketplace/certified-operators-np4kj" Nov 27 08:02:36 crc kubenswrapper[4971]: I1127 08:02:36.474318 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/047e5a2f-96f7-4368-a26b-9692b2d3419e-catalog-content\") pod \"certified-operators-np4kj\" (UID: \"047e5a2f-96f7-4368-a26b-9692b2d3419e\") " pod="openshift-marketplace/certified-operators-np4kj" Nov 27 08:02:36 crc kubenswrapper[4971]: I1127 08:02:36.474395 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7snbl\" (UniqueName: \"kubernetes.io/projected/047e5a2f-96f7-4368-a26b-9692b2d3419e-kube-api-access-7snbl\") pod \"certified-operators-np4kj\" (UID: \"047e5a2f-96f7-4368-a26b-9692b2d3419e\") " pod="openshift-marketplace/certified-operators-np4kj" Nov 27 08:02:36 crc 
kubenswrapper[4971]: I1127 08:02:36.475471 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/047e5a2f-96f7-4368-a26b-9692b2d3419e-catalog-content\") pod \"certified-operators-np4kj\" (UID: \"047e5a2f-96f7-4368-a26b-9692b2d3419e\") " pod="openshift-marketplace/certified-operators-np4kj" Nov 27 08:02:36 crc kubenswrapper[4971]: I1127 08:02:36.475488 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/047e5a2f-96f7-4368-a26b-9692b2d3419e-utilities\") pod \"certified-operators-np4kj\" (UID: \"047e5a2f-96f7-4368-a26b-9692b2d3419e\") " pod="openshift-marketplace/certified-operators-np4kj" Nov 27 08:02:36 crc kubenswrapper[4971]: I1127 08:02:36.500489 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7snbl\" (UniqueName: \"kubernetes.io/projected/047e5a2f-96f7-4368-a26b-9692b2d3419e-kube-api-access-7snbl\") pod \"certified-operators-np4kj\" (UID: \"047e5a2f-96f7-4368-a26b-9692b2d3419e\") " pod="openshift-marketplace/certified-operators-np4kj" Nov 27 08:02:36 crc kubenswrapper[4971]: I1127 08:02:36.547055 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-np4kj" Nov 27 08:02:36 crc kubenswrapper[4971]: I1127 08:02:36.880379 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-np4kj"] Nov 27 08:02:37 crc kubenswrapper[4971]: I1127 08:02:37.747958 4971 generic.go:334] "Generic (PLEG): container finished" podID="047e5a2f-96f7-4368-a26b-9692b2d3419e" containerID="8c93cd5c78360179bb11417dae7cf22a8647680f52880a3d69972bcf01b98150" exitCode=0 Nov 27 08:02:37 crc kubenswrapper[4971]: I1127 08:02:37.748031 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-np4kj" event={"ID":"047e5a2f-96f7-4368-a26b-9692b2d3419e","Type":"ContainerDied","Data":"8c93cd5c78360179bb11417dae7cf22a8647680f52880a3d69972bcf01b98150"} Nov 27 08:02:37 crc kubenswrapper[4971]: I1127 08:02:37.748753 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-np4kj" event={"ID":"047e5a2f-96f7-4368-a26b-9692b2d3419e","Type":"ContainerStarted","Data":"7efc0737e791e79e01e87480658367eb2e77c5a24f9dad87751fde3478365d32"} Nov 27 08:02:41 crc kubenswrapper[4971]: I1127 08:02:41.791018 4971 generic.go:334] "Generic (PLEG): container finished" podID="047e5a2f-96f7-4368-a26b-9692b2d3419e" containerID="0e995a4cd07d813f8fcb0c6af8ec992e6444a3694991ee2130a449ec0020b09d" exitCode=0 Nov 27 08:02:41 crc kubenswrapper[4971]: I1127 08:02:41.791125 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-np4kj" event={"ID":"047e5a2f-96f7-4368-a26b-9692b2d3419e","Type":"ContainerDied","Data":"0e995a4cd07d813f8fcb0c6af8ec992e6444a3694991ee2130a449ec0020b09d"} Nov 27 08:02:42 crc kubenswrapper[4971]: I1127 08:02:42.803599 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-np4kj" event={"ID":"047e5a2f-96f7-4368-a26b-9692b2d3419e","Type":"ContainerStarted","Data":"9431eb86553b3f8153086637a4dd761aef59549053f7bd489333bbd2071ab1b3"} Nov 27 08:02:42 crc kubenswrapper[4971]: I1127 08:02:42.831864 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-np4kj" podStartSLOduration=2.210757369 
podStartE2EDuration="6.831840623s" podCreationTimestamp="2025-11-27 08:02:36 +0000 UTC" firstStartedPulling="2025-11-27 08:02:37.750393474 +0000 UTC m=+4195.942437392" lastFinishedPulling="2025-11-27 08:02:42.371476718 +0000 UTC m=+4200.563520646" observedRunningTime="2025-11-27 08:02:42.825142042 +0000 UTC m=+4201.017185980" watchObservedRunningTime="2025-11-27 08:02:42.831840623 +0000 UTC m=+4201.023884531" Nov 27 08:02:46 crc kubenswrapper[4971]: I1127 08:02:46.547891 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-np4kj" Nov 27 08:02:46 crc kubenswrapper[4971]: I1127 08:02:46.548709 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-np4kj" Nov 27 08:02:46 crc kubenswrapper[4971]: I1127 08:02:46.597861 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-np4kj" Nov 27 08:02:56 crc kubenswrapper[4971]: I1127 08:02:56.599252 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-np4kj" Nov 27 08:02:56 crc kubenswrapper[4971]: I1127 08:02:56.668139 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-np4kj"] Nov 27 08:02:56 crc kubenswrapper[4971]: I1127 08:02:56.712886 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pthvj"] Nov 27 08:02:56 crc kubenswrapper[4971]: I1127 08:02:56.713978 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-pthvj" podUID="e22bb0db-bf56-410d-8f07-3b6e9c99b15e" containerName="registry-server" containerID="cri-o://dbf8a69731d3c9ea03293cdc4b2ef56d962ea08885ac7327c8b40e0993323a59" gracePeriod=2 Nov 27 08:02:56 crc kubenswrapper[4971]: I1127 08:02:56.963776 4971 generic.go:334] "Generic (PLEG): container finished" podID="e22bb0db-bf56-410d-8f07-3b6e9c99b15e" containerID="dbf8a69731d3c9ea03293cdc4b2ef56d962ea08885ac7327c8b40e0993323a59" exitCode=0 Nov 27 08:02:56 crc kubenswrapper[4971]: I1127 08:02:56.963929 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pthvj" event={"ID":"e22bb0db-bf56-410d-8f07-3b6e9c99b15e","Type":"ContainerDied","Data":"dbf8a69731d3c9ea03293cdc4b2ef56d962ea08885ac7327c8b40e0993323a59"} Nov 27 08:02:57 crc kubenswrapper[4971]: I1127 08:02:57.153062 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pthvj" Nov 27 08:02:57 crc kubenswrapper[4971]: I1127 08:02:57.256550 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e22bb0db-bf56-410d-8f07-3b6e9c99b15e-utilities\") pod \"e22bb0db-bf56-410d-8f07-3b6e9c99b15e\" (UID: \"e22bb0db-bf56-410d-8f07-3b6e9c99b15e\") " Nov 27 08:02:57 crc kubenswrapper[4971]: I1127 08:02:57.257013 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rn994\" (UniqueName: \"kubernetes.io/projected/e22bb0db-bf56-410d-8f07-3b6e9c99b15e-kube-api-access-rn994\") pod \"e22bb0db-bf56-410d-8f07-3b6e9c99b15e\" (UID: \"e22bb0db-bf56-410d-8f07-3b6e9c99b15e\") " Nov 27 08:02:57 crc kubenswrapper[4971]: I1127 08:02:57.257234 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e22bb0db-bf56-410d-8f07-3b6e9c99b15e-catalog-content\") pod \"e22bb0db-bf56-410d-8f07-3b6e9c99b15e\" (UID: \"e22bb0db-bf56-410d-8f07-3b6e9c99b15e\") " Nov 27 08:02:57 crc kubenswrapper[4971]: I1127 08:02:57.259255 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e22bb0db-bf56-410d-8f07-3b6e9c99b15e-utilities" (OuterVolumeSpecName: "utilities") pod "e22bb0db-bf56-410d-8f07-3b6e9c99b15e" (UID: "e22bb0db-bf56-410d-8f07-3b6e9c99b15e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:02:57 crc kubenswrapper[4971]: I1127 08:02:57.267928 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e22bb0db-bf56-410d-8f07-3b6e9c99b15e-kube-api-access-rn994" (OuterVolumeSpecName: "kube-api-access-rn994") pod "e22bb0db-bf56-410d-8f07-3b6e9c99b15e" (UID: "e22bb0db-bf56-410d-8f07-3b6e9c99b15e"). InnerVolumeSpecName "kube-api-access-rn994". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:02:57 crc kubenswrapper[4971]: I1127 08:02:57.301515 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e22bb0db-bf56-410d-8f07-3b6e9c99b15e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e22bb0db-bf56-410d-8f07-3b6e9c99b15e" (UID: "e22bb0db-bf56-410d-8f07-3b6e9c99b15e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:02:57 crc kubenswrapper[4971]: I1127 08:02:57.360265 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e22bb0db-bf56-410d-8f07-3b6e9c99b15e-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 08:02:57 crc kubenswrapper[4971]: I1127 08:02:57.360306 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e22bb0db-bf56-410d-8f07-3b6e9c99b15e-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 08:02:57 crc kubenswrapper[4971]: I1127 08:02:57.360319 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rn994\" (UniqueName: \"kubernetes.io/projected/e22bb0db-bf56-410d-8f07-3b6e9c99b15e-kube-api-access-rn994\") on node \"crc\" DevicePath \"\"" Nov 27 08:02:57 crc kubenswrapper[4971]: I1127 08:02:57.976355 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pthvj" event={"ID":"e22bb0db-bf56-410d-8f07-3b6e9c99b15e","Type":"ContainerDied","Data":"a1bc42b44ed7bd109c17e4b16371cccb2a92c1d237cb388086718eeff9b90134"} Nov 27 08:02:57 crc kubenswrapper[4971]: I1127 08:02:57.976420 4971 scope.go:117] "RemoveContainer" containerID="dbf8a69731d3c9ea03293cdc4b2ef56d962ea08885ac7327c8b40e0993323a59" Nov 27 08:02:57 crc kubenswrapper[4971]: I1127 08:02:57.976652 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pthvj" Nov 27 08:02:58 crc kubenswrapper[4971]: I1127 08:02:58.013480 4971 scope.go:117] "RemoveContainer" containerID="def3a620538f01b98414b7aea4df42df57e943427ac253a0bd6918141b133841" Nov 27 08:02:58 crc kubenswrapper[4971]: I1127 08:02:58.016505 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pthvj"] Nov 27 08:02:58 crc kubenswrapper[4971]: I1127 08:02:58.027423 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-pthvj"] Nov 27 08:02:58 crc kubenswrapper[4971]: I1127 08:02:58.040342 4971 scope.go:117] "RemoveContainer" containerID="2496911d29c8054af3d8e9b965cb68546683749ae2af2735cd6711b2e6cefccd" Nov 27 08:02:58 crc kubenswrapper[4971]: I1127 08:02:58.564324 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e22bb0db-bf56-410d-8f07-3b6e9c99b15e" path="/var/lib/kubelet/pods/e22bb0db-bf56-410d-8f07-3b6e9c99b15e/volumes" Nov 27 08:03:26 crc kubenswrapper[4971]: I1127 08:03:26.413414 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:03:26 crc kubenswrapper[4971]: I1127 08:03:26.413977 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:03:56 crc kubenswrapper[4971]: I1127 08:03:56.413600 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": 
dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:03:56 crc kubenswrapper[4971]: I1127 08:03:56.417169 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:04:24 crc kubenswrapper[4971]: I1127 08:04:24.650214 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-s84fl"] Nov 27 08:04:24 crc kubenswrapper[4971]: E1127 08:04:24.651368 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e22bb0db-bf56-410d-8f07-3b6e9c99b15e" containerName="extract-utilities" Nov 27 08:04:24 crc kubenswrapper[4971]: I1127 08:04:24.651394 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="e22bb0db-bf56-410d-8f07-3b6e9c99b15e" containerName="extract-utilities" Nov 27 08:04:24 crc kubenswrapper[4971]: E1127 08:04:24.651418 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e22bb0db-bf56-410d-8f07-3b6e9c99b15e" containerName="registry-server" Nov 27 08:04:24 crc kubenswrapper[4971]: I1127 08:04:24.651431 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="e22bb0db-bf56-410d-8f07-3b6e9c99b15e" containerName="registry-server" Nov 27 08:04:24 crc kubenswrapper[4971]: E1127 08:04:24.651478 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e22bb0db-bf56-410d-8f07-3b6e9c99b15e" containerName="extract-content" Nov 27 08:04:24 crc kubenswrapper[4971]: I1127 08:04:24.651490 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="e22bb0db-bf56-410d-8f07-3b6e9c99b15e" containerName="extract-content" Nov 27 08:04:24 crc kubenswrapper[4971]: I1127 08:04:24.655284 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="e22bb0db-bf56-410d-8f07-3b6e9c99b15e" containerName="registry-server" Nov 27 08:04:24 crc kubenswrapper[4971]: I1127 08:04:24.656892 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s84fl" Nov 27 08:04:24 crc kubenswrapper[4971]: I1127 08:04:24.673853 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s84fl"] Nov 27 08:04:24 crc kubenswrapper[4971]: I1127 08:04:24.724919 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6hpx\" (UniqueName: \"kubernetes.io/projected/392857f2-ef69-4123-92b3-a9398fda5c32-kube-api-access-x6hpx\") pod \"redhat-marketplace-s84fl\" (UID: \"392857f2-ef69-4123-92b3-a9398fda5c32\") " pod="openshift-marketplace/redhat-marketplace-s84fl" Nov 27 08:04:24 crc kubenswrapper[4971]: I1127 08:04:24.724970 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/392857f2-ef69-4123-92b3-a9398fda5c32-utilities\") pod \"redhat-marketplace-s84fl\" (UID: \"392857f2-ef69-4123-92b3-a9398fda5c32\") " pod="openshift-marketplace/redhat-marketplace-s84fl" Nov 27 08:04:24 crc kubenswrapper[4971]: I1127 08:04:24.725255 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/392857f2-ef69-4123-92b3-a9398fda5c32-catalog-content\") pod \"redhat-marketplace-s84fl\" (UID: \"392857f2-ef69-4123-92b3-a9398fda5c32\") " pod="openshift-marketplace/redhat-marketplace-s84fl" Nov 27 08:04:24 crc kubenswrapper[4971]: I1127 08:04:24.826864 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6hpx\" (UniqueName: \"kubernetes.io/projected/392857f2-ef69-4123-92b3-a9398fda5c32-kube-api-access-x6hpx\") pod \"redhat-marketplace-s84fl\" (UID: \"392857f2-ef69-4123-92b3-a9398fda5c32\") " pod="openshift-marketplace/redhat-marketplace-s84fl" Nov 27 08:04:24 crc kubenswrapper[4971]: I1127 08:04:24.826923 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/392857f2-ef69-4123-92b3-a9398fda5c32-utilities\") pod \"redhat-marketplace-s84fl\" (UID: \"392857f2-ef69-4123-92b3-a9398fda5c32\") " pod="openshift-marketplace/redhat-marketplace-s84fl" Nov 27 08:04:24 crc kubenswrapper[4971]: I1127 08:04:24.826994 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/392857f2-ef69-4123-92b3-a9398fda5c32-catalog-content\") pod \"redhat-marketplace-s84fl\" (UID: \"392857f2-ef69-4123-92b3-a9398fda5c32\") " pod="openshift-marketplace/redhat-marketplace-s84fl" Nov 27 08:04:24 crc kubenswrapper[4971]: I1127 08:04:24.827494 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/392857f2-ef69-4123-92b3-a9398fda5c32-catalog-content\") pod \"redhat-marketplace-s84fl\" (UID: \"392857f2-ef69-4123-92b3-a9398fda5c32\") " pod="openshift-marketplace/redhat-marketplace-s84fl" Nov 27 08:04:24 crc kubenswrapper[4971]: I1127 08:04:24.827697 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/392857f2-ef69-4123-92b3-a9398fda5c32-utilities\") pod \"redhat-marketplace-s84fl\" (UID: \"392857f2-ef69-4123-92b3-a9398fda5c32\") " pod="openshift-marketplace/redhat-marketplace-s84fl" Nov 27 08:04:24 crc kubenswrapper[4971]: I1127 08:04:24.865768 4971 operation_generator.go:637] "MountVolume.SetUp 
Nov 27 08:04:24 crc kubenswrapper[4971]: I1127 08:04:24.983189 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s84fl"
Nov 27 08:04:25 crc kubenswrapper[4971]: I1127 08:04:25.489229 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s84fl"]
Nov 27 08:04:25 crc kubenswrapper[4971]: I1127 08:04:25.803320 4971 generic.go:334] "Generic (PLEG): container finished" podID="392857f2-ef69-4123-92b3-a9398fda5c32" containerID="56a24447571aa004ccd06e9110572113f25e40210cecd8aa79f1c09bcbaaa257" exitCode=0
Nov 27 08:04:25 crc kubenswrapper[4971]: I1127 08:04:25.803443 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s84fl" event={"ID":"392857f2-ef69-4123-92b3-a9398fda5c32","Type":"ContainerDied","Data":"56a24447571aa004ccd06e9110572113f25e40210cecd8aa79f1c09bcbaaa257"}
Nov 27 08:04:25 crc kubenswrapper[4971]: I1127 08:04:25.803736 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s84fl" event={"ID":"392857f2-ef69-4123-92b3-a9398fda5c32","Type":"ContainerStarted","Data":"5b897a519f6594f7b60c5619a13882e63e9c376950c2921e02e9de6f292b8ec5"}
Nov 27 08:04:26 crc kubenswrapper[4971]: I1127 08:04:26.413417 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 08:04:26 crc kubenswrapper[4971]: I1127 08:04:26.413911 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 08:04:26 crc kubenswrapper[4971]: I1127 08:04:26.414081 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h"
Nov 27 08:04:26 crc kubenswrapper[4971]: I1127 08:04:26.414992 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 27 08:04:26 crc kubenswrapper[4971]: I1127 08:04:26.415173 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" gracePeriod=600
Nov 27 08:04:26 crc kubenswrapper[4971]: E1127 08:04:26.564033 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:04:26 crc kubenswrapper[4971]: I1127 08:04:26.818003 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" exitCode=0
Nov 27 08:04:26 crc kubenswrapper[4971]: I1127 08:04:26.818208 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f"}
Nov 27 08:04:26 crc kubenswrapper[4971]: I1127 08:04:26.818726 4971 scope.go:117] "RemoveContainer" containerID="2f816c0bb2be43107714feedadec18053ad453801121cb429b2fc5ed0df8ec95"
Nov 27 08:04:26 crc kubenswrapper[4971]: I1127 08:04:26.819443 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f"
Nov 27 08:04:26 crc kubenswrapper[4971]: E1127 08:04:26.819787 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:04:28 crc kubenswrapper[4971]: I1127 08:04:28.843465 4971 generic.go:334] "Generic (PLEG): container finished" podID="392857f2-ef69-4123-92b3-a9398fda5c32" containerID="1e0f57ab736e46c8203c40d6057168a25638b88ed281451dfea3d43dc08b97a4" exitCode=0
Nov 27 08:04:28 crc kubenswrapper[4971]: I1127 08:04:28.843612 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s84fl" event={"ID":"392857f2-ef69-4123-92b3-a9398fda5c32","Type":"ContainerDied","Data":"1e0f57ab736e46c8203c40d6057168a25638b88ed281451dfea3d43dc08b97a4"}
Nov 27 08:04:30 crc kubenswrapper[4971]: I1127 08:04:30.214045 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7mwkt"]
Nov 27 08:04:30 crc kubenswrapper[4971]: I1127 08:04:30.216176 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7mwkt"
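[Editor's note: the liveness-probe failure above is an ordinary HTTP GET against the endpoint shown in the log (127.0.0.1:8798/health). A self-contained sketch of an equivalent check, assuming the documented Kubernetes convention that any status in 200-399 counts as healthy; this is an illustration, not the kubelet's prober code.]

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probe performs one HTTP liveness check the way the entries above describe:
// any transport error (e.g. "connect: connection refused") or a status
// outside 200-399 counts as a failure.
func probe(url string) error {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unhealthy: HTTP %d", resp.StatusCode)
	}
	return nil
}

func main() {
	// Endpoint taken from the machine-config-daemon probe entries above.
	if err := probe("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println("Probe failed:", err)
		return
	}
	fmt.Println("Probe succeeded")
}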
Nov 27 08:04:30 crc kubenswrapper[4971]: I1127 08:04:30.235149 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7mwkt"]
Nov 27 08:04:30 crc kubenswrapper[4971]: I1127 08:04:30.319213 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q42nj\" (UniqueName: \"kubernetes.io/projected/5a99341a-f5dd-46fb-b613-180dc9da408c-kube-api-access-q42nj\") pod \"community-operators-7mwkt\" (UID: \"5a99341a-f5dd-46fb-b613-180dc9da408c\") " pod="openshift-marketplace/community-operators-7mwkt"
Nov 27 08:04:30 crc kubenswrapper[4971]: I1127 08:04:30.319316 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a99341a-f5dd-46fb-b613-180dc9da408c-catalog-content\") pod \"community-operators-7mwkt\" (UID: \"5a99341a-f5dd-46fb-b613-180dc9da408c\") " pod="openshift-marketplace/community-operators-7mwkt"
Nov 27 08:04:30 crc kubenswrapper[4971]: I1127 08:04:30.319671 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a99341a-f5dd-46fb-b613-180dc9da408c-utilities\") pod \"community-operators-7mwkt\" (UID: \"5a99341a-f5dd-46fb-b613-180dc9da408c\") " pod="openshift-marketplace/community-operators-7mwkt"
Nov 27 08:04:30 crc kubenswrapper[4971]: I1127 08:04:30.420778 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a99341a-f5dd-46fb-b613-180dc9da408c-catalog-content\") pod \"community-operators-7mwkt\" (UID: \"5a99341a-f5dd-46fb-b613-180dc9da408c\") " pod="openshift-marketplace/community-operators-7mwkt"
Nov 27 08:04:30 crc kubenswrapper[4971]: I1127 08:04:30.420854 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a99341a-f5dd-46fb-b613-180dc9da408c-utilities\") pod \"community-operators-7mwkt\" (UID: \"5a99341a-f5dd-46fb-b613-180dc9da408c\") " pod="openshift-marketplace/community-operators-7mwkt"
Nov 27 08:04:30 crc kubenswrapper[4971]: I1127 08:04:30.420903 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q42nj\" (UniqueName: \"kubernetes.io/projected/5a99341a-f5dd-46fb-b613-180dc9da408c-kube-api-access-q42nj\") pod \"community-operators-7mwkt\" (UID: \"5a99341a-f5dd-46fb-b613-180dc9da408c\") " pod="openshift-marketplace/community-operators-7mwkt"
Nov 27 08:04:30 crc kubenswrapper[4971]: I1127 08:04:30.421583 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a99341a-f5dd-46fb-b613-180dc9da408c-catalog-content\") pod \"community-operators-7mwkt\" (UID: \"5a99341a-f5dd-46fb-b613-180dc9da408c\") " pod="openshift-marketplace/community-operators-7mwkt"
Nov 27 08:04:30 crc kubenswrapper[4971]: I1127 08:04:30.421626 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a99341a-f5dd-46fb-b613-180dc9da408c-utilities\") pod \"community-operators-7mwkt\" (UID: \"5a99341a-f5dd-46fb-b613-180dc9da408c\") " pod="openshift-marketplace/community-operators-7mwkt"
Nov 27 08:04:30 crc kubenswrapper[4971]: I1127 08:04:30.444035 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q42nj\" (UniqueName: \"kubernetes.io/projected/5a99341a-f5dd-46fb-b613-180dc9da408c-kube-api-access-q42nj\") pod \"community-operators-7mwkt\" (UID: \"5a99341a-f5dd-46fb-b613-180dc9da408c\") " pod="openshift-marketplace/community-operators-7mwkt"
Nov 27 08:04:30 crc kubenswrapper[4971]: I1127 08:04:30.539633 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7mwkt"
Nov 27 08:04:30 crc kubenswrapper[4971]: I1127 08:04:30.884145 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7mwkt"]
Nov 27 08:04:30 crc kubenswrapper[4971]: I1127 08:04:30.884686 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s84fl" event={"ID":"392857f2-ef69-4123-92b3-a9398fda5c32","Type":"ContainerStarted","Data":"123bbf979b35fe56abc47f925e249f8548bb8d8b1eee77bb5c16aee4a6901fe7"}
Nov 27 08:04:30 crc kubenswrapper[4971]: I1127 08:04:30.922513 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-s84fl" podStartSLOduration=2.777275532 podStartE2EDuration="6.922485539s" podCreationTimestamp="2025-11-27 08:04:24 +0000 UTC" firstStartedPulling="2025-11-27 08:04:25.805692183 +0000 UTC m=+4303.997736091" lastFinishedPulling="2025-11-27 08:04:29.95090218 +0000 UTC m=+4308.142946098" observedRunningTime="2025-11-27 08:04:30.914843951 +0000 UTC m=+4309.106887869" watchObservedRunningTime="2025-11-27 08:04:30.922485539 +0000 UTC m=+4309.114529477"
Nov 27 08:04:31 crc kubenswrapper[4971]: I1127 08:04:31.894332 4971 generic.go:334] "Generic (PLEG): container finished" podID="5a99341a-f5dd-46fb-b613-180dc9da408c" containerID="b59b74f3e5ca045d07d2be95b36b75c359e8a302243a3991b93b1d0abc68e59a" exitCode=0
Nov 27 08:04:31 crc kubenswrapper[4971]: I1127 08:04:31.894595 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mwkt" event={"ID":"5a99341a-f5dd-46fb-b613-180dc9da408c","Type":"ContainerDied","Data":"b59b74f3e5ca045d07d2be95b36b75c359e8a302243a3991b93b1d0abc68e59a"}
Nov 27 08:04:31 crc kubenswrapper[4971]: I1127 08:04:31.895246 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mwkt" event={"ID":"5a99341a-f5dd-46fb-b613-180dc9da408c","Type":"ContainerStarted","Data":"1caeedb73101d0026d6b55708b7ca2d62c3bf8b959d6feea648e75bc0e6b1851"}
Nov 27 08:04:32 crc kubenswrapper[4971]: I1127 08:04:32.906234 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mwkt" event={"ID":"5a99341a-f5dd-46fb-b613-180dc9da408c","Type":"ContainerStarted","Data":"f87f41f810ed5ae583d38be599ffaad84273ee94d78bc4e970124ec259bc05ef"}
Nov 27 08:04:33 crc kubenswrapper[4971]: I1127 08:04:33.915965 4971 generic.go:334] "Generic (PLEG): container finished" podID="5a99341a-f5dd-46fb-b613-180dc9da408c" containerID="f87f41f810ed5ae583d38be599ffaad84273ee94d78bc4e970124ec259bc05ef" exitCode=0
Nov 27 08:04:33 crc kubenswrapper[4971]: I1127 08:04:33.916026 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mwkt" event={"ID":"5a99341a-f5dd-46fb-b613-180dc9da408c","Type":"ContainerDied","Data":"f87f41f810ed5ae583d38be599ffaad84273ee94d78bc4e970124ec259bc05ef"}
Nov 27 08:04:34 crc kubenswrapper[4971]: I1127 08:04:34.983845 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-s84fl"
status="unhealthy" pod="openshift-marketplace/redhat-marketplace-s84fl" Nov 27 08:04:34 crc kubenswrapper[4971]: I1127 08:04:34.984278 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-s84fl" Nov 27 08:04:35 crc kubenswrapper[4971]: I1127 08:04:35.050421 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-s84fl" Nov 27 08:04:35 crc kubenswrapper[4971]: I1127 08:04:35.935015 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mwkt" event={"ID":"5a99341a-f5dd-46fb-b613-180dc9da408c","Type":"ContainerStarted","Data":"9506a85a1c023d38f56789a5e197b19eac96eae67774a655e63bde12be090e9e"} Nov 27 08:04:35 crc kubenswrapper[4971]: I1127 08:04:35.963090 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7mwkt" podStartSLOduration=2.928122748 podStartE2EDuration="5.963068412s" podCreationTimestamp="2025-11-27 08:04:30 +0000 UTC" firstStartedPulling="2025-11-27 08:04:31.897103915 +0000 UTC m=+4310.089147833" lastFinishedPulling="2025-11-27 08:04:34.932049579 +0000 UTC m=+4313.124093497" observedRunningTime="2025-11-27 08:04:35.95737121 +0000 UTC m=+4314.149415148" watchObservedRunningTime="2025-11-27 08:04:35.963068412 +0000 UTC m=+4314.155112330" Nov 27 08:04:35 crc kubenswrapper[4971]: I1127 08:04:35.986809 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-s84fl" Nov 27 08:04:38 crc kubenswrapper[4971]: I1127 08:04:38.216368 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-s84fl"] Nov 27 08:04:38 crc kubenswrapper[4971]: I1127 08:04:38.216747 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-s84fl" podUID="392857f2-ef69-4123-92b3-a9398fda5c32" containerName="registry-server" containerID="cri-o://123bbf979b35fe56abc47f925e249f8548bb8d8b1eee77bb5c16aee4a6901fe7" gracePeriod=2 Nov 27 08:04:38 crc kubenswrapper[4971]: I1127 08:04:38.715373 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s84fl" Nov 27 08:04:38 crc kubenswrapper[4971]: I1127 08:04:38.786264 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6hpx\" (UniqueName: \"kubernetes.io/projected/392857f2-ef69-4123-92b3-a9398fda5c32-kube-api-access-x6hpx\") pod \"392857f2-ef69-4123-92b3-a9398fda5c32\" (UID: \"392857f2-ef69-4123-92b3-a9398fda5c32\") " Nov 27 08:04:38 crc kubenswrapper[4971]: I1127 08:04:38.786450 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/392857f2-ef69-4123-92b3-a9398fda5c32-catalog-content\") pod \"392857f2-ef69-4123-92b3-a9398fda5c32\" (UID: \"392857f2-ef69-4123-92b3-a9398fda5c32\") " Nov 27 08:04:38 crc kubenswrapper[4971]: I1127 08:04:38.786627 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/392857f2-ef69-4123-92b3-a9398fda5c32-utilities\") pod \"392857f2-ef69-4123-92b3-a9398fda5c32\" (UID: \"392857f2-ef69-4123-92b3-a9398fda5c32\") " Nov 27 08:04:38 crc kubenswrapper[4971]: I1127 08:04:38.787779 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/392857f2-ef69-4123-92b3-a9398fda5c32-utilities" (OuterVolumeSpecName: "utilities") pod "392857f2-ef69-4123-92b3-a9398fda5c32" (UID: "392857f2-ef69-4123-92b3-a9398fda5c32"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:04:38 crc kubenswrapper[4971]: I1127 08:04:38.795125 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/392857f2-ef69-4123-92b3-a9398fda5c32-kube-api-access-x6hpx" (OuterVolumeSpecName: "kube-api-access-x6hpx") pod "392857f2-ef69-4123-92b3-a9398fda5c32" (UID: "392857f2-ef69-4123-92b3-a9398fda5c32"). InnerVolumeSpecName "kube-api-access-x6hpx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:04:38 crc kubenswrapper[4971]: I1127 08:04:38.824115 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/392857f2-ef69-4123-92b3-a9398fda5c32-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "392857f2-ef69-4123-92b3-a9398fda5c32" (UID: "392857f2-ef69-4123-92b3-a9398fda5c32"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:04:38 crc kubenswrapper[4971]: I1127 08:04:38.889591 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6hpx\" (UniqueName: \"kubernetes.io/projected/392857f2-ef69-4123-92b3-a9398fda5c32-kube-api-access-x6hpx\") on node \"crc\" DevicePath \"\"" Nov 27 08:04:38 crc kubenswrapper[4971]: I1127 08:04:38.889659 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/392857f2-ef69-4123-92b3-a9398fda5c32-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 08:04:38 crc kubenswrapper[4971]: I1127 08:04:38.889672 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/392857f2-ef69-4123-92b3-a9398fda5c32-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 08:04:38 crc kubenswrapper[4971]: I1127 08:04:38.966072 4971 generic.go:334] "Generic (PLEG): container finished" podID="392857f2-ef69-4123-92b3-a9398fda5c32" containerID="123bbf979b35fe56abc47f925e249f8548bb8d8b1eee77bb5c16aee4a6901fe7" exitCode=0 Nov 27 08:04:38 crc kubenswrapper[4971]: I1127 08:04:38.966138 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s84fl" event={"ID":"392857f2-ef69-4123-92b3-a9398fda5c32","Type":"ContainerDied","Data":"123bbf979b35fe56abc47f925e249f8548bb8d8b1eee77bb5c16aee4a6901fe7"} Nov 27 08:04:38 crc kubenswrapper[4971]: I1127 08:04:38.966177 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s84fl" event={"ID":"392857f2-ef69-4123-92b3-a9398fda5c32","Type":"ContainerDied","Data":"5b897a519f6594f7b60c5619a13882e63e9c376950c2921e02e9de6f292b8ec5"} Nov 27 08:04:38 crc kubenswrapper[4971]: I1127 08:04:38.966202 4971 scope.go:117] "RemoveContainer" containerID="123bbf979b35fe56abc47f925e249f8548bb8d8b1eee77bb5c16aee4a6901fe7" Nov 27 08:04:38 crc kubenswrapper[4971]: I1127 08:04:38.966377 4971 util.go:48] "No ready sandbox for pod can be found. 
Nov 27 08:04:38 crc kubenswrapper[4971]: I1127 08:04:38.988897 4971 scope.go:117] "RemoveContainer" containerID="1e0f57ab736e46c8203c40d6057168a25638b88ed281451dfea3d43dc08b97a4"
Nov 27 08:04:39 crc kubenswrapper[4971]: I1127 08:04:39.027322 4971 scope.go:117] "RemoveContainer" containerID="56a24447571aa004ccd06e9110572113f25e40210cecd8aa79f1c09bcbaaa257"
Nov 27 08:04:39 crc kubenswrapper[4971]: I1127 08:04:39.030200 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-s84fl"]
Nov 27 08:04:39 crc kubenswrapper[4971]: I1127 08:04:39.039314 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-s84fl"]
Nov 27 08:04:39 crc kubenswrapper[4971]: I1127 08:04:39.055583 4971 scope.go:117] "RemoveContainer" containerID="123bbf979b35fe56abc47f925e249f8548bb8d8b1eee77bb5c16aee4a6901fe7"
Nov 27 08:04:39 crc kubenswrapper[4971]: E1127 08:04:39.056230 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"123bbf979b35fe56abc47f925e249f8548bb8d8b1eee77bb5c16aee4a6901fe7\": container with ID starting with 123bbf979b35fe56abc47f925e249f8548bb8d8b1eee77bb5c16aee4a6901fe7 not found: ID does not exist" containerID="123bbf979b35fe56abc47f925e249f8548bb8d8b1eee77bb5c16aee4a6901fe7"
Nov 27 08:04:39 crc kubenswrapper[4971]: I1127 08:04:39.056286 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"123bbf979b35fe56abc47f925e249f8548bb8d8b1eee77bb5c16aee4a6901fe7"} err="failed to get container status \"123bbf979b35fe56abc47f925e249f8548bb8d8b1eee77bb5c16aee4a6901fe7\": rpc error: code = NotFound desc = could not find container \"123bbf979b35fe56abc47f925e249f8548bb8d8b1eee77bb5c16aee4a6901fe7\": container with ID starting with 123bbf979b35fe56abc47f925e249f8548bb8d8b1eee77bb5c16aee4a6901fe7 not found: ID does not exist"
Nov 27 08:04:39 crc kubenswrapper[4971]: I1127 08:04:39.056329 4971 scope.go:117] "RemoveContainer" containerID="1e0f57ab736e46c8203c40d6057168a25638b88ed281451dfea3d43dc08b97a4"
Nov 27 08:04:39 crc kubenswrapper[4971]: E1127 08:04:39.056872 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e0f57ab736e46c8203c40d6057168a25638b88ed281451dfea3d43dc08b97a4\": container with ID starting with 1e0f57ab736e46c8203c40d6057168a25638b88ed281451dfea3d43dc08b97a4 not found: ID does not exist" containerID="1e0f57ab736e46c8203c40d6057168a25638b88ed281451dfea3d43dc08b97a4"
Nov 27 08:04:39 crc kubenswrapper[4971]: I1127 08:04:39.056934 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e0f57ab736e46c8203c40d6057168a25638b88ed281451dfea3d43dc08b97a4"} err="failed to get container status \"1e0f57ab736e46c8203c40d6057168a25638b88ed281451dfea3d43dc08b97a4\": rpc error: code = NotFound desc = could not find container \"1e0f57ab736e46c8203c40d6057168a25638b88ed281451dfea3d43dc08b97a4\": container with ID starting with 1e0f57ab736e46c8203c40d6057168a25638b88ed281451dfea3d43dc08b97a4 not found: ID does not exist"
Nov 27 08:04:39 crc kubenswrapper[4971]: I1127 08:04:39.056977 4971 scope.go:117] "RemoveContainer" containerID="56a24447571aa004ccd06e9110572113f25e40210cecd8aa79f1c09bcbaaa257"
Nov 27 08:04:39 crc kubenswrapper[4971]: E1127 08:04:39.057394 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56a24447571aa004ccd06e9110572113f25e40210cecd8aa79f1c09bcbaaa257\": container with ID starting with 56a24447571aa004ccd06e9110572113f25e40210cecd8aa79f1c09bcbaaa257 not found: ID does not exist" containerID="56a24447571aa004ccd06e9110572113f25e40210cecd8aa79f1c09bcbaaa257"
Nov 27 08:04:39 crc kubenswrapper[4971]: I1127 08:04:39.057446 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56a24447571aa004ccd06e9110572113f25e40210cecd8aa79f1c09bcbaaa257"} err="failed to get container status \"56a24447571aa004ccd06e9110572113f25e40210cecd8aa79f1c09bcbaaa257\": rpc error: code = NotFound desc = could not find container \"56a24447571aa004ccd06e9110572113f25e40210cecd8aa79f1c09bcbaaa257\": container with ID starting with 56a24447571aa004ccd06e9110572113f25e40210cecd8aa79f1c09bcbaaa257 not found: ID does not exist"
Nov 27 08:04:39 crc kubenswrapper[4971]: I1127 08:04:39.550347 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f"
Nov 27 08:04:39 crc kubenswrapper[4971]: E1127 08:04:39.551059 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:04:40 crc kubenswrapper[4971]: I1127 08:04:40.540731 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7mwkt"
Nov 27 08:04:40 crc kubenswrapper[4971]: I1127 08:04:40.540809 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7mwkt"
Nov 27 08:04:40 crc kubenswrapper[4971]: I1127 08:04:40.567610 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="392857f2-ef69-4123-92b3-a9398fda5c32" path="/var/lib/kubelet/pods/392857f2-ef69-4123-92b3-a9398fda5c32/volumes"
Nov 27 08:04:40 crc kubenswrapper[4971]: I1127 08:04:40.595519 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7mwkt"
Nov 27 08:04:41 crc kubenswrapper[4971]: I1127 08:04:41.083047 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7mwkt"
Nov 27 08:04:47 crc kubenswrapper[4971]: I1127 08:04:47.214170 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7mwkt"]
Nov 27 08:04:47 crc kubenswrapper[4971]: I1127 08:04:47.215457 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7mwkt" podUID="5a99341a-f5dd-46fb-b613-180dc9da408c" containerName="registry-server" containerID="cri-o://9506a85a1c023d38f56789a5e197b19eac96eae67774a655e63bde12be090e9e" gracePeriod=2
Nov 27 08:04:47 crc kubenswrapper[4971]: I1127 08:04:47.627040 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7mwkt"
Nov 27 08:04:47 crc kubenswrapper[4971]: I1127 08:04:47.745337 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q42nj\" (UniqueName: \"kubernetes.io/projected/5a99341a-f5dd-46fb-b613-180dc9da408c-kube-api-access-q42nj\") pod \"5a99341a-f5dd-46fb-b613-180dc9da408c\" (UID: \"5a99341a-f5dd-46fb-b613-180dc9da408c\") "
Nov 27 08:04:47 crc kubenswrapper[4971]: I1127 08:04:47.745793 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a99341a-f5dd-46fb-b613-180dc9da408c-catalog-content\") pod \"5a99341a-f5dd-46fb-b613-180dc9da408c\" (UID: \"5a99341a-f5dd-46fb-b613-180dc9da408c\") "
Nov 27 08:04:47 crc kubenswrapper[4971]: I1127 08:04:47.745931 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a99341a-f5dd-46fb-b613-180dc9da408c-utilities\") pod \"5a99341a-f5dd-46fb-b613-180dc9da408c\" (UID: \"5a99341a-f5dd-46fb-b613-180dc9da408c\") "
Nov 27 08:04:47 crc kubenswrapper[4971]: I1127 08:04:47.749152 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a99341a-f5dd-46fb-b613-180dc9da408c-utilities" (OuterVolumeSpecName: "utilities") pod "5a99341a-f5dd-46fb-b613-180dc9da408c" (UID: "5a99341a-f5dd-46fb-b613-180dc9da408c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 08:04:47 crc kubenswrapper[4971]: I1127 08:04:47.754756 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a99341a-f5dd-46fb-b613-180dc9da408c-kube-api-access-q42nj" (OuterVolumeSpecName: "kube-api-access-q42nj") pod "5a99341a-f5dd-46fb-b613-180dc9da408c" (UID: "5a99341a-f5dd-46fb-b613-180dc9da408c"). InnerVolumeSpecName "kube-api-access-q42nj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:04:47 crc kubenswrapper[4971]: I1127 08:04:47.807507 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a99341a-f5dd-46fb-b613-180dc9da408c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5a99341a-f5dd-46fb-b613-180dc9da408c" (UID: "5a99341a-f5dd-46fb-b613-180dc9da408c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 08:04:47 crc kubenswrapper[4971]: I1127 08:04:47.848213 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a99341a-f5dd-46fb-b613-180dc9da408c-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 27 08:04:47 crc kubenswrapper[4971]: I1127 08:04:47.848260 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a99341a-f5dd-46fb-b613-180dc9da408c-utilities\") on node \"crc\" DevicePath \"\""
Nov 27 08:04:47 crc kubenswrapper[4971]: I1127 08:04:47.848272 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q42nj\" (UniqueName: \"kubernetes.io/projected/5a99341a-f5dd-46fb-b613-180dc9da408c-kube-api-access-q42nj\") on node \"crc\" DevicePath \"\""
Nov 27 08:04:48 crc kubenswrapper[4971]: I1127 08:04:48.050691 4971 generic.go:334] "Generic (PLEG): container finished" podID="5a99341a-f5dd-46fb-b613-180dc9da408c" containerID="9506a85a1c023d38f56789a5e197b19eac96eae67774a655e63bde12be090e9e" exitCode=0
Nov 27 08:04:48 crc kubenswrapper[4971]: I1127 08:04:48.050775 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mwkt" event={"ID":"5a99341a-f5dd-46fb-b613-180dc9da408c","Type":"ContainerDied","Data":"9506a85a1c023d38f56789a5e197b19eac96eae67774a655e63bde12be090e9e"}
Nov 27 08:04:48 crc kubenswrapper[4971]: I1127 08:04:48.050797 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7mwkt"
Nov 27 08:04:48 crc kubenswrapper[4971]: I1127 08:04:48.050813 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mwkt" event={"ID":"5a99341a-f5dd-46fb-b613-180dc9da408c","Type":"ContainerDied","Data":"1caeedb73101d0026d6b55708b7ca2d62c3bf8b959d6feea648e75bc0e6b1851"}
Nov 27 08:04:48 crc kubenswrapper[4971]: I1127 08:04:48.050864 4971 scope.go:117] "RemoveContainer" containerID="9506a85a1c023d38f56789a5e197b19eac96eae67774a655e63bde12be090e9e"
Nov 27 08:04:48 crc kubenswrapper[4971]: I1127 08:04:48.089867 4971 scope.go:117] "RemoveContainer" containerID="f87f41f810ed5ae583d38be599ffaad84273ee94d78bc4e970124ec259bc05ef"
Nov 27 08:04:48 crc kubenswrapper[4971]: I1127 08:04:48.101696 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7mwkt"]
Nov 27 08:04:48 crc kubenswrapper[4971]: I1127 08:04:48.109017 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7mwkt"]
Nov 27 08:04:48 crc kubenswrapper[4971]: I1127 08:04:48.118852 4971 scope.go:117] "RemoveContainer" containerID="b59b74f3e5ca045d07d2be95b36b75c359e8a302243a3991b93b1d0abc68e59a"
Nov 27 08:04:48 crc kubenswrapper[4971]: I1127 08:04:48.145297 4971 scope.go:117] "RemoveContainer" containerID="9506a85a1c023d38f56789a5e197b19eac96eae67774a655e63bde12be090e9e"
Nov 27 08:04:48 crc kubenswrapper[4971]: E1127 08:04:48.146025 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9506a85a1c023d38f56789a5e197b19eac96eae67774a655e63bde12be090e9e\": container with ID starting with 9506a85a1c023d38f56789a5e197b19eac96eae67774a655e63bde12be090e9e not found: ID does not exist" containerID="9506a85a1c023d38f56789a5e197b19eac96eae67774a655e63bde12be090e9e"
Nov 27 08:04:48 crc kubenswrapper[4971]: I1127 08:04:48.146088 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9506a85a1c023d38f56789a5e197b19eac96eae67774a655e63bde12be090e9e"} err="failed to get container status \"9506a85a1c023d38f56789a5e197b19eac96eae67774a655e63bde12be090e9e\": rpc error: code = NotFound desc = could not find container \"9506a85a1c023d38f56789a5e197b19eac96eae67774a655e63bde12be090e9e\": container with ID starting with 9506a85a1c023d38f56789a5e197b19eac96eae67774a655e63bde12be090e9e not found: ID does not exist"
Nov 27 08:04:48 crc kubenswrapper[4971]: I1127 08:04:48.146130 4971 scope.go:117] "RemoveContainer" containerID="f87f41f810ed5ae583d38be599ffaad84273ee94d78bc4e970124ec259bc05ef"
Nov 27 08:04:48 crc kubenswrapper[4971]: E1127 08:04:48.146692 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f87f41f810ed5ae583d38be599ffaad84273ee94d78bc4e970124ec259bc05ef\": container with ID starting with f87f41f810ed5ae583d38be599ffaad84273ee94d78bc4e970124ec259bc05ef not found: ID does not exist" containerID="f87f41f810ed5ae583d38be599ffaad84273ee94d78bc4e970124ec259bc05ef"
Nov 27 08:04:48 crc kubenswrapper[4971]: I1127 08:04:48.146728 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f87f41f810ed5ae583d38be599ffaad84273ee94d78bc4e970124ec259bc05ef"} err="failed to get container status \"f87f41f810ed5ae583d38be599ffaad84273ee94d78bc4e970124ec259bc05ef\": rpc error: code = NotFound desc = could not find container \"f87f41f810ed5ae583d38be599ffaad84273ee94d78bc4e970124ec259bc05ef\": container with ID starting with f87f41f810ed5ae583d38be599ffaad84273ee94d78bc4e970124ec259bc05ef not found: ID does not exist"
Nov 27 08:04:48 crc kubenswrapper[4971]: I1127 08:04:48.146749 4971 scope.go:117] "RemoveContainer" containerID="b59b74f3e5ca045d07d2be95b36b75c359e8a302243a3991b93b1d0abc68e59a"
Nov 27 08:04:48 crc kubenswrapper[4971]: E1127 08:04:48.147117 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b59b74f3e5ca045d07d2be95b36b75c359e8a302243a3991b93b1d0abc68e59a\": container with ID starting with b59b74f3e5ca045d07d2be95b36b75c359e8a302243a3991b93b1d0abc68e59a not found: ID does not exist" containerID="b59b74f3e5ca045d07d2be95b36b75c359e8a302243a3991b93b1d0abc68e59a"
Nov 27 08:04:48 crc kubenswrapper[4971]: I1127 08:04:48.147172 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b59b74f3e5ca045d07d2be95b36b75c359e8a302243a3991b93b1d0abc68e59a"} err="failed to get container status \"b59b74f3e5ca045d07d2be95b36b75c359e8a302243a3991b93b1d0abc68e59a\": rpc error: code = NotFound desc = could not find container \"b59b74f3e5ca045d07d2be95b36b75c359e8a302243a3991b93b1d0abc68e59a\": container with ID starting with b59b74f3e5ca045d07d2be95b36b75c359e8a302243a3991b93b1d0abc68e59a not found: ID does not exist"
Nov 27 08:04:48 crc kubenswrapper[4971]: I1127 08:04:48.559823 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a99341a-f5dd-46fb-b613-180dc9da408c" path="/var/lib/kubelet/pods/5a99341a-f5dd-46fb-b613-180dc9da408c/volumes"
Nov 27 08:04:53 crc kubenswrapper[4971]: I1127 08:04:53.550566 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f"
Nov 27 08:04:53 crc kubenswrapper[4971]: E1127 08:04:53.551505 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:05:04 crc kubenswrapper[4971]: I1127 08:05:04.550884 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:05:04 crc kubenswrapper[4971]: E1127 08:05:04.551622 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:05:19 crc kubenswrapper[4971]: I1127 08:05:19.550246 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:05:19 crc kubenswrapper[4971]: E1127 08:05:19.551044 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:05:30 crc kubenswrapper[4971]: I1127 08:05:30.550214 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:05:30 crc kubenswrapper[4971]: E1127 08:05:30.551188 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:05:41 crc kubenswrapper[4971]: I1127 08:05:41.550893 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:05:41 crc kubenswrapper[4971]: E1127 08:05:41.552113 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:05:53 crc kubenswrapper[4971]: I1127 08:05:53.550848 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:05:53 crc kubenswrapper[4971]: E1127 08:05:53.551902 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:06:04 crc kubenswrapper[4971]: I1127 08:06:04.550842 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:06:04 crc kubenswrapper[4971]: E1127 08:06:04.551853 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:06:16 crc kubenswrapper[4971]: I1127 08:06:16.551034 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:06:16 crc kubenswrapper[4971]: E1127 08:06:16.553689 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:06:31 crc kubenswrapper[4971]: I1127 08:06:31.550932 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:06:31 crc kubenswrapper[4971]: E1127 08:06:31.552427 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:06:44 crc kubenswrapper[4971]: I1127 08:06:44.551134 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:06:44 crc kubenswrapper[4971]: E1127 08:06:44.552016 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:06:55 crc kubenswrapper[4971]: I1127 08:06:55.551376 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:06:55 crc kubenswrapper[4971]: E1127 08:06:55.553635 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" 
podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:07:06 crc kubenswrapper[4971]: I1127 08:07:06.550862 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:07:06 crc kubenswrapper[4971]: E1127 08:07:06.551477 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:07:18 crc kubenswrapper[4971]: I1127 08:07:18.550647 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:07:18 crc kubenswrapper[4971]: E1127 08:07:18.551703 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:07:30 crc kubenswrapper[4971]: I1127 08:07:30.550737 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:07:30 crc kubenswrapper[4971]: E1127 08:07:30.551731 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:07:43 crc kubenswrapper[4971]: I1127 08:07:43.550969 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:07:43 crc kubenswrapper[4971]: E1127 08:07:43.552163 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:07:57 crc kubenswrapper[4971]: I1127 08:07:57.551072 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:07:57 crc kubenswrapper[4971]: E1127 08:07:57.553153 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:08:08 crc kubenswrapper[4971]: I1127 08:08:08.550122 4971 scope.go:117] "RemoveContainer" 
containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:08:08 crc kubenswrapper[4971]: E1127 08:08:08.550993 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:08:19 crc kubenswrapper[4971]: I1127 08:08:19.552167 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:08:19 crc kubenswrapper[4971]: E1127 08:08:19.553795 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:08:32 crc kubenswrapper[4971]: I1127 08:08:32.559145 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:08:32 crc kubenswrapper[4971]: E1127 08:08:32.560923 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:08:46 crc kubenswrapper[4971]: I1127 08:08:46.556140 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:08:46 crc kubenswrapper[4971]: E1127 08:08:46.557651 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:08:59 crc kubenswrapper[4971]: I1127 08:08:59.551323 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:08:59 crc kubenswrapper[4971]: E1127 08:08:59.552461 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:09:13 crc kubenswrapper[4971]: I1127 08:09:13.550920 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:09:13 crc kubenswrapper[4971]: E1127 08:09:13.551808 4971 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:09:27 crc kubenswrapper[4971]: I1127 08:09:27.550473 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:09:28 crc kubenswrapper[4971]: I1127 08:09:28.329061 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"26fa812a55b4f3841abae52bb3428271d7ad0f2d0eba615b6bab0abc70b4002f"} Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.474190 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9vxcg"] Nov 27 08:11:26 crc kubenswrapper[4971]: E1127 08:11:26.475165 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a99341a-f5dd-46fb-b613-180dc9da408c" containerName="extract-utilities" Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.475181 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a99341a-f5dd-46fb-b613-180dc9da408c" containerName="extract-utilities" Nov 27 08:11:26 crc kubenswrapper[4971]: E1127 08:11:26.475200 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="392857f2-ef69-4123-92b3-a9398fda5c32" containerName="extract-utilities" Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.475207 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="392857f2-ef69-4123-92b3-a9398fda5c32" containerName="extract-utilities" Nov 27 08:11:26 crc kubenswrapper[4971]: E1127 08:11:26.475220 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="392857f2-ef69-4123-92b3-a9398fda5c32" containerName="extract-content" Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.475227 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="392857f2-ef69-4123-92b3-a9398fda5c32" containerName="extract-content" Nov 27 08:11:26 crc kubenswrapper[4971]: E1127 08:11:26.475242 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="392857f2-ef69-4123-92b3-a9398fda5c32" containerName="registry-server" Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.475248 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="392857f2-ef69-4123-92b3-a9398fda5c32" containerName="registry-server" Nov 27 08:11:26 crc kubenswrapper[4971]: E1127 08:11:26.475266 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a99341a-f5dd-46fb-b613-180dc9da408c" containerName="extract-content" Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.475272 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a99341a-f5dd-46fb-b613-180dc9da408c" containerName="extract-content" Nov 27 08:11:26 crc kubenswrapper[4971]: E1127 08:11:26.475283 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a99341a-f5dd-46fb-b613-180dc9da408c" containerName="registry-server" Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.475289 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a99341a-f5dd-46fb-b613-180dc9da408c" containerName="registry-server" Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.475920 4971 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="5a99341a-f5dd-46fb-b613-180dc9da408c" containerName="registry-server" Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.475944 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="392857f2-ef69-4123-92b3-a9398fda5c32" containerName="registry-server" Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.477564 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9vxcg" Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.495328 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9vxcg"] Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.547301 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5sbr\" (UniqueName: \"kubernetes.io/projected/076e71da-5327-466e-a54c-c2d21f02a073-kube-api-access-r5sbr\") pod \"redhat-operators-9vxcg\" (UID: \"076e71da-5327-466e-a54c-c2d21f02a073\") " pod="openshift-marketplace/redhat-operators-9vxcg" Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.547819 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/076e71da-5327-466e-a54c-c2d21f02a073-utilities\") pod \"redhat-operators-9vxcg\" (UID: \"076e71da-5327-466e-a54c-c2d21f02a073\") " pod="openshift-marketplace/redhat-operators-9vxcg" Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.547917 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/076e71da-5327-466e-a54c-c2d21f02a073-catalog-content\") pod \"redhat-operators-9vxcg\" (UID: \"076e71da-5327-466e-a54c-c2d21f02a073\") " pod="openshift-marketplace/redhat-operators-9vxcg" Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.650990 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/076e71da-5327-466e-a54c-c2d21f02a073-catalog-content\") pod \"redhat-operators-9vxcg\" (UID: \"076e71da-5327-466e-a54c-c2d21f02a073\") " pod="openshift-marketplace/redhat-operators-9vxcg" Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.651135 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5sbr\" (UniqueName: \"kubernetes.io/projected/076e71da-5327-466e-a54c-c2d21f02a073-kube-api-access-r5sbr\") pod \"redhat-operators-9vxcg\" (UID: \"076e71da-5327-466e-a54c-c2d21f02a073\") " pod="openshift-marketplace/redhat-operators-9vxcg" Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.651692 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/076e71da-5327-466e-a54c-c2d21f02a073-catalog-content\") pod \"redhat-operators-9vxcg\" (UID: \"076e71da-5327-466e-a54c-c2d21f02a073\") " pod="openshift-marketplace/redhat-operators-9vxcg" Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.652180 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/076e71da-5327-466e-a54c-c2d21f02a073-utilities\") pod \"redhat-operators-9vxcg\" (UID: \"076e71da-5327-466e-a54c-c2d21f02a073\") " pod="openshift-marketplace/redhat-operators-9vxcg" Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.652813 4971 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/076e71da-5327-466e-a54c-c2d21f02a073-utilities\") pod \"redhat-operators-9vxcg\" (UID: \"076e71da-5327-466e-a54c-c2d21f02a073\") " pod="openshift-marketplace/redhat-operators-9vxcg" Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.746494 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5sbr\" (UniqueName: \"kubernetes.io/projected/076e71da-5327-466e-a54c-c2d21f02a073-kube-api-access-r5sbr\") pod \"redhat-operators-9vxcg\" (UID: \"076e71da-5327-466e-a54c-c2d21f02a073\") " pod="openshift-marketplace/redhat-operators-9vxcg" Nov 27 08:11:26 crc kubenswrapper[4971]: I1127 08:11:26.826291 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9vxcg" Nov 27 08:11:27 crc kubenswrapper[4971]: I1127 08:11:27.270951 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9vxcg"] Nov 27 08:11:27 crc kubenswrapper[4971]: I1127 08:11:27.331069 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9vxcg" event={"ID":"076e71da-5327-466e-a54c-c2d21f02a073","Type":"ContainerStarted","Data":"048cf57200db3944558c84bb81e83680a6e0dae7b34234d746efcdedfe1f21f1"} Nov 27 08:11:28 crc kubenswrapper[4971]: I1127 08:11:28.340136 4971 generic.go:334] "Generic (PLEG): container finished" podID="076e71da-5327-466e-a54c-c2d21f02a073" containerID="7a9a3aa5d16da87de5a467fa7c94d0a2ae33b3c1eaffa9f97b8109ab541f6180" exitCode=0 Nov 27 08:11:28 crc kubenswrapper[4971]: I1127 08:11:28.340244 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9vxcg" event={"ID":"076e71da-5327-466e-a54c-c2d21f02a073","Type":"ContainerDied","Data":"7a9a3aa5d16da87de5a467fa7c94d0a2ae33b3c1eaffa9f97b8109ab541f6180"} Nov 27 08:11:28 crc kubenswrapper[4971]: I1127 08:11:28.342091 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 27 08:11:30 crc kubenswrapper[4971]: I1127 08:11:30.358204 4971 generic.go:334] "Generic (PLEG): container finished" podID="076e71da-5327-466e-a54c-c2d21f02a073" containerID="9a3276c54c5914a4dbd05623c607f41f803f3d0c41f299ef7294a76d1e4b8c40" exitCode=0 Nov 27 08:11:30 crc kubenswrapper[4971]: I1127 08:11:30.358265 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9vxcg" event={"ID":"076e71da-5327-466e-a54c-c2d21f02a073","Type":"ContainerDied","Data":"9a3276c54c5914a4dbd05623c607f41f803f3d0c41f299ef7294a76d1e4b8c40"} Nov 27 08:11:31 crc kubenswrapper[4971]: I1127 08:11:31.369845 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9vxcg" event={"ID":"076e71da-5327-466e-a54c-c2d21f02a073","Type":"ContainerStarted","Data":"7bf81ca780001518506b721c128d05711b9c9cbeea16a2df74cecbc192856e4c"} Nov 27 08:11:36 crc kubenswrapper[4971]: I1127 08:11:36.826955 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9vxcg" Nov 27 08:11:36 crc kubenswrapper[4971]: I1127 08:11:36.827568 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9vxcg" Nov 27 08:11:36 crc kubenswrapper[4971]: I1127 08:11:36.870391 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-operators-9vxcg" Nov 27 08:11:36 crc kubenswrapper[4971]: I1127 08:11:36.902018 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9vxcg" podStartSLOduration=8.387066989 podStartE2EDuration="10.901997632s" podCreationTimestamp="2025-11-27 08:11:26 +0000 UTC" firstStartedPulling="2025-11-27 08:11:28.341870252 +0000 UTC m=+4726.533914170" lastFinishedPulling="2025-11-27 08:11:30.856800855 +0000 UTC m=+4729.048844813" observedRunningTime="2025-11-27 08:11:31.392587668 +0000 UTC m=+4729.584631586" watchObservedRunningTime="2025-11-27 08:11:36.901997632 +0000 UTC m=+4735.094041550" Nov 27 08:11:37 crc kubenswrapper[4971]: I1127 08:11:37.470030 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9vxcg" Nov 27 08:11:39 crc kubenswrapper[4971]: I1127 08:11:39.653573 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9vxcg"] Nov 27 08:11:39 crc kubenswrapper[4971]: I1127 08:11:39.654349 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9vxcg" podUID="076e71da-5327-466e-a54c-c2d21f02a073" containerName="registry-server" containerID="cri-o://7bf81ca780001518506b721c128d05711b9c9cbeea16a2df74cecbc192856e4c" gracePeriod=2 Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.438046 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9vxcg" Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.470805 4971 generic.go:334] "Generic (PLEG): container finished" podID="076e71da-5327-466e-a54c-c2d21f02a073" containerID="7bf81ca780001518506b721c128d05711b9c9cbeea16a2df74cecbc192856e4c" exitCode=0 Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.470859 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9vxcg" event={"ID":"076e71da-5327-466e-a54c-c2d21f02a073","Type":"ContainerDied","Data":"7bf81ca780001518506b721c128d05711b9c9cbeea16a2df74cecbc192856e4c"} Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.470897 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9vxcg" event={"ID":"076e71da-5327-466e-a54c-c2d21f02a073","Type":"ContainerDied","Data":"048cf57200db3944558c84bb81e83680a6e0dae7b34234d746efcdedfe1f21f1"} Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.470920 4971 scope.go:117] "RemoveContainer" containerID="7bf81ca780001518506b721c128d05711b9c9cbeea16a2df74cecbc192856e4c" Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.470927 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9vxcg" Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.493145 4971 scope.go:117] "RemoveContainer" containerID="9a3276c54c5914a4dbd05623c607f41f803f3d0c41f299ef7294a76d1e4b8c40" Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.502654 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5sbr\" (UniqueName: \"kubernetes.io/projected/076e71da-5327-466e-a54c-c2d21f02a073-kube-api-access-r5sbr\") pod \"076e71da-5327-466e-a54c-c2d21f02a073\" (UID: \"076e71da-5327-466e-a54c-c2d21f02a073\") " Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.502722 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/076e71da-5327-466e-a54c-c2d21f02a073-utilities\") pod \"076e71da-5327-466e-a54c-c2d21f02a073\" (UID: \"076e71da-5327-466e-a54c-c2d21f02a073\") " Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.502789 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/076e71da-5327-466e-a54c-c2d21f02a073-catalog-content\") pod \"076e71da-5327-466e-a54c-c2d21f02a073\" (UID: \"076e71da-5327-466e-a54c-c2d21f02a073\") " Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.503808 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/076e71da-5327-466e-a54c-c2d21f02a073-utilities" (OuterVolumeSpecName: "utilities") pod "076e71da-5327-466e-a54c-c2d21f02a073" (UID: "076e71da-5327-466e-a54c-c2d21f02a073"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.509869 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/076e71da-5327-466e-a54c-c2d21f02a073-kube-api-access-r5sbr" (OuterVolumeSpecName: "kube-api-access-r5sbr") pod "076e71da-5327-466e-a54c-c2d21f02a073" (UID: "076e71da-5327-466e-a54c-c2d21f02a073"). InnerVolumeSpecName "kube-api-access-r5sbr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.513874 4971 scope.go:117] "RemoveContainer" containerID="7a9a3aa5d16da87de5a467fa7c94d0a2ae33b3c1eaffa9f97b8109ab541f6180" Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.582415 4971 scope.go:117] "RemoveContainer" containerID="7bf81ca780001518506b721c128d05711b9c9cbeea16a2df74cecbc192856e4c" Nov 27 08:11:41 crc kubenswrapper[4971]: E1127 08:11:41.583145 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bf81ca780001518506b721c128d05711b9c9cbeea16a2df74cecbc192856e4c\": container with ID starting with 7bf81ca780001518506b721c128d05711b9c9cbeea16a2df74cecbc192856e4c not found: ID does not exist" containerID="7bf81ca780001518506b721c128d05711b9c9cbeea16a2df74cecbc192856e4c" Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.583193 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bf81ca780001518506b721c128d05711b9c9cbeea16a2df74cecbc192856e4c"} err="failed to get container status \"7bf81ca780001518506b721c128d05711b9c9cbeea16a2df74cecbc192856e4c\": rpc error: code = NotFound desc = could not find container \"7bf81ca780001518506b721c128d05711b9c9cbeea16a2df74cecbc192856e4c\": container with ID starting with 7bf81ca780001518506b721c128d05711b9c9cbeea16a2df74cecbc192856e4c not found: ID does not exist" Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.583229 4971 scope.go:117] "RemoveContainer" containerID="9a3276c54c5914a4dbd05623c607f41f803f3d0c41f299ef7294a76d1e4b8c40" Nov 27 08:11:41 crc kubenswrapper[4971]: E1127 08:11:41.583600 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a3276c54c5914a4dbd05623c607f41f803f3d0c41f299ef7294a76d1e4b8c40\": container with ID starting with 9a3276c54c5914a4dbd05623c607f41f803f3d0c41f299ef7294a76d1e4b8c40 not found: ID does not exist" containerID="9a3276c54c5914a4dbd05623c607f41f803f3d0c41f299ef7294a76d1e4b8c40" Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.583647 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a3276c54c5914a4dbd05623c607f41f803f3d0c41f299ef7294a76d1e4b8c40"} err="failed to get container status \"9a3276c54c5914a4dbd05623c607f41f803f3d0c41f299ef7294a76d1e4b8c40\": rpc error: code = NotFound desc = could not find container \"9a3276c54c5914a4dbd05623c607f41f803f3d0c41f299ef7294a76d1e4b8c40\": container with ID starting with 9a3276c54c5914a4dbd05623c607f41f803f3d0c41f299ef7294a76d1e4b8c40 not found: ID does not exist" Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.583677 4971 scope.go:117] "RemoveContainer" containerID="7a9a3aa5d16da87de5a467fa7c94d0a2ae33b3c1eaffa9f97b8109ab541f6180" Nov 27 08:11:41 crc kubenswrapper[4971]: E1127 08:11:41.583973 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a9a3aa5d16da87de5a467fa7c94d0a2ae33b3c1eaffa9f97b8109ab541f6180\": container with ID starting with 7a9a3aa5d16da87de5a467fa7c94d0a2ae33b3c1eaffa9f97b8109ab541f6180 not found: ID does not exist" containerID="7a9a3aa5d16da87de5a467fa7c94d0a2ae33b3c1eaffa9f97b8109ab541f6180" Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.584000 4971 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"7a9a3aa5d16da87de5a467fa7c94d0a2ae33b3c1eaffa9f97b8109ab541f6180"} err="failed to get container status \"7a9a3aa5d16da87de5a467fa7c94d0a2ae33b3c1eaffa9f97b8109ab541f6180\": rpc error: code = NotFound desc = could not find container \"7a9a3aa5d16da87de5a467fa7c94d0a2ae33b3c1eaffa9f97b8109ab541f6180\": container with ID starting with 7a9a3aa5d16da87de5a467fa7c94d0a2ae33b3c1eaffa9f97b8109ab541f6180 not found: ID does not exist" Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.594523 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/076e71da-5327-466e-a54c-c2d21f02a073-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "076e71da-5327-466e-a54c-c2d21f02a073" (UID: "076e71da-5327-466e-a54c-c2d21f02a073"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.604676 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/076e71da-5327-466e-a54c-c2d21f02a073-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.604742 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/076e71da-5327-466e-a54c-c2d21f02a073-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.604762 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5sbr\" (UniqueName: \"kubernetes.io/projected/076e71da-5327-466e-a54c-c2d21f02a073-kube-api-access-r5sbr\") on node \"crc\" DevicePath \"\"" Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.806965 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9vxcg"] Nov 27 08:11:41 crc kubenswrapper[4971]: I1127 08:11:41.813399 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9vxcg"] Nov 27 08:11:42 crc kubenswrapper[4971]: I1127 08:11:42.567740 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="076e71da-5327-466e-a54c-c2d21f02a073" path="/var/lib/kubelet/pods/076e71da-5327-466e-a54c-c2d21f02a073/volumes" Nov 27 08:11:56 crc kubenswrapper[4971]: I1127 08:11:56.413752 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:11:56 crc kubenswrapper[4971]: I1127 08:11:56.414496 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:12:26 crc kubenswrapper[4971]: I1127 08:12:26.413296 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:12:26 crc kubenswrapper[4971]: I1127 08:12:26.414410 4971 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:12:39 crc kubenswrapper[4971]: I1127 08:12:39.984227 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hbm9v"] Nov 27 08:12:39 crc kubenswrapper[4971]: E1127 08:12:39.985324 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="076e71da-5327-466e-a54c-c2d21f02a073" containerName="extract-content" Nov 27 08:12:39 crc kubenswrapper[4971]: I1127 08:12:39.985341 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="076e71da-5327-466e-a54c-c2d21f02a073" containerName="extract-content" Nov 27 08:12:39 crc kubenswrapper[4971]: E1127 08:12:39.985360 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="076e71da-5327-466e-a54c-c2d21f02a073" containerName="extract-utilities" Nov 27 08:12:39 crc kubenswrapper[4971]: I1127 08:12:39.985369 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="076e71da-5327-466e-a54c-c2d21f02a073" containerName="extract-utilities" Nov 27 08:12:39 crc kubenswrapper[4971]: E1127 08:12:39.985390 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="076e71da-5327-466e-a54c-c2d21f02a073" containerName="registry-server" Nov 27 08:12:39 crc kubenswrapper[4971]: I1127 08:12:39.985400 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="076e71da-5327-466e-a54c-c2d21f02a073" containerName="registry-server" Nov 27 08:12:39 crc kubenswrapper[4971]: I1127 08:12:39.985655 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="076e71da-5327-466e-a54c-c2d21f02a073" containerName="registry-server" Nov 27 08:12:39 crc kubenswrapper[4971]: I1127 08:12:39.987048 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hbm9v" Nov 27 08:12:40 crc kubenswrapper[4971]: I1127 08:12:40.007353 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hbm9v"] Nov 27 08:12:40 crc kubenswrapper[4971]: I1127 08:12:40.189193 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0183a29-262b-486d-8ab4-8e178db33754-utilities\") pod \"certified-operators-hbm9v\" (UID: \"c0183a29-262b-486d-8ab4-8e178db33754\") " pod="openshift-marketplace/certified-operators-hbm9v" Nov 27 08:12:40 crc kubenswrapper[4971]: I1127 08:12:40.189323 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0183a29-262b-486d-8ab4-8e178db33754-catalog-content\") pod \"certified-operators-hbm9v\" (UID: \"c0183a29-262b-486d-8ab4-8e178db33754\") " pod="openshift-marketplace/certified-operators-hbm9v" Nov 27 08:12:40 crc kubenswrapper[4971]: I1127 08:12:40.189378 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sx9w2\" (UniqueName: \"kubernetes.io/projected/c0183a29-262b-486d-8ab4-8e178db33754-kube-api-access-sx9w2\") pod \"certified-operators-hbm9v\" (UID: \"c0183a29-262b-486d-8ab4-8e178db33754\") " pod="openshift-marketplace/certified-operators-hbm9v" Nov 27 08:12:40 crc kubenswrapper[4971]: I1127 08:12:40.290959 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0183a29-262b-486d-8ab4-8e178db33754-utilities\") pod \"certified-operators-hbm9v\" (UID: \"c0183a29-262b-486d-8ab4-8e178db33754\") " pod="openshift-marketplace/certified-operators-hbm9v" Nov 27 08:12:40 crc kubenswrapper[4971]: I1127 08:12:40.291023 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0183a29-262b-486d-8ab4-8e178db33754-catalog-content\") pod \"certified-operators-hbm9v\" (UID: \"c0183a29-262b-486d-8ab4-8e178db33754\") " pod="openshift-marketplace/certified-operators-hbm9v" Nov 27 08:12:40 crc kubenswrapper[4971]: I1127 08:12:40.291063 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sx9w2\" (UniqueName: \"kubernetes.io/projected/c0183a29-262b-486d-8ab4-8e178db33754-kube-api-access-sx9w2\") pod \"certified-operators-hbm9v\" (UID: \"c0183a29-262b-486d-8ab4-8e178db33754\") " pod="openshift-marketplace/certified-operators-hbm9v" Nov 27 08:12:40 crc kubenswrapper[4971]: I1127 08:12:40.291637 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0183a29-262b-486d-8ab4-8e178db33754-utilities\") pod \"certified-operators-hbm9v\" (UID: \"c0183a29-262b-486d-8ab4-8e178db33754\") " pod="openshift-marketplace/certified-operators-hbm9v" Nov 27 08:12:40 crc kubenswrapper[4971]: I1127 08:12:40.291726 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0183a29-262b-486d-8ab4-8e178db33754-catalog-content\") pod \"certified-operators-hbm9v\" (UID: \"c0183a29-262b-486d-8ab4-8e178db33754\") " pod="openshift-marketplace/certified-operators-hbm9v" Nov 27 08:12:40 crc kubenswrapper[4971]: I1127 08:12:40.314895 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-sx9w2\" (UniqueName: \"kubernetes.io/projected/c0183a29-262b-486d-8ab4-8e178db33754-kube-api-access-sx9w2\") pod \"certified-operators-hbm9v\" (UID: \"c0183a29-262b-486d-8ab4-8e178db33754\") " pod="openshift-marketplace/certified-operators-hbm9v" Nov 27 08:12:40 crc kubenswrapper[4971]: I1127 08:12:40.614399 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hbm9v" Nov 27 08:12:41 crc kubenswrapper[4971]: I1127 08:12:41.066345 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hbm9v"] Nov 27 08:12:41 crc kubenswrapper[4971]: W1127 08:12:41.071345 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0183a29_262b_486d_8ab4_8e178db33754.slice/crio-855cc75b4b39267a61f460e4ac57ef2da1b87efb5c86e9bb2944facc1d9c4bcc WatchSource:0}: Error finding container 855cc75b4b39267a61f460e4ac57ef2da1b87efb5c86e9bb2944facc1d9c4bcc: Status 404 returned error can't find the container with id 855cc75b4b39267a61f460e4ac57ef2da1b87efb5c86e9bb2944facc1d9c4bcc Nov 27 08:12:42 crc kubenswrapper[4971]: I1127 08:12:42.062700 4971 generic.go:334] "Generic (PLEG): container finished" podID="c0183a29-262b-486d-8ab4-8e178db33754" containerID="ad0b3b47eaefe56ac4b4286f71e5ebdd3a0a2403b9220f806acd4c6f2ce3d029" exitCode=0 Nov 27 08:12:42 crc kubenswrapper[4971]: I1127 08:12:42.062821 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hbm9v" event={"ID":"c0183a29-262b-486d-8ab4-8e178db33754","Type":"ContainerDied","Data":"ad0b3b47eaefe56ac4b4286f71e5ebdd3a0a2403b9220f806acd4c6f2ce3d029"} Nov 27 08:12:42 crc kubenswrapper[4971]: I1127 08:12:42.063043 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hbm9v" event={"ID":"c0183a29-262b-486d-8ab4-8e178db33754","Type":"ContainerStarted","Data":"855cc75b4b39267a61f460e4ac57ef2da1b87efb5c86e9bb2944facc1d9c4bcc"} Nov 27 08:12:44 crc kubenswrapper[4971]: I1127 08:12:44.079837 4971 generic.go:334] "Generic (PLEG): container finished" podID="c0183a29-262b-486d-8ab4-8e178db33754" containerID="90ae2cb3d9c6f8ba2c7cdc35bb6b52e00665bec610fad2c65fb26ad3513974bd" exitCode=0 Nov 27 08:12:44 crc kubenswrapper[4971]: I1127 08:12:44.080002 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hbm9v" event={"ID":"c0183a29-262b-486d-8ab4-8e178db33754","Type":"ContainerDied","Data":"90ae2cb3d9c6f8ba2c7cdc35bb6b52e00665bec610fad2c65fb26ad3513974bd"} Nov 27 08:12:45 crc kubenswrapper[4971]: I1127 08:12:45.091031 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hbm9v" event={"ID":"c0183a29-262b-486d-8ab4-8e178db33754","Type":"ContainerStarted","Data":"bb6b4bb44edf7add102c54582c232fe6da8d70d5ee7e8cefef927babd80f27ac"} Nov 27 08:12:45 crc kubenswrapper[4971]: I1127 08:12:45.117132 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hbm9v" podStartSLOduration=3.50026614 podStartE2EDuration="6.117110199s" podCreationTimestamp="2025-11-27 08:12:39 +0000 UTC" firstStartedPulling="2025-11-27 08:12:42.065189479 +0000 UTC m=+4800.257233407" lastFinishedPulling="2025-11-27 08:12:44.682033548 +0000 UTC m=+4802.874077466" observedRunningTime="2025-11-27 08:12:45.112765564 +0000 UTC 
m=+4803.304809522" watchObservedRunningTime="2025-11-27 08:12:45.117110199 +0000 UTC m=+4803.309154117" Nov 27 08:12:50 crc kubenswrapper[4971]: I1127 08:12:50.615122 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hbm9v" Nov 27 08:12:50 crc kubenswrapper[4971]: I1127 08:12:50.615806 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hbm9v" Nov 27 08:12:50 crc kubenswrapper[4971]: I1127 08:12:50.677754 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hbm9v" Nov 27 08:12:51 crc kubenswrapper[4971]: I1127 08:12:51.194319 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hbm9v" Nov 27 08:12:51 crc kubenswrapper[4971]: I1127 08:12:51.268004 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hbm9v"] Nov 27 08:12:53 crc kubenswrapper[4971]: I1127 08:12:53.164355 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hbm9v" podUID="c0183a29-262b-486d-8ab4-8e178db33754" containerName="registry-server" containerID="cri-o://bb6b4bb44edf7add102c54582c232fe6da8d70d5ee7e8cefef927babd80f27ac" gracePeriod=2 Nov 27 08:12:53 crc kubenswrapper[4971]: I1127 08:12:53.551646 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hbm9v" Nov 27 08:12:53 crc kubenswrapper[4971]: I1127 08:12:53.717194 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sx9w2\" (UniqueName: \"kubernetes.io/projected/c0183a29-262b-486d-8ab4-8e178db33754-kube-api-access-sx9w2\") pod \"c0183a29-262b-486d-8ab4-8e178db33754\" (UID: \"c0183a29-262b-486d-8ab4-8e178db33754\") " Nov 27 08:12:53 crc kubenswrapper[4971]: I1127 08:12:53.717271 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0183a29-262b-486d-8ab4-8e178db33754-utilities\") pod \"c0183a29-262b-486d-8ab4-8e178db33754\" (UID: \"c0183a29-262b-486d-8ab4-8e178db33754\") " Nov 27 08:12:53 crc kubenswrapper[4971]: I1127 08:12:53.717387 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0183a29-262b-486d-8ab4-8e178db33754-catalog-content\") pod \"c0183a29-262b-486d-8ab4-8e178db33754\" (UID: \"c0183a29-262b-486d-8ab4-8e178db33754\") " Nov 27 08:12:53 crc kubenswrapper[4971]: I1127 08:12:53.718432 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0183a29-262b-486d-8ab4-8e178db33754-utilities" (OuterVolumeSpecName: "utilities") pod "c0183a29-262b-486d-8ab4-8e178db33754" (UID: "c0183a29-262b-486d-8ab4-8e178db33754"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:12:53 crc kubenswrapper[4971]: I1127 08:12:53.731343 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0183a29-262b-486d-8ab4-8e178db33754-kube-api-access-sx9w2" (OuterVolumeSpecName: "kube-api-access-sx9w2") pod "c0183a29-262b-486d-8ab4-8e178db33754" (UID: "c0183a29-262b-486d-8ab4-8e178db33754"). InnerVolumeSpecName "kube-api-access-sx9w2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:12:53 crc kubenswrapper[4971]: I1127 08:12:53.771373 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0183a29-262b-486d-8ab4-8e178db33754-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c0183a29-262b-486d-8ab4-8e178db33754" (UID: "c0183a29-262b-486d-8ab4-8e178db33754"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:12:53 crc kubenswrapper[4971]: I1127 08:12:53.819772 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0183a29-262b-486d-8ab4-8e178db33754-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 08:12:53 crc kubenswrapper[4971]: I1127 08:12:53.819825 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0183a29-262b-486d-8ab4-8e178db33754-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 08:12:53 crc kubenswrapper[4971]: I1127 08:12:53.819847 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sx9w2\" (UniqueName: \"kubernetes.io/projected/c0183a29-262b-486d-8ab4-8e178db33754-kube-api-access-sx9w2\") on node \"crc\" DevicePath \"\"" Nov 27 08:12:54 crc kubenswrapper[4971]: I1127 08:12:54.178147 4971 generic.go:334] "Generic (PLEG): container finished" podID="c0183a29-262b-486d-8ab4-8e178db33754" containerID="bb6b4bb44edf7add102c54582c232fe6da8d70d5ee7e8cefef927babd80f27ac" exitCode=0 Nov 27 08:12:54 crc kubenswrapper[4971]: I1127 08:12:54.178220 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hbm9v" Nov 27 08:12:54 crc kubenswrapper[4971]: I1127 08:12:54.178219 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hbm9v" event={"ID":"c0183a29-262b-486d-8ab4-8e178db33754","Type":"ContainerDied","Data":"bb6b4bb44edf7add102c54582c232fe6da8d70d5ee7e8cefef927babd80f27ac"} Nov 27 08:12:54 crc kubenswrapper[4971]: I1127 08:12:54.178686 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hbm9v" event={"ID":"c0183a29-262b-486d-8ab4-8e178db33754","Type":"ContainerDied","Data":"855cc75b4b39267a61f460e4ac57ef2da1b87efb5c86e9bb2944facc1d9c4bcc"} Nov 27 08:12:54 crc kubenswrapper[4971]: I1127 08:12:54.178717 4971 scope.go:117] "RemoveContainer" containerID="bb6b4bb44edf7add102c54582c232fe6da8d70d5ee7e8cefef927babd80f27ac" Nov 27 08:12:54 crc kubenswrapper[4971]: I1127 08:12:54.196685 4971 scope.go:117] "RemoveContainer" containerID="90ae2cb3d9c6f8ba2c7cdc35bb6b52e00665bec610fad2c65fb26ad3513974bd" Nov 27 08:12:54 crc kubenswrapper[4971]: I1127 08:12:54.213112 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hbm9v"] Nov 27 08:12:54 crc kubenswrapper[4971]: I1127 08:12:54.219260 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hbm9v"] Nov 27 08:12:54 crc kubenswrapper[4971]: I1127 08:12:54.237174 4971 scope.go:117] "RemoveContainer" containerID="ad0b3b47eaefe56ac4b4286f71e5ebdd3a0a2403b9220f806acd4c6f2ce3d029" Nov 27 08:12:54 crc kubenswrapper[4971]: I1127 08:12:54.252940 4971 scope.go:117] "RemoveContainer" containerID="bb6b4bb44edf7add102c54582c232fe6da8d70d5ee7e8cefef927babd80f27ac" Nov 27 08:12:54 crc kubenswrapper[4971]: E1127 08:12:54.253448 4971 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb6b4bb44edf7add102c54582c232fe6da8d70d5ee7e8cefef927babd80f27ac\": container with ID starting with bb6b4bb44edf7add102c54582c232fe6da8d70d5ee7e8cefef927babd80f27ac not found: ID does not exist" containerID="bb6b4bb44edf7add102c54582c232fe6da8d70d5ee7e8cefef927babd80f27ac" Nov 27 08:12:54 crc kubenswrapper[4971]: I1127 08:12:54.253507 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb6b4bb44edf7add102c54582c232fe6da8d70d5ee7e8cefef927babd80f27ac"} err="failed to get container status \"bb6b4bb44edf7add102c54582c232fe6da8d70d5ee7e8cefef927babd80f27ac\": rpc error: code = NotFound desc = could not find container \"bb6b4bb44edf7add102c54582c232fe6da8d70d5ee7e8cefef927babd80f27ac\": container with ID starting with bb6b4bb44edf7add102c54582c232fe6da8d70d5ee7e8cefef927babd80f27ac not found: ID does not exist" Nov 27 08:12:54 crc kubenswrapper[4971]: I1127 08:12:54.253565 4971 scope.go:117] "RemoveContainer" containerID="90ae2cb3d9c6f8ba2c7cdc35bb6b52e00665bec610fad2c65fb26ad3513974bd" Nov 27 08:12:54 crc kubenswrapper[4971]: E1127 08:12:54.254073 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90ae2cb3d9c6f8ba2c7cdc35bb6b52e00665bec610fad2c65fb26ad3513974bd\": container with ID starting with 90ae2cb3d9c6f8ba2c7cdc35bb6b52e00665bec610fad2c65fb26ad3513974bd not found: ID does not exist" containerID="90ae2cb3d9c6f8ba2c7cdc35bb6b52e00665bec610fad2c65fb26ad3513974bd" Nov 27 08:12:54 crc kubenswrapper[4971]: I1127 08:12:54.254107 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90ae2cb3d9c6f8ba2c7cdc35bb6b52e00665bec610fad2c65fb26ad3513974bd"} err="failed to get container status \"90ae2cb3d9c6f8ba2c7cdc35bb6b52e00665bec610fad2c65fb26ad3513974bd\": rpc error: code = NotFound desc = could not find container \"90ae2cb3d9c6f8ba2c7cdc35bb6b52e00665bec610fad2c65fb26ad3513974bd\": container with ID starting with 90ae2cb3d9c6f8ba2c7cdc35bb6b52e00665bec610fad2c65fb26ad3513974bd not found: ID does not exist" Nov 27 08:12:54 crc kubenswrapper[4971]: I1127 08:12:54.254128 4971 scope.go:117] "RemoveContainer" containerID="ad0b3b47eaefe56ac4b4286f71e5ebdd3a0a2403b9220f806acd4c6f2ce3d029" Nov 27 08:12:54 crc kubenswrapper[4971]: E1127 08:12:54.254398 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad0b3b47eaefe56ac4b4286f71e5ebdd3a0a2403b9220f806acd4c6f2ce3d029\": container with ID starting with ad0b3b47eaefe56ac4b4286f71e5ebdd3a0a2403b9220f806acd4c6f2ce3d029 not found: ID does not exist" containerID="ad0b3b47eaefe56ac4b4286f71e5ebdd3a0a2403b9220f806acd4c6f2ce3d029" Nov 27 08:12:54 crc kubenswrapper[4971]: I1127 08:12:54.254429 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad0b3b47eaefe56ac4b4286f71e5ebdd3a0a2403b9220f806acd4c6f2ce3d029"} err="failed to get container status \"ad0b3b47eaefe56ac4b4286f71e5ebdd3a0a2403b9220f806acd4c6f2ce3d029\": rpc error: code = NotFound desc = could not find container \"ad0b3b47eaefe56ac4b4286f71e5ebdd3a0a2403b9220f806acd4c6f2ce3d029\": container with ID starting with ad0b3b47eaefe56ac4b4286f71e5ebdd3a0a2403b9220f806acd4c6f2ce3d029 not found: ID does not exist" Nov 27 08:12:54 crc kubenswrapper[4971]: I1127 08:12:54.559251 4971 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="c0183a29-262b-486d-8ab4-8e178db33754" path="/var/lib/kubelet/pods/c0183a29-262b-486d-8ab4-8e178db33754/volumes" Nov 27 08:12:56 crc kubenswrapper[4971]: I1127 08:12:56.413949 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:12:56 crc kubenswrapper[4971]: I1127 08:12:56.414620 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:12:56 crc kubenswrapper[4971]: I1127 08:12:56.414694 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 08:12:56 crc kubenswrapper[4971]: I1127 08:12:56.415666 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"26fa812a55b4f3841abae52bb3428271d7ad0f2d0eba615b6bab0abc70b4002f"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 08:12:56 crc kubenswrapper[4971]: I1127 08:12:56.415755 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://26fa812a55b4f3841abae52bb3428271d7ad0f2d0eba615b6bab0abc70b4002f" gracePeriod=600 Nov 27 08:12:57 crc kubenswrapper[4971]: I1127 08:12:57.205304 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="26fa812a55b4f3841abae52bb3428271d7ad0f2d0eba615b6bab0abc70b4002f" exitCode=0 Nov 27 08:12:57 crc kubenswrapper[4971]: I1127 08:12:57.205345 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"26fa812a55b4f3841abae52bb3428271d7ad0f2d0eba615b6bab0abc70b4002f"} Nov 27 08:12:57 crc kubenswrapper[4971]: I1127 08:12:57.205707 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"} Nov 27 08:12:57 crc kubenswrapper[4971]: I1127 08:12:57.205734 4971 scope.go:117] "RemoveContainer" containerID="68fab616a248a14b593a899bbe38d7ef12b8321e0ca1a445a290e3c1dbfedc2f" Nov 27 08:14:41 crc kubenswrapper[4971]: I1127 08:14:41.682921 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dqxjt"] Nov 27 08:14:41 crc kubenswrapper[4971]: E1127 08:14:41.684176 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0183a29-262b-486d-8ab4-8e178db33754" containerName="extract-utilities" Nov 27 08:14:41 crc kubenswrapper[4971]: I1127 08:14:41.684202 4971 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="c0183a29-262b-486d-8ab4-8e178db33754" containerName="extract-utilities" Nov 27 08:14:41 crc kubenswrapper[4971]: E1127 08:14:41.684230 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0183a29-262b-486d-8ab4-8e178db33754" containerName="registry-server" Nov 27 08:14:41 crc kubenswrapper[4971]: I1127 08:14:41.684242 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0183a29-262b-486d-8ab4-8e178db33754" containerName="registry-server" Nov 27 08:14:41 crc kubenswrapper[4971]: E1127 08:14:41.684267 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0183a29-262b-486d-8ab4-8e178db33754" containerName="extract-content" Nov 27 08:14:41 crc kubenswrapper[4971]: I1127 08:14:41.684278 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0183a29-262b-486d-8ab4-8e178db33754" containerName="extract-content" Nov 27 08:14:41 crc kubenswrapper[4971]: I1127 08:14:41.684560 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0183a29-262b-486d-8ab4-8e178db33754" containerName="registry-server" Nov 27 08:14:41 crc kubenswrapper[4971]: I1127 08:14:41.688254 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dqxjt" Nov 27 08:14:41 crc kubenswrapper[4971]: I1127 08:14:41.712260 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dqxjt"] Nov 27 08:14:41 crc kubenswrapper[4971]: I1127 08:14:41.796777 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e994151-ca2a-4036-8c17-626d791ab510-utilities\") pod \"redhat-marketplace-dqxjt\" (UID: \"1e994151-ca2a-4036-8c17-626d791ab510\") " pod="openshift-marketplace/redhat-marketplace-dqxjt" Nov 27 08:14:41 crc kubenswrapper[4971]: I1127 08:14:41.797136 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e994151-ca2a-4036-8c17-626d791ab510-catalog-content\") pod \"redhat-marketplace-dqxjt\" (UID: \"1e994151-ca2a-4036-8c17-626d791ab510\") " pod="openshift-marketplace/redhat-marketplace-dqxjt" Nov 27 08:14:41 crc kubenswrapper[4971]: I1127 08:14:41.797275 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfgh2\" (UniqueName: \"kubernetes.io/projected/1e994151-ca2a-4036-8c17-626d791ab510-kube-api-access-sfgh2\") pod \"redhat-marketplace-dqxjt\" (UID: \"1e994151-ca2a-4036-8c17-626d791ab510\") " pod="openshift-marketplace/redhat-marketplace-dqxjt" Nov 27 08:14:41 crc kubenswrapper[4971]: I1127 08:14:41.899035 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e994151-ca2a-4036-8c17-626d791ab510-catalog-content\") pod \"redhat-marketplace-dqxjt\" (UID: \"1e994151-ca2a-4036-8c17-626d791ab510\") " pod="openshift-marketplace/redhat-marketplace-dqxjt" Nov 27 08:14:41 crc kubenswrapper[4971]: I1127 08:14:41.899560 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sfgh2\" (UniqueName: \"kubernetes.io/projected/1e994151-ca2a-4036-8c17-626d791ab510-kube-api-access-sfgh2\") pod \"redhat-marketplace-dqxjt\" (UID: \"1e994151-ca2a-4036-8c17-626d791ab510\") " pod="openshift-marketplace/redhat-marketplace-dqxjt" Nov 27 08:14:41 crc kubenswrapper[4971]: I1127 08:14:41.899729 4971 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e994151-ca2a-4036-8c17-626d791ab510-utilities\") pod \"redhat-marketplace-dqxjt\" (UID: \"1e994151-ca2a-4036-8c17-626d791ab510\") " pod="openshift-marketplace/redhat-marketplace-dqxjt" Nov 27 08:14:41 crc kubenswrapper[4971]: I1127 08:14:41.899856 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e994151-ca2a-4036-8c17-626d791ab510-catalog-content\") pod \"redhat-marketplace-dqxjt\" (UID: \"1e994151-ca2a-4036-8c17-626d791ab510\") " pod="openshift-marketplace/redhat-marketplace-dqxjt" Nov 27 08:14:41 crc kubenswrapper[4971]: I1127 08:14:41.900068 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e994151-ca2a-4036-8c17-626d791ab510-utilities\") pod \"redhat-marketplace-dqxjt\" (UID: \"1e994151-ca2a-4036-8c17-626d791ab510\") " pod="openshift-marketplace/redhat-marketplace-dqxjt" Nov 27 08:14:41 crc kubenswrapper[4971]: I1127 08:14:41.925663 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfgh2\" (UniqueName: \"kubernetes.io/projected/1e994151-ca2a-4036-8c17-626d791ab510-kube-api-access-sfgh2\") pod \"redhat-marketplace-dqxjt\" (UID: \"1e994151-ca2a-4036-8c17-626d791ab510\") " pod="openshift-marketplace/redhat-marketplace-dqxjt" Nov 27 08:14:42 crc kubenswrapper[4971]: I1127 08:14:42.011145 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dqxjt" Nov 27 08:14:42 crc kubenswrapper[4971]: I1127 08:14:42.295614 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dqxjt"] Nov 27 08:14:43 crc kubenswrapper[4971]: I1127 08:14:43.134440 4971 generic.go:334] "Generic (PLEG): container finished" podID="1e994151-ca2a-4036-8c17-626d791ab510" containerID="996267aa24bab7eac301ecc9c7b67731e2f41c4e9b34b1454e231acd4427c5a4" exitCode=0 Nov 27 08:14:43 crc kubenswrapper[4971]: I1127 08:14:43.134587 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqxjt" event={"ID":"1e994151-ca2a-4036-8c17-626d791ab510","Type":"ContainerDied","Data":"996267aa24bab7eac301ecc9c7b67731e2f41c4e9b34b1454e231acd4427c5a4"} Nov 27 08:14:43 crc kubenswrapper[4971]: I1127 08:14:43.135024 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqxjt" event={"ID":"1e994151-ca2a-4036-8c17-626d791ab510","Type":"ContainerStarted","Data":"c5bc5a065d66a14366df201823aee7fa16bbcfc0999d62dbb79a2cdbc10fc44f"} Nov 27 08:14:44 crc kubenswrapper[4971]: I1127 08:14:44.146597 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqxjt" event={"ID":"1e994151-ca2a-4036-8c17-626d791ab510","Type":"ContainerStarted","Data":"0d35975579be9714a73995ac6331d1cacebf4adeacebe0e5d4640db88b70457b"} Nov 27 08:14:45 crc kubenswrapper[4971]: I1127 08:14:45.157938 4971 generic.go:334] "Generic (PLEG): container finished" podID="1e994151-ca2a-4036-8c17-626d791ab510" containerID="0d35975579be9714a73995ac6331d1cacebf4adeacebe0e5d4640db88b70457b" exitCode=0 Nov 27 08:14:45 crc kubenswrapper[4971]: I1127 08:14:45.157992 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqxjt" 
event={"ID":"1e994151-ca2a-4036-8c17-626d791ab510","Type":"ContainerDied","Data":"0d35975579be9714a73995ac6331d1cacebf4adeacebe0e5d4640db88b70457b"} Nov 27 08:14:47 crc kubenswrapper[4971]: I1127 08:14:47.176072 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqxjt" event={"ID":"1e994151-ca2a-4036-8c17-626d791ab510","Type":"ContainerStarted","Data":"a207b2786f94acdae776b98d4ad5ca1d51422b02245eb4a2e8b416b490a11333"} Nov 27 08:14:47 crc kubenswrapper[4971]: I1127 08:14:47.205401 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dqxjt" podStartSLOduration=3.097021235 podStartE2EDuration="6.205376805s" podCreationTimestamp="2025-11-27 08:14:41 +0000 UTC" firstStartedPulling="2025-11-27 08:14:43.140619966 +0000 UTC m=+4921.332663924" lastFinishedPulling="2025-11-27 08:14:46.248975566 +0000 UTC m=+4924.441019494" observedRunningTime="2025-11-27 08:14:47.195202723 +0000 UTC m=+4925.387246651" watchObservedRunningTime="2025-11-27 08:14:47.205376805 +0000 UTC m=+4925.397420733" Nov 27 08:14:52 crc kubenswrapper[4971]: I1127 08:14:52.011334 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dqxjt" Nov 27 08:14:52 crc kubenswrapper[4971]: I1127 08:14:52.011994 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dqxjt" Nov 27 08:14:52 crc kubenswrapper[4971]: I1127 08:14:52.058873 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dqxjt" Nov 27 08:14:52 crc kubenswrapper[4971]: I1127 08:14:52.265283 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dqxjt" Nov 27 08:14:52 crc kubenswrapper[4971]: I1127 08:14:52.335857 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dqxjt"] Nov 27 08:14:54 crc kubenswrapper[4971]: I1127 08:14:54.231210 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dqxjt" podUID="1e994151-ca2a-4036-8c17-626d791ab510" containerName="registry-server" containerID="cri-o://a207b2786f94acdae776b98d4ad5ca1d51422b02245eb4a2e8b416b490a11333" gracePeriod=2 Nov 27 08:14:54 crc kubenswrapper[4971]: I1127 08:14:54.632615 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dqxjt" Nov 27 08:14:54 crc kubenswrapper[4971]: I1127 08:14:54.737325 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e994151-ca2a-4036-8c17-626d791ab510-utilities\") pod \"1e994151-ca2a-4036-8c17-626d791ab510\" (UID: \"1e994151-ca2a-4036-8c17-626d791ab510\") " Nov 27 08:14:54 crc kubenswrapper[4971]: I1127 08:14:54.737408 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e994151-ca2a-4036-8c17-626d791ab510-catalog-content\") pod \"1e994151-ca2a-4036-8c17-626d791ab510\" (UID: \"1e994151-ca2a-4036-8c17-626d791ab510\") " Nov 27 08:14:54 crc kubenswrapper[4971]: I1127 08:14:54.737482 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sfgh2\" (UniqueName: \"kubernetes.io/projected/1e994151-ca2a-4036-8c17-626d791ab510-kube-api-access-sfgh2\") pod \"1e994151-ca2a-4036-8c17-626d791ab510\" (UID: \"1e994151-ca2a-4036-8c17-626d791ab510\") " Nov 27 08:14:54 crc kubenswrapper[4971]: I1127 08:14:54.738277 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e994151-ca2a-4036-8c17-626d791ab510-utilities" (OuterVolumeSpecName: "utilities") pod "1e994151-ca2a-4036-8c17-626d791ab510" (UID: "1e994151-ca2a-4036-8c17-626d791ab510"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:14:54 crc kubenswrapper[4971]: I1127 08:14:54.742280 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e994151-ca2a-4036-8c17-626d791ab510-kube-api-access-sfgh2" (OuterVolumeSpecName: "kube-api-access-sfgh2") pod "1e994151-ca2a-4036-8c17-626d791ab510" (UID: "1e994151-ca2a-4036-8c17-626d791ab510"). InnerVolumeSpecName "kube-api-access-sfgh2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:14:54 crc kubenswrapper[4971]: I1127 08:14:54.757252 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e994151-ca2a-4036-8c17-626d791ab510-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1e994151-ca2a-4036-8c17-626d791ab510" (UID: "1e994151-ca2a-4036-8c17-626d791ab510"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:14:54 crc kubenswrapper[4971]: I1127 08:14:54.839521 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e994151-ca2a-4036-8c17-626d791ab510-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 08:14:54 crc kubenswrapper[4971]: I1127 08:14:54.839577 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e994151-ca2a-4036-8c17-626d791ab510-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 08:14:54 crc kubenswrapper[4971]: I1127 08:14:54.839592 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sfgh2\" (UniqueName: \"kubernetes.io/projected/1e994151-ca2a-4036-8c17-626d791ab510-kube-api-access-sfgh2\") on node \"crc\" DevicePath \"\"" Nov 27 08:14:55 crc kubenswrapper[4971]: I1127 08:14:55.239795 4971 generic.go:334] "Generic (PLEG): container finished" podID="1e994151-ca2a-4036-8c17-626d791ab510" containerID="a207b2786f94acdae776b98d4ad5ca1d51422b02245eb4a2e8b416b490a11333" exitCode=0 Nov 27 08:14:55 crc kubenswrapper[4971]: I1127 08:14:55.239857 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqxjt" event={"ID":"1e994151-ca2a-4036-8c17-626d791ab510","Type":"ContainerDied","Data":"a207b2786f94acdae776b98d4ad5ca1d51422b02245eb4a2e8b416b490a11333"} Nov 27 08:14:55 crc kubenswrapper[4971]: I1127 08:14:55.239897 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dqxjt" event={"ID":"1e994151-ca2a-4036-8c17-626d791ab510","Type":"ContainerDied","Data":"c5bc5a065d66a14366df201823aee7fa16bbcfc0999d62dbb79a2cdbc10fc44f"} Nov 27 08:14:55 crc kubenswrapper[4971]: I1127 08:14:55.239925 4971 scope.go:117] "RemoveContainer" containerID="a207b2786f94acdae776b98d4ad5ca1d51422b02245eb4a2e8b416b490a11333" Nov 27 08:14:55 crc kubenswrapper[4971]: I1127 08:14:55.240134 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dqxjt" Nov 27 08:14:55 crc kubenswrapper[4971]: I1127 08:14:55.273610 4971 scope.go:117] "RemoveContainer" containerID="0d35975579be9714a73995ac6331d1cacebf4adeacebe0e5d4640db88b70457b" Nov 27 08:14:55 crc kubenswrapper[4971]: I1127 08:14:55.276360 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dqxjt"] Nov 27 08:14:55 crc kubenswrapper[4971]: I1127 08:14:55.281923 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dqxjt"] Nov 27 08:14:55 crc kubenswrapper[4971]: I1127 08:14:55.554507 4971 scope.go:117] "RemoveContainer" containerID="996267aa24bab7eac301ecc9c7b67731e2f41c4e9b34b1454e231acd4427c5a4" Nov 27 08:14:55 crc kubenswrapper[4971]: I1127 08:14:55.590573 4971 scope.go:117] "RemoveContainer" containerID="a207b2786f94acdae776b98d4ad5ca1d51422b02245eb4a2e8b416b490a11333" Nov 27 08:14:55 crc kubenswrapper[4971]: E1127 08:14:55.591229 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a207b2786f94acdae776b98d4ad5ca1d51422b02245eb4a2e8b416b490a11333\": container with ID starting with a207b2786f94acdae776b98d4ad5ca1d51422b02245eb4a2e8b416b490a11333 not found: ID does not exist" containerID="a207b2786f94acdae776b98d4ad5ca1d51422b02245eb4a2e8b416b490a11333" Nov 27 08:14:55 crc kubenswrapper[4971]: I1127 08:14:55.591285 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a207b2786f94acdae776b98d4ad5ca1d51422b02245eb4a2e8b416b490a11333"} err="failed to get container status \"a207b2786f94acdae776b98d4ad5ca1d51422b02245eb4a2e8b416b490a11333\": rpc error: code = NotFound desc = could not find container \"a207b2786f94acdae776b98d4ad5ca1d51422b02245eb4a2e8b416b490a11333\": container with ID starting with a207b2786f94acdae776b98d4ad5ca1d51422b02245eb4a2e8b416b490a11333 not found: ID does not exist" Nov 27 08:14:55 crc kubenswrapper[4971]: I1127 08:14:55.591327 4971 scope.go:117] "RemoveContainer" containerID="0d35975579be9714a73995ac6331d1cacebf4adeacebe0e5d4640db88b70457b" Nov 27 08:14:55 crc kubenswrapper[4971]: E1127 08:14:55.591798 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d35975579be9714a73995ac6331d1cacebf4adeacebe0e5d4640db88b70457b\": container with ID starting with 0d35975579be9714a73995ac6331d1cacebf4adeacebe0e5d4640db88b70457b not found: ID does not exist" containerID="0d35975579be9714a73995ac6331d1cacebf4adeacebe0e5d4640db88b70457b" Nov 27 08:14:55 crc kubenswrapper[4971]: I1127 08:14:55.591840 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d35975579be9714a73995ac6331d1cacebf4adeacebe0e5d4640db88b70457b"} err="failed to get container status \"0d35975579be9714a73995ac6331d1cacebf4adeacebe0e5d4640db88b70457b\": rpc error: code = NotFound desc = could not find container \"0d35975579be9714a73995ac6331d1cacebf4adeacebe0e5d4640db88b70457b\": container with ID starting with 0d35975579be9714a73995ac6331d1cacebf4adeacebe0e5d4640db88b70457b not found: ID does not exist" Nov 27 08:14:55 crc kubenswrapper[4971]: I1127 08:14:55.591868 4971 scope.go:117] "RemoveContainer" containerID="996267aa24bab7eac301ecc9c7b67731e2f41c4e9b34b1454e231acd4427c5a4" Nov 27 08:14:55 crc kubenswrapper[4971]: E1127 08:14:55.592388 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"996267aa24bab7eac301ecc9c7b67731e2f41c4e9b34b1454e231acd4427c5a4\": container with ID starting with 996267aa24bab7eac301ecc9c7b67731e2f41c4e9b34b1454e231acd4427c5a4 not found: ID does not exist" containerID="996267aa24bab7eac301ecc9c7b67731e2f41c4e9b34b1454e231acd4427c5a4" Nov 27 08:14:55 crc kubenswrapper[4971]: I1127 08:14:55.592611 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"996267aa24bab7eac301ecc9c7b67731e2f41c4e9b34b1454e231acd4427c5a4"} err="failed to get container status \"996267aa24bab7eac301ecc9c7b67731e2f41c4e9b34b1454e231acd4427c5a4\": rpc error: code = NotFound desc = could not find container \"996267aa24bab7eac301ecc9c7b67731e2f41c4e9b34b1454e231acd4427c5a4\": container with ID starting with 996267aa24bab7eac301ecc9c7b67731e2f41c4e9b34b1454e231acd4427c5a4 not found: ID does not exist" Nov 27 08:14:56 crc kubenswrapper[4971]: I1127 08:14:56.413402 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:14:56 crc kubenswrapper[4971]: I1127 08:14:56.413478 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:14:56 crc kubenswrapper[4971]: I1127 08:14:56.561686 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e994151-ca2a-4036-8c17-626d791ab510" path="/var/lib/kubelet/pods/1e994151-ca2a-4036-8c17-626d791ab510/volumes" Nov 27 08:15:00 crc kubenswrapper[4971]: I1127 08:15:00.167940 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw"] Nov 27 08:15:00 crc kubenswrapper[4971]: E1127 08:15:00.169712 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e994151-ca2a-4036-8c17-626d791ab510" containerName="registry-server" Nov 27 08:15:00 crc kubenswrapper[4971]: I1127 08:15:00.169793 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e994151-ca2a-4036-8c17-626d791ab510" containerName="registry-server" Nov 27 08:15:00 crc kubenswrapper[4971]: E1127 08:15:00.169872 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e994151-ca2a-4036-8c17-626d791ab510" containerName="extract-content" Nov 27 08:15:00 crc kubenswrapper[4971]: I1127 08:15:00.169924 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e994151-ca2a-4036-8c17-626d791ab510" containerName="extract-content" Nov 27 08:15:00 crc kubenswrapper[4971]: E1127 08:15:00.170029 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e994151-ca2a-4036-8c17-626d791ab510" containerName="extract-utilities" Nov 27 08:15:00 crc kubenswrapper[4971]: I1127 08:15:00.170092 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e994151-ca2a-4036-8c17-626d791ab510" containerName="extract-utilities" Nov 27 08:15:00 crc kubenswrapper[4971]: I1127 08:15:00.170305 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e994151-ca2a-4036-8c17-626d791ab510" containerName="registry-server" Nov 27 08:15:00 crc 
kubenswrapper[4971]: I1127 08:15:00.171003 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw" Nov 27 08:15:00 crc kubenswrapper[4971]: I1127 08:15:00.175060 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 27 08:15:00 crc kubenswrapper[4971]: I1127 08:15:00.175435 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 27 08:15:00 crc kubenswrapper[4971]: I1127 08:15:00.181308 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw"] Nov 27 08:15:00 crc kubenswrapper[4971]: I1127 08:15:00.220778 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8f409368-a7a7-49ff-908c-022268f93d16-config-volume\") pod \"collect-profiles-29403855-kblnw\" (UID: \"8f409368-a7a7-49ff-908c-022268f93d16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw" Nov 27 08:15:00 crc kubenswrapper[4971]: I1127 08:15:00.221170 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8f409368-a7a7-49ff-908c-022268f93d16-secret-volume\") pod \"collect-profiles-29403855-kblnw\" (UID: \"8f409368-a7a7-49ff-908c-022268f93d16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw" Nov 27 08:15:00 crc kubenswrapper[4971]: I1127 08:15:00.221645 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zw8t\" (UniqueName: \"kubernetes.io/projected/8f409368-a7a7-49ff-908c-022268f93d16-kube-api-access-7zw8t\") pod \"collect-profiles-29403855-kblnw\" (UID: \"8f409368-a7a7-49ff-908c-022268f93d16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw" Nov 27 08:15:00 crc kubenswrapper[4971]: I1127 08:15:00.323064 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zw8t\" (UniqueName: \"kubernetes.io/projected/8f409368-a7a7-49ff-908c-022268f93d16-kube-api-access-7zw8t\") pod \"collect-profiles-29403855-kblnw\" (UID: \"8f409368-a7a7-49ff-908c-022268f93d16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw" Nov 27 08:15:00 crc kubenswrapper[4971]: I1127 08:15:00.323138 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8f409368-a7a7-49ff-908c-022268f93d16-config-volume\") pod \"collect-profiles-29403855-kblnw\" (UID: \"8f409368-a7a7-49ff-908c-022268f93d16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw" Nov 27 08:15:00 crc kubenswrapper[4971]: I1127 08:15:00.323170 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8f409368-a7a7-49ff-908c-022268f93d16-secret-volume\") pod \"collect-profiles-29403855-kblnw\" (UID: \"8f409368-a7a7-49ff-908c-022268f93d16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw" Nov 27 08:15:00 crc kubenswrapper[4971]: I1127 08:15:00.324982 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" 
(UniqueName: \"kubernetes.io/configmap/8f409368-a7a7-49ff-908c-022268f93d16-config-volume\") pod \"collect-profiles-29403855-kblnw\" (UID: \"8f409368-a7a7-49ff-908c-022268f93d16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw" Nov 27 08:15:00 crc kubenswrapper[4971]: I1127 08:15:00.329466 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8f409368-a7a7-49ff-908c-022268f93d16-secret-volume\") pod \"collect-profiles-29403855-kblnw\" (UID: \"8f409368-a7a7-49ff-908c-022268f93d16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw" Nov 27 08:15:00 crc kubenswrapper[4971]: I1127 08:15:00.341900 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zw8t\" (UniqueName: \"kubernetes.io/projected/8f409368-a7a7-49ff-908c-022268f93d16-kube-api-access-7zw8t\") pod \"collect-profiles-29403855-kblnw\" (UID: \"8f409368-a7a7-49ff-908c-022268f93d16\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw" Nov 27 08:15:00 crc kubenswrapper[4971]: I1127 08:15:00.493125 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw" Nov 27 08:15:00 crc kubenswrapper[4971]: I1127 08:15:00.931729 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw"] Nov 27 08:15:01 crc kubenswrapper[4971]: I1127 08:15:01.287096 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw" event={"ID":"8f409368-a7a7-49ff-908c-022268f93d16","Type":"ContainerStarted","Data":"6532fa198ed3bcaa1fa758d5c5adda55eeaf9f943f209671bdb0ae910d9af30b"} Nov 27 08:15:01 crc kubenswrapper[4971]: I1127 08:15:01.287441 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw" event={"ID":"8f409368-a7a7-49ff-908c-022268f93d16","Type":"ContainerStarted","Data":"cbb33d927a6eafbb8e2d83e0549ca27bac8956c8da859e437f2856045d6cc0b2"} Nov 27 08:15:01 crc kubenswrapper[4971]: I1127 08:15:01.314369 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw" podStartSLOduration=1.31435181 podStartE2EDuration="1.31435181s" podCreationTimestamp="2025-11-27 08:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:15:01.312487217 +0000 UTC m=+4939.504531145" watchObservedRunningTime="2025-11-27 08:15:01.31435181 +0000 UTC m=+4939.506395738" Nov 27 08:15:02 crc kubenswrapper[4971]: I1127 08:15:02.297971 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw" event={"ID":"8f409368-a7a7-49ff-908c-022268f93d16","Type":"ContainerDied","Data":"6532fa198ed3bcaa1fa758d5c5adda55eeaf9f943f209671bdb0ae910d9af30b"} Nov 27 08:15:02 crc kubenswrapper[4971]: I1127 08:15:02.297851 4971 generic.go:334] "Generic (PLEG): container finished" podID="8f409368-a7a7-49ff-908c-022268f93d16" containerID="6532fa198ed3bcaa1fa758d5c5adda55eeaf9f943f209671bdb0ae910d9af30b" exitCode=0 Nov 27 08:15:03 crc kubenswrapper[4971]: I1127 08:15:03.595919 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw" Nov 27 08:15:03 crc kubenswrapper[4971]: I1127 08:15:03.685437 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8f409368-a7a7-49ff-908c-022268f93d16-config-volume\") pod \"8f409368-a7a7-49ff-908c-022268f93d16\" (UID: \"8f409368-a7a7-49ff-908c-022268f93d16\") " Nov 27 08:15:03 crc kubenswrapper[4971]: I1127 08:15:03.685786 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8f409368-a7a7-49ff-908c-022268f93d16-secret-volume\") pod \"8f409368-a7a7-49ff-908c-022268f93d16\" (UID: \"8f409368-a7a7-49ff-908c-022268f93d16\") " Nov 27 08:15:03 crc kubenswrapper[4971]: I1127 08:15:03.685889 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7zw8t\" (UniqueName: \"kubernetes.io/projected/8f409368-a7a7-49ff-908c-022268f93d16-kube-api-access-7zw8t\") pod \"8f409368-a7a7-49ff-908c-022268f93d16\" (UID: \"8f409368-a7a7-49ff-908c-022268f93d16\") " Nov 27 08:15:03 crc kubenswrapper[4971]: I1127 08:15:03.686924 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f409368-a7a7-49ff-908c-022268f93d16-config-volume" (OuterVolumeSpecName: "config-volume") pod "8f409368-a7a7-49ff-908c-022268f93d16" (UID: "8f409368-a7a7-49ff-908c-022268f93d16"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:15:03 crc kubenswrapper[4971]: I1127 08:15:03.691851 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f409368-a7a7-49ff-908c-022268f93d16-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8f409368-a7a7-49ff-908c-022268f93d16" (UID: "8f409368-a7a7-49ff-908c-022268f93d16"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:15:03 crc kubenswrapper[4971]: I1127 08:15:03.691911 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f409368-a7a7-49ff-908c-022268f93d16-kube-api-access-7zw8t" (OuterVolumeSpecName: "kube-api-access-7zw8t") pod "8f409368-a7a7-49ff-908c-022268f93d16" (UID: "8f409368-a7a7-49ff-908c-022268f93d16"). InnerVolumeSpecName "kube-api-access-7zw8t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:15:03 crc kubenswrapper[4971]: I1127 08:15:03.787617 4971 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8f409368-a7a7-49ff-908c-022268f93d16-config-volume\") on node \"crc\" DevicePath \"\"" Nov 27 08:15:03 crc kubenswrapper[4971]: I1127 08:15:03.787656 4971 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8f409368-a7a7-49ff-908c-022268f93d16-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 27 08:15:03 crc kubenswrapper[4971]: I1127 08:15:03.787667 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7zw8t\" (UniqueName: \"kubernetes.io/projected/8f409368-a7a7-49ff-908c-022268f93d16-kube-api-access-7zw8t\") on node \"crc\" DevicePath \"\"" Nov 27 08:15:04 crc kubenswrapper[4971]: I1127 08:15:04.318111 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw" event={"ID":"8f409368-a7a7-49ff-908c-022268f93d16","Type":"ContainerDied","Data":"cbb33d927a6eafbb8e2d83e0549ca27bac8956c8da859e437f2856045d6cc0b2"} Nov 27 08:15:04 crc kubenswrapper[4971]: I1127 08:15:04.318170 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw" Nov 27 08:15:04 crc kubenswrapper[4971]: I1127 08:15:04.318182 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cbb33d927a6eafbb8e2d83e0549ca27bac8956c8da859e437f2856045d6cc0b2" Nov 27 08:15:04 crc kubenswrapper[4971]: I1127 08:15:04.673079 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q"] Nov 27 08:15:04 crc kubenswrapper[4971]: I1127 08:15:04.681734 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403810-d9b6q"] Nov 27 08:15:06 crc kubenswrapper[4971]: I1127 08:15:06.561054 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16c5281c-08c7-4023-8d82-64ffa321b419" path="/var/lib/kubelet/pods/16c5281c-08c7-4023-8d82-64ffa321b419/volumes" Nov 27 08:15:18 crc kubenswrapper[4971]: I1127 08:15:18.368929 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5wvsq"] Nov 27 08:15:18 crc kubenswrapper[4971]: E1127 08:15:18.370001 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f409368-a7a7-49ff-908c-022268f93d16" containerName="collect-profiles" Nov 27 08:15:18 crc kubenswrapper[4971]: I1127 08:15:18.370020 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f409368-a7a7-49ff-908c-022268f93d16" containerName="collect-profiles" Nov 27 08:15:18 crc kubenswrapper[4971]: I1127 08:15:18.370248 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f409368-a7a7-49ff-908c-022268f93d16" containerName="collect-profiles" Nov 27 08:15:18 crc kubenswrapper[4971]: I1127 08:15:18.375731 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5wvsq" Nov 27 08:15:18 crc kubenswrapper[4971]: I1127 08:15:18.398257 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5wvsq"] Nov 27 08:15:18 crc kubenswrapper[4971]: I1127 08:15:18.494885 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvpjq\" (UniqueName: \"kubernetes.io/projected/d8d94553-f4a3-408c-9f86-0bbfbce5e228-kube-api-access-cvpjq\") pod \"community-operators-5wvsq\" (UID: \"d8d94553-f4a3-408c-9f86-0bbfbce5e228\") " pod="openshift-marketplace/community-operators-5wvsq" Nov 27 08:15:18 crc kubenswrapper[4971]: I1127 08:15:18.494939 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8d94553-f4a3-408c-9f86-0bbfbce5e228-utilities\") pod \"community-operators-5wvsq\" (UID: \"d8d94553-f4a3-408c-9f86-0bbfbce5e228\") " pod="openshift-marketplace/community-operators-5wvsq" Nov 27 08:15:18 crc kubenswrapper[4971]: I1127 08:15:18.495064 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8d94553-f4a3-408c-9f86-0bbfbce5e228-catalog-content\") pod \"community-operators-5wvsq\" (UID: \"d8d94553-f4a3-408c-9f86-0bbfbce5e228\") " pod="openshift-marketplace/community-operators-5wvsq" Nov 27 08:15:18 crc kubenswrapper[4971]: I1127 08:15:18.596261 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvpjq\" (UniqueName: \"kubernetes.io/projected/d8d94553-f4a3-408c-9f86-0bbfbce5e228-kube-api-access-cvpjq\") pod \"community-operators-5wvsq\" (UID: \"d8d94553-f4a3-408c-9f86-0bbfbce5e228\") " pod="openshift-marketplace/community-operators-5wvsq" Nov 27 08:15:18 crc kubenswrapper[4971]: I1127 08:15:18.596337 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8d94553-f4a3-408c-9f86-0bbfbce5e228-utilities\") pod \"community-operators-5wvsq\" (UID: \"d8d94553-f4a3-408c-9f86-0bbfbce5e228\") " pod="openshift-marketplace/community-operators-5wvsq" Nov 27 08:15:18 crc kubenswrapper[4971]: I1127 08:15:18.596386 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8d94553-f4a3-408c-9f86-0bbfbce5e228-catalog-content\") pod \"community-operators-5wvsq\" (UID: \"d8d94553-f4a3-408c-9f86-0bbfbce5e228\") " pod="openshift-marketplace/community-operators-5wvsq" Nov 27 08:15:18 crc kubenswrapper[4971]: I1127 08:15:18.597086 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8d94553-f4a3-408c-9f86-0bbfbce5e228-utilities\") pod \"community-operators-5wvsq\" (UID: \"d8d94553-f4a3-408c-9f86-0bbfbce5e228\") " pod="openshift-marketplace/community-operators-5wvsq" Nov 27 08:15:18 crc kubenswrapper[4971]: I1127 08:15:18.597156 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8d94553-f4a3-408c-9f86-0bbfbce5e228-catalog-content\") pod \"community-operators-5wvsq\" (UID: \"d8d94553-f4a3-408c-9f86-0bbfbce5e228\") " pod="openshift-marketplace/community-operators-5wvsq" Nov 27 08:15:18 crc kubenswrapper[4971]: I1127 08:15:18.634894 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cvpjq\" (UniqueName: \"kubernetes.io/projected/d8d94553-f4a3-408c-9f86-0bbfbce5e228-kube-api-access-cvpjq\") pod \"community-operators-5wvsq\" (UID: \"d8d94553-f4a3-408c-9f86-0bbfbce5e228\") " pod="openshift-marketplace/community-operators-5wvsq" Nov 27 08:15:18 crc kubenswrapper[4971]: I1127 08:15:18.707932 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5wvsq" Nov 27 08:15:19 crc kubenswrapper[4971]: I1127 08:15:19.260752 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5wvsq"] Nov 27 08:15:19 crc kubenswrapper[4971]: I1127 08:15:19.444376 4971 generic.go:334] "Generic (PLEG): container finished" podID="d8d94553-f4a3-408c-9f86-0bbfbce5e228" containerID="a30684b30ce34581c1cd7b6b62f151c1ef47951fa785ccd36ffd52337c87cd25" exitCode=0 Nov 27 08:15:19 crc kubenswrapper[4971]: I1127 08:15:19.444430 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5wvsq" event={"ID":"d8d94553-f4a3-408c-9f86-0bbfbce5e228","Type":"ContainerDied","Data":"a30684b30ce34581c1cd7b6b62f151c1ef47951fa785ccd36ffd52337c87cd25"} Nov 27 08:15:19 crc kubenswrapper[4971]: I1127 08:15:19.444467 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5wvsq" event={"ID":"d8d94553-f4a3-408c-9f86-0bbfbce5e228","Type":"ContainerStarted","Data":"84bae2a157799b2809080ceb0c7c5345c9d238c1f3eb9b37bdfd441b9d228783"} Nov 27 08:15:21 crc kubenswrapper[4971]: I1127 08:15:21.460696 4971 generic.go:334] "Generic (PLEG): container finished" podID="d8d94553-f4a3-408c-9f86-0bbfbce5e228" containerID="c78bd43dd271d332aafc84b118e5bfc3da375b7e68883273d0962b0fee93b6c1" exitCode=0 Nov 27 08:15:21 crc kubenswrapper[4971]: I1127 08:15:21.460749 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5wvsq" event={"ID":"d8d94553-f4a3-408c-9f86-0bbfbce5e228","Type":"ContainerDied","Data":"c78bd43dd271d332aafc84b118e5bfc3da375b7e68883273d0962b0fee93b6c1"} Nov 27 08:15:22 crc kubenswrapper[4971]: I1127 08:15:22.473639 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5wvsq" event={"ID":"d8d94553-f4a3-408c-9f86-0bbfbce5e228","Type":"ContainerStarted","Data":"700f55a8b21ccf0665f50da270172decdf8ff365d297ec92143ccbd97fb9f288"} Nov 27 08:15:22 crc kubenswrapper[4971]: I1127 08:15:22.499720 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5wvsq" podStartSLOduration=1.9124034170000002 podStartE2EDuration="4.499692047s" podCreationTimestamp="2025-11-27 08:15:18 +0000 UTC" firstStartedPulling="2025-11-27 08:15:19.44613799 +0000 UTC m=+4957.638181898" lastFinishedPulling="2025-11-27 08:15:22.03342661 +0000 UTC m=+4960.225470528" observedRunningTime="2025-11-27 08:15:22.495414804 +0000 UTC m=+4960.687458722" watchObservedRunningTime="2025-11-27 08:15:22.499692047 +0000 UTC m=+4960.691736005" Nov 27 08:15:26 crc kubenswrapper[4971]: I1127 08:15:26.414063 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:15:26 crc kubenswrapper[4971]: I1127 08:15:26.414410 4971 prober.go:107] 
"Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:15:28 crc kubenswrapper[4971]: I1127 08:15:28.708166 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5wvsq" Nov 27 08:15:28 crc kubenswrapper[4971]: I1127 08:15:28.708441 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5wvsq" Nov 27 08:15:28 crc kubenswrapper[4971]: I1127 08:15:28.753379 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5wvsq" Nov 27 08:15:29 crc kubenswrapper[4971]: I1127 08:15:29.636019 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5wvsq" Nov 27 08:15:29 crc kubenswrapper[4971]: I1127 08:15:29.730992 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5wvsq"] Nov 27 08:15:31 crc kubenswrapper[4971]: I1127 08:15:31.544421 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5wvsq" podUID="d8d94553-f4a3-408c-9f86-0bbfbce5e228" containerName="registry-server" containerID="cri-o://700f55a8b21ccf0665f50da270172decdf8ff365d297ec92143ccbd97fb9f288" gracePeriod=2 Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.052269 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5wvsq" Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.126176 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8d94553-f4a3-408c-9f86-0bbfbce5e228-catalog-content\") pod \"d8d94553-f4a3-408c-9f86-0bbfbce5e228\" (UID: \"d8d94553-f4a3-408c-9f86-0bbfbce5e228\") " Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.126272 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvpjq\" (UniqueName: \"kubernetes.io/projected/d8d94553-f4a3-408c-9f86-0bbfbce5e228-kube-api-access-cvpjq\") pod \"d8d94553-f4a3-408c-9f86-0bbfbce5e228\" (UID: \"d8d94553-f4a3-408c-9f86-0bbfbce5e228\") " Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.126303 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8d94553-f4a3-408c-9f86-0bbfbce5e228-utilities\") pod \"d8d94553-f4a3-408c-9f86-0bbfbce5e228\" (UID: \"d8d94553-f4a3-408c-9f86-0bbfbce5e228\") " Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.127729 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8d94553-f4a3-408c-9f86-0bbfbce5e228-utilities" (OuterVolumeSpecName: "utilities") pod "d8d94553-f4a3-408c-9f86-0bbfbce5e228" (UID: "d8d94553-f4a3-408c-9f86-0bbfbce5e228"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.132396 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8d94553-f4a3-408c-9f86-0bbfbce5e228-kube-api-access-cvpjq" (OuterVolumeSpecName: "kube-api-access-cvpjq") pod "d8d94553-f4a3-408c-9f86-0bbfbce5e228" (UID: "d8d94553-f4a3-408c-9f86-0bbfbce5e228"). InnerVolumeSpecName "kube-api-access-cvpjq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.227588 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvpjq\" (UniqueName: \"kubernetes.io/projected/d8d94553-f4a3-408c-9f86-0bbfbce5e228-kube-api-access-cvpjq\") on node \"crc\" DevicePath \"\"" Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.227634 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8d94553-f4a3-408c-9f86-0bbfbce5e228-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.241808 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8d94553-f4a3-408c-9f86-0bbfbce5e228-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d8d94553-f4a3-408c-9f86-0bbfbce5e228" (UID: "d8d94553-f4a3-408c-9f86-0bbfbce5e228"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.329396 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8d94553-f4a3-408c-9f86-0bbfbce5e228-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.557790 4971 generic.go:334] "Generic (PLEG): container finished" podID="d8d94553-f4a3-408c-9f86-0bbfbce5e228" containerID="700f55a8b21ccf0665f50da270172decdf8ff365d297ec92143ccbd97fb9f288" exitCode=0 Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.557908 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5wvsq" Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.562037 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5wvsq" event={"ID":"d8d94553-f4a3-408c-9f86-0bbfbce5e228","Type":"ContainerDied","Data":"700f55a8b21ccf0665f50da270172decdf8ff365d297ec92143ccbd97fb9f288"} Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.562106 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5wvsq" event={"ID":"d8d94553-f4a3-408c-9f86-0bbfbce5e228","Type":"ContainerDied","Data":"84bae2a157799b2809080ceb0c7c5345c9d238c1f3eb9b37bdfd441b9d228783"} Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.562136 4971 scope.go:117] "RemoveContainer" containerID="700f55a8b21ccf0665f50da270172decdf8ff365d297ec92143ccbd97fb9f288" Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.598565 4971 scope.go:117] "RemoveContainer" containerID="c78bd43dd271d332aafc84b118e5bfc3da375b7e68883273d0962b0fee93b6c1" Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.626808 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5wvsq"] Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.633168 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5wvsq"] Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.637380 4971 scope.go:117] "RemoveContainer" containerID="a30684b30ce34581c1cd7b6b62f151c1ef47951fa785ccd36ffd52337c87cd25" Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.696615 4971 scope.go:117] "RemoveContainer" containerID="700f55a8b21ccf0665f50da270172decdf8ff365d297ec92143ccbd97fb9f288" Nov 27 08:15:32 crc kubenswrapper[4971]: E1127 08:15:32.697664 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"700f55a8b21ccf0665f50da270172decdf8ff365d297ec92143ccbd97fb9f288\": container with ID starting with 700f55a8b21ccf0665f50da270172decdf8ff365d297ec92143ccbd97fb9f288 not found: ID does not exist" containerID="700f55a8b21ccf0665f50da270172decdf8ff365d297ec92143ccbd97fb9f288" Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.697794 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"700f55a8b21ccf0665f50da270172decdf8ff365d297ec92143ccbd97fb9f288"} err="failed to get container status \"700f55a8b21ccf0665f50da270172decdf8ff365d297ec92143ccbd97fb9f288\": rpc error: code = NotFound desc = could not find container \"700f55a8b21ccf0665f50da270172decdf8ff365d297ec92143ccbd97fb9f288\": container with ID starting with 700f55a8b21ccf0665f50da270172decdf8ff365d297ec92143ccbd97fb9f288 not found: ID does not exist" Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.697898 4971 scope.go:117] "RemoveContainer" containerID="c78bd43dd271d332aafc84b118e5bfc3da375b7e68883273d0962b0fee93b6c1" Nov 27 08:15:32 crc kubenswrapper[4971]: E1127 08:15:32.698474 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c78bd43dd271d332aafc84b118e5bfc3da375b7e68883273d0962b0fee93b6c1\": container with ID starting with c78bd43dd271d332aafc84b118e5bfc3da375b7e68883273d0962b0fee93b6c1 not found: ID does not exist" containerID="c78bd43dd271d332aafc84b118e5bfc3da375b7e68883273d0962b0fee93b6c1" Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.698498 4971 
Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.698516 4971 scope.go:117] "RemoveContainer" containerID="a30684b30ce34581c1cd7b6b62f151c1ef47951fa785ccd36ffd52337c87cd25"
Nov 27 08:15:32 crc kubenswrapper[4971]: E1127 08:15:32.698785 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a30684b30ce34581c1cd7b6b62f151c1ef47951fa785ccd36ffd52337c87cd25\": container with ID starting with a30684b30ce34581c1cd7b6b62f151c1ef47951fa785ccd36ffd52337c87cd25 not found: ID does not exist" containerID="a30684b30ce34581c1cd7b6b62f151c1ef47951fa785ccd36ffd52337c87cd25"
Nov 27 08:15:32 crc kubenswrapper[4971]: I1127 08:15:32.698807 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a30684b30ce34581c1cd7b6b62f151c1ef47951fa785ccd36ffd52337c87cd25"} err="failed to get container status \"a30684b30ce34581c1cd7b6b62f151c1ef47951fa785ccd36ffd52337c87cd25\": rpc error: code = NotFound desc = could not find container \"a30684b30ce34581c1cd7b6b62f151c1ef47951fa785ccd36ffd52337c87cd25\": container with ID starting with a30684b30ce34581c1cd7b6b62f151c1ef47951fa785ccd36ffd52337c87cd25 not found: ID does not exist"
Nov 27 08:15:34 crc kubenswrapper[4971]: I1127 08:15:34.561772 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8d94553-f4a3-408c-9f86-0bbfbce5e228" path="/var/lib/kubelet/pods/d8d94553-f4a3-408c-9f86-0bbfbce5e228/volumes"
Nov 27 08:15:51 crc kubenswrapper[4971]: I1127 08:15:51.800194 4971 scope.go:117] "RemoveContainer" containerID="8a7005164b8e83966b8fafce8be97bc5c5460a9ad7d4e1ee9ea5db89739f6ef9"
Nov 27 08:15:56 crc kubenswrapper[4971]: I1127 08:15:56.413036 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 08:15:56 crc kubenswrapper[4971]: I1127 08:15:56.413720 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 08:15:56 crc kubenswrapper[4971]: I1127 08:15:56.413791 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h"
Nov 27 08:15:56 crc kubenswrapper[4971]: I1127 08:15:56.414468 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 27 08:15:56 crc kubenswrapper[4971]: I1127 08:15:56.414525 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b" gracePeriod=600
Nov 27 08:15:56 crc kubenswrapper[4971]: E1127 08:15:56.544107 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:15:56 crc kubenswrapper[4971]: I1127 08:15:56.752826 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b" exitCode=0
Nov 27 08:15:56 crc kubenswrapper[4971]: I1127 08:15:56.752906 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"}
Nov 27 08:15:56 crc kubenswrapper[4971]: I1127 08:15:56.753345 4971 scope.go:117] "RemoveContainer" containerID="26fa812a55b4f3841abae52bb3428271d7ad0f2d0eba615b6bab0abc70b4002f"
Nov 27 08:15:56 crc kubenswrapper[4971]: I1127 08:15:56.754072 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"
Nov 27 08:15:56 crc kubenswrapper[4971]: E1127 08:15:56.754390 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:16:08 crc kubenswrapper[4971]: I1127 08:16:08.550797 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"
Nov 27 08:16:08 crc kubenswrapper[4971]: E1127 08:16:08.551408 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:16:22 crc kubenswrapper[4971]: I1127 08:16:22.554439 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"
Nov 27 08:16:22 crc kubenswrapper[4971]: E1127 08:16:22.556241 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:16:35 crc kubenswrapper[4971]: I1127 08:16:35.551277 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"
Nov 27 08:16:35 crc kubenswrapper[4971]: E1127 08:16:35.552713 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:16:49 crc kubenswrapper[4971]: I1127 08:16:49.551089 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"
Nov 27 08:16:49 crc kubenswrapper[4971]: E1127 08:16:49.554245 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:17:01 crc kubenswrapper[4971]: I1127 08:17:01.551168 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"
Nov 27 08:17:01 crc kubenswrapper[4971]: E1127 08:17:01.552233 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:17:13 crc kubenswrapper[4971]: I1127 08:17:13.550476 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"
Nov 27 08:17:13 crc kubenswrapper[4971]: E1127 08:17:13.551621 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:17:28 crc kubenswrapper[4971]: I1127 08:17:28.550345 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"
Nov 27 08:17:28 crc kubenswrapper[4971]: E1127 08:17:28.550998 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:17:41 crc kubenswrapper[4971]: I1127 08:17:41.551637 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"
Nov 27 08:17:41 crc kubenswrapper[4971]: E1127 08:17:41.553306 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:17:53 crc kubenswrapper[4971]: I1127 08:17:53.551068 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"
Nov 27 08:17:53 crc kubenswrapper[4971]: E1127 08:17:53.552232 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:18:07 crc kubenswrapper[4971]: I1127 08:18:07.550137 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"
Nov 27 08:18:07 crc kubenswrapper[4971]: E1127 08:18:07.550985 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:18:22 crc kubenswrapper[4971]: I1127 08:18:22.554732 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"
Nov 27 08:18:22 crc kubenswrapper[4971]: E1127 08:18:22.555751 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:18:34 crc kubenswrapper[4971]: I1127 08:18:34.550390 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"
Nov 27 08:18:34 crc kubenswrapper[4971]: E1127 08:18:34.551318 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:18:49 crc kubenswrapper[4971]: I1127 08:18:49.550261 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"
Nov 27 08:18:49 crc kubenswrapper[4971]: E1127 08:18:49.551178 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:19:03 crc kubenswrapper[4971]: I1127 08:19:03.551504 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"
Nov 27 08:19:03 crc kubenswrapper[4971]: E1127 08:19:03.552348 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:19:18 crc kubenswrapper[4971]: I1127 08:19:18.550583 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"
Nov 27 08:19:18 crc kubenswrapper[4971]: E1127 08:19:18.551295 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:19:32 crc kubenswrapper[4971]: I1127 08:19:32.554460 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"
Nov 27 08:19:32 crc kubenswrapper[4971]: E1127 08:19:32.555635 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:19:45 crc kubenswrapper[4971]: I1127 08:19:45.551036 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"
Nov 27 08:19:45 crc kubenswrapper[4971]: E1127 08:19:45.551705 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:19:58 crc kubenswrapper[4971]: I1127 08:19:58.550437 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b"
Nov 27 08:19:58 crc kubenswrapper[4971]: E1127 08:19:58.550992 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:20:13 crc kubenswrapper[4971]: I1127 08:20:13.550779 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b" Nov 27 08:20:13 crc kubenswrapper[4971]: E1127 08:20:13.551382 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:20:25 crc kubenswrapper[4971]: I1127 08:20:25.550403 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b" Nov 27 08:20:25 crc kubenswrapper[4971]: E1127 08:20:25.551552 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:20:39 crc kubenswrapper[4971]: I1127 08:20:39.550872 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b" Nov 27 08:20:39 crc kubenswrapper[4971]: E1127 08:20:39.551910 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:20:50 crc kubenswrapper[4971]: I1127 08:20:50.551380 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b" Nov 27 08:20:50 crc kubenswrapper[4971]: E1127 08:20:50.553042 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:21:01 crc kubenswrapper[4971]: I1127 08:21:01.550753 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b" Nov 27 08:21:01 crc kubenswrapper[4971]: I1127 08:21:01.829573 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/openstack-operator-index-r5brv" podUID="90063128-e4a8-4b8a-b830-44c061b9f533" containerName="registry-server" probeResult="failure" output=< Nov 27 08:21:01 
crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 08:21:01 crc kubenswrapper[4971]: > Nov 27 08:21:01 crc kubenswrapper[4971]: I1127 08:21:01.834224 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-operators/openstack-operator-index-r5brv" podUID="90063128-e4a8-4b8a-b830-44c061b9f533" containerName="registry-server" probeResult="failure" output=< Nov 27 08:21:01 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 08:21:01 crc kubenswrapper[4971]: > Nov 27 08:21:02 crc kubenswrapper[4971]: I1127 08:21:02.442903 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"191774ed9bf782651f467ab8c8b78eb518cc6f5ac5f6914842a64746bf293caf"} Nov 27 08:23:26 crc kubenswrapper[4971]: I1127 08:23:26.413389 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:23:26 crc kubenswrapper[4971]: I1127 08:23:26.414651 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:23:56 crc kubenswrapper[4971]: I1127 08:23:56.413959 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:23:56 crc kubenswrapper[4971]: I1127 08:23:56.415943 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:24:26 crc kubenswrapper[4971]: I1127 08:24:26.413117 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:24:26 crc kubenswrapper[4971]: I1127 08:24:26.414255 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:24:26 crc kubenswrapper[4971]: I1127 08:24:26.414327 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 08:24:26 crc kubenswrapper[4971]: I1127 08:24:26.415353 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"191774ed9bf782651f467ab8c8b78eb518cc6f5ac5f6914842a64746bf293caf"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 08:24:26 crc kubenswrapper[4971]: I1127 08:24:26.415691 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://191774ed9bf782651f467ab8c8b78eb518cc6f5ac5f6914842a64746bf293caf" gracePeriod=600 Nov 27 08:24:27 crc kubenswrapper[4971]: I1127 08:24:27.303953 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="191774ed9bf782651f467ab8c8b78eb518cc6f5ac5f6914842a64746bf293caf" exitCode=0 Nov 27 08:24:27 crc kubenswrapper[4971]: I1127 08:24:27.304026 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"191774ed9bf782651f467ab8c8b78eb518cc6f5ac5f6914842a64746bf293caf"} Nov 27 08:24:27 crc kubenswrapper[4971]: I1127 08:24:27.304609 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a"} Nov 27 08:24:27 crc kubenswrapper[4971]: I1127 08:24:27.304645 4971 scope.go:117] "RemoveContainer" containerID="2c7abcbabaf098a166a56842633d94d346c3fe217cb16d289735e3ab9912a15b" Nov 27 08:25:47 crc kubenswrapper[4971]: I1127 08:25:47.560424 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mz9m7"] Nov 27 08:25:47 crc kubenswrapper[4971]: E1127 08:25:47.561696 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8d94553-f4a3-408c-9f86-0bbfbce5e228" containerName="extract-utilities" Nov 27 08:25:47 crc kubenswrapper[4971]: I1127 08:25:47.561715 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8d94553-f4a3-408c-9f86-0bbfbce5e228" containerName="extract-utilities" Nov 27 08:25:47 crc kubenswrapper[4971]: E1127 08:25:47.561735 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8d94553-f4a3-408c-9f86-0bbfbce5e228" containerName="registry-server" Nov 27 08:25:47 crc kubenswrapper[4971]: I1127 08:25:47.561743 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8d94553-f4a3-408c-9f86-0bbfbce5e228" containerName="registry-server" Nov 27 08:25:47 crc kubenswrapper[4971]: E1127 08:25:47.561763 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8d94553-f4a3-408c-9f86-0bbfbce5e228" containerName="extract-content" Nov 27 08:25:47 crc kubenswrapper[4971]: I1127 08:25:47.561774 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8d94553-f4a3-408c-9f86-0bbfbce5e228" containerName="extract-content" Nov 27 08:25:47 crc kubenswrapper[4971]: I1127 08:25:47.561991 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8d94553-f4a3-408c-9f86-0bbfbce5e228" containerName="registry-server" Nov 27 08:25:47 crc kubenswrapper[4971]: I1127 08:25:47.563625 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mz9m7" Nov 27 08:25:47 crc kubenswrapper[4971]: I1127 08:25:47.600714 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mz9m7"] Nov 27 08:25:47 crc kubenswrapper[4971]: I1127 08:25:47.727183 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a45ad671-96d6-4717-86da-34f906acbbb8-catalog-content\") pod \"redhat-marketplace-mz9m7\" (UID: \"a45ad671-96d6-4717-86da-34f906acbbb8\") " pod="openshift-marketplace/redhat-marketplace-mz9m7" Nov 27 08:25:47 crc kubenswrapper[4971]: I1127 08:25:47.727611 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a45ad671-96d6-4717-86da-34f906acbbb8-utilities\") pod \"redhat-marketplace-mz9m7\" (UID: \"a45ad671-96d6-4717-86da-34f906acbbb8\") " pod="openshift-marketplace/redhat-marketplace-mz9m7" Nov 27 08:25:47 crc kubenswrapper[4971]: I1127 08:25:47.727925 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4rl7\" (UniqueName: \"kubernetes.io/projected/a45ad671-96d6-4717-86da-34f906acbbb8-kube-api-access-t4rl7\") pod \"redhat-marketplace-mz9m7\" (UID: \"a45ad671-96d6-4717-86da-34f906acbbb8\") " pod="openshift-marketplace/redhat-marketplace-mz9m7" Nov 27 08:25:47 crc kubenswrapper[4971]: I1127 08:25:47.830011 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a45ad671-96d6-4717-86da-34f906acbbb8-utilities\") pod \"redhat-marketplace-mz9m7\" (UID: \"a45ad671-96d6-4717-86da-34f906acbbb8\") " pod="openshift-marketplace/redhat-marketplace-mz9m7" Nov 27 08:25:47 crc kubenswrapper[4971]: I1127 08:25:47.830124 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4rl7\" (UniqueName: \"kubernetes.io/projected/a45ad671-96d6-4717-86da-34f906acbbb8-kube-api-access-t4rl7\") pod \"redhat-marketplace-mz9m7\" (UID: \"a45ad671-96d6-4717-86da-34f906acbbb8\") " pod="openshift-marketplace/redhat-marketplace-mz9m7" Nov 27 08:25:47 crc kubenswrapper[4971]: I1127 08:25:47.830175 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a45ad671-96d6-4717-86da-34f906acbbb8-catalog-content\") pod \"redhat-marketplace-mz9m7\" (UID: \"a45ad671-96d6-4717-86da-34f906acbbb8\") " pod="openshift-marketplace/redhat-marketplace-mz9m7" Nov 27 08:25:47 crc kubenswrapper[4971]: I1127 08:25:47.830640 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a45ad671-96d6-4717-86da-34f906acbbb8-utilities\") pod \"redhat-marketplace-mz9m7\" (UID: \"a45ad671-96d6-4717-86da-34f906acbbb8\") " pod="openshift-marketplace/redhat-marketplace-mz9m7" Nov 27 08:25:47 crc kubenswrapper[4971]: I1127 08:25:47.831306 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a45ad671-96d6-4717-86da-34f906acbbb8-catalog-content\") pod \"redhat-marketplace-mz9m7\" (UID: \"a45ad671-96d6-4717-86da-34f906acbbb8\") " pod="openshift-marketplace/redhat-marketplace-mz9m7" Nov 27 08:25:47 crc kubenswrapper[4971]: I1127 08:25:47.863707 4971 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-t4rl7\" (UniqueName: \"kubernetes.io/projected/a45ad671-96d6-4717-86da-34f906acbbb8-kube-api-access-t4rl7\") pod \"redhat-marketplace-mz9m7\" (UID: \"a45ad671-96d6-4717-86da-34f906acbbb8\") " pod="openshift-marketplace/redhat-marketplace-mz9m7" Nov 27 08:25:47 crc kubenswrapper[4971]: I1127 08:25:47.887513 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mz9m7" Nov 27 08:25:48 crc kubenswrapper[4971]: I1127 08:25:48.443889 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mz9m7"] Nov 27 08:25:49 crc kubenswrapper[4971]: I1127 08:25:49.039527 4971 generic.go:334] "Generic (PLEG): container finished" podID="a45ad671-96d6-4717-86da-34f906acbbb8" containerID="19a49d6045c4adf0b747611214d37f7e1bd24759b81da1130c4638ac66f4c723" exitCode=0 Nov 27 08:25:49 crc kubenswrapper[4971]: I1127 08:25:49.039684 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mz9m7" event={"ID":"a45ad671-96d6-4717-86da-34f906acbbb8","Type":"ContainerDied","Data":"19a49d6045c4adf0b747611214d37f7e1bd24759b81da1130c4638ac66f4c723"} Nov 27 08:25:49 crc kubenswrapper[4971]: I1127 08:25:49.040106 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mz9m7" event={"ID":"a45ad671-96d6-4717-86da-34f906acbbb8","Type":"ContainerStarted","Data":"893add830c65b2a5dc7155f53bd73d94e0751e28ae6e5c30bd841af59e97b94b"} Nov 27 08:25:49 crc kubenswrapper[4971]: I1127 08:25:49.043588 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 27 08:25:51 crc kubenswrapper[4971]: I1127 08:25:51.059234 4971 generic.go:334] "Generic (PLEG): container finished" podID="a45ad671-96d6-4717-86da-34f906acbbb8" containerID="f558dc44a32d5c79e0374e3c5daa0303d158bfbd9e9a8f01bba6ec7e69f617f1" exitCode=0 Nov 27 08:25:51 crc kubenswrapper[4971]: I1127 08:25:51.059324 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mz9m7" event={"ID":"a45ad671-96d6-4717-86da-34f906acbbb8","Type":"ContainerDied","Data":"f558dc44a32d5c79e0374e3c5daa0303d158bfbd9e9a8f01bba6ec7e69f617f1"} Nov 27 08:25:52 crc kubenswrapper[4971]: I1127 08:25:52.071254 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mz9m7" event={"ID":"a45ad671-96d6-4717-86da-34f906acbbb8","Type":"ContainerStarted","Data":"fb07f1b4b16243b319271df334336cc066862190400f4ec36291b27718b5ab82"} Nov 27 08:25:52 crc kubenswrapper[4971]: I1127 08:25:52.097855 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mz9m7" podStartSLOduration=2.440234841 podStartE2EDuration="5.097829413s" podCreationTimestamp="2025-11-27 08:25:47 +0000 UTC" firstStartedPulling="2025-11-27 08:25:49.04331092 +0000 UTC m=+5587.235354838" lastFinishedPulling="2025-11-27 08:25:51.700905492 +0000 UTC m=+5589.892949410" observedRunningTime="2025-11-27 08:25:52.092559805 +0000 UTC m=+5590.284603733" watchObservedRunningTime="2025-11-27 08:25:52.097829413 +0000 UTC m=+5590.289873341" Nov 27 08:25:53 crc kubenswrapper[4971]: I1127 08:25:53.530815 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8ll4c"] Nov 27 08:25:53 crc kubenswrapper[4971]: I1127 08:25:53.546559 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8ll4c" Nov 27 08:25:53 crc kubenswrapper[4971]: I1127 08:25:53.574913 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8ll4c"] Nov 27 08:25:53 crc kubenswrapper[4971]: I1127 08:25:53.628381 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7ef79f9-2133-467c-bcad-e29afcdb340c-catalog-content\") pod \"redhat-operators-8ll4c\" (UID: \"f7ef79f9-2133-467c-bcad-e29afcdb340c\") " pod="openshift-marketplace/redhat-operators-8ll4c" Nov 27 08:25:53 crc kubenswrapper[4971]: I1127 08:25:53.628490 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7ef79f9-2133-467c-bcad-e29afcdb340c-utilities\") pod \"redhat-operators-8ll4c\" (UID: \"f7ef79f9-2133-467c-bcad-e29afcdb340c\") " pod="openshift-marketplace/redhat-operators-8ll4c" Nov 27 08:25:53 crc kubenswrapper[4971]: I1127 08:25:53.628559 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqdr4\" (UniqueName: \"kubernetes.io/projected/f7ef79f9-2133-467c-bcad-e29afcdb340c-kube-api-access-gqdr4\") pod \"redhat-operators-8ll4c\" (UID: \"f7ef79f9-2133-467c-bcad-e29afcdb340c\") " pod="openshift-marketplace/redhat-operators-8ll4c" Nov 27 08:25:53 crc kubenswrapper[4971]: I1127 08:25:53.730252 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7ef79f9-2133-467c-bcad-e29afcdb340c-catalog-content\") pod \"redhat-operators-8ll4c\" (UID: \"f7ef79f9-2133-467c-bcad-e29afcdb340c\") " pod="openshift-marketplace/redhat-operators-8ll4c" Nov 27 08:25:53 crc kubenswrapper[4971]: I1127 08:25:53.730344 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7ef79f9-2133-467c-bcad-e29afcdb340c-utilities\") pod \"redhat-operators-8ll4c\" (UID: \"f7ef79f9-2133-467c-bcad-e29afcdb340c\") " pod="openshift-marketplace/redhat-operators-8ll4c" Nov 27 08:25:53 crc kubenswrapper[4971]: I1127 08:25:53.730407 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqdr4\" (UniqueName: \"kubernetes.io/projected/f7ef79f9-2133-467c-bcad-e29afcdb340c-kube-api-access-gqdr4\") pod \"redhat-operators-8ll4c\" (UID: \"f7ef79f9-2133-467c-bcad-e29afcdb340c\") " pod="openshift-marketplace/redhat-operators-8ll4c" Nov 27 08:25:53 crc kubenswrapper[4971]: I1127 08:25:53.730983 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7ef79f9-2133-467c-bcad-e29afcdb340c-catalog-content\") pod \"redhat-operators-8ll4c\" (UID: \"f7ef79f9-2133-467c-bcad-e29afcdb340c\") " pod="openshift-marketplace/redhat-operators-8ll4c" Nov 27 08:25:53 crc kubenswrapper[4971]: I1127 08:25:53.731048 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7ef79f9-2133-467c-bcad-e29afcdb340c-utilities\") pod \"redhat-operators-8ll4c\" (UID: \"f7ef79f9-2133-467c-bcad-e29afcdb340c\") " pod="openshift-marketplace/redhat-operators-8ll4c" Nov 27 08:25:53 crc kubenswrapper[4971]: I1127 08:25:53.753279 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-gqdr4\" (UniqueName: \"kubernetes.io/projected/f7ef79f9-2133-467c-bcad-e29afcdb340c-kube-api-access-gqdr4\") pod \"redhat-operators-8ll4c\" (UID: \"f7ef79f9-2133-467c-bcad-e29afcdb340c\") " pod="openshift-marketplace/redhat-operators-8ll4c" Nov 27 08:25:53 crc kubenswrapper[4971]: I1127 08:25:53.910674 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8ll4c" Nov 27 08:25:54 crc kubenswrapper[4971]: I1127 08:25:54.152429 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8ll4c"] Nov 27 08:25:55 crc kubenswrapper[4971]: I1127 08:25:55.097204 4971 generic.go:334] "Generic (PLEG): container finished" podID="f7ef79f9-2133-467c-bcad-e29afcdb340c" containerID="524f8b979fc9c1a82c6c8be637440ac3b341b1c89e480914d36065efe6f61f7e" exitCode=0 Nov 27 08:25:55 crc kubenswrapper[4971]: I1127 08:25:55.097268 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8ll4c" event={"ID":"f7ef79f9-2133-467c-bcad-e29afcdb340c","Type":"ContainerDied","Data":"524f8b979fc9c1a82c6c8be637440ac3b341b1c89e480914d36065efe6f61f7e"} Nov 27 08:25:55 crc kubenswrapper[4971]: I1127 08:25:55.097309 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8ll4c" event={"ID":"f7ef79f9-2133-467c-bcad-e29afcdb340c","Type":"ContainerStarted","Data":"eccb51b62aa395868dabf1b4a753116837637ad1800aab0866d867d73b4c40cf"} Nov 27 08:25:55 crc kubenswrapper[4971]: I1127 08:25:55.345590 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-snrv6"] Nov 27 08:25:55 crc kubenswrapper[4971]: I1127 08:25:55.347934 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-snrv6" Nov 27 08:25:55 crc kubenswrapper[4971]: I1127 08:25:55.373386 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-snrv6"] Nov 27 08:25:55 crc kubenswrapper[4971]: I1127 08:25:55.458667 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c27561fe-dbb7-41d1-95de-e1def1e99715-utilities\") pod \"certified-operators-snrv6\" (UID: \"c27561fe-dbb7-41d1-95de-e1def1e99715\") " pod="openshift-marketplace/certified-operators-snrv6" Nov 27 08:25:55 crc kubenswrapper[4971]: I1127 08:25:55.459023 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cq9kl\" (UniqueName: \"kubernetes.io/projected/c27561fe-dbb7-41d1-95de-e1def1e99715-kube-api-access-cq9kl\") pod \"certified-operators-snrv6\" (UID: \"c27561fe-dbb7-41d1-95de-e1def1e99715\") " pod="openshift-marketplace/certified-operators-snrv6" Nov 27 08:25:55 crc kubenswrapper[4971]: I1127 08:25:55.459125 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c27561fe-dbb7-41d1-95de-e1def1e99715-catalog-content\") pod \"certified-operators-snrv6\" (UID: \"c27561fe-dbb7-41d1-95de-e1def1e99715\") " pod="openshift-marketplace/certified-operators-snrv6" Nov 27 08:25:55 crc kubenswrapper[4971]: I1127 08:25:55.560451 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cq9kl\" (UniqueName: \"kubernetes.io/projected/c27561fe-dbb7-41d1-95de-e1def1e99715-kube-api-access-cq9kl\") pod \"certified-operators-snrv6\" (UID: \"c27561fe-dbb7-41d1-95de-e1def1e99715\") " pod="openshift-marketplace/certified-operators-snrv6" Nov 27 08:25:55 crc kubenswrapper[4971]: I1127 08:25:55.560502 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c27561fe-dbb7-41d1-95de-e1def1e99715-catalog-content\") pod \"certified-operators-snrv6\" (UID: \"c27561fe-dbb7-41d1-95de-e1def1e99715\") " pod="openshift-marketplace/certified-operators-snrv6" Nov 27 08:25:55 crc kubenswrapper[4971]: I1127 08:25:55.560575 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c27561fe-dbb7-41d1-95de-e1def1e99715-utilities\") pod \"certified-operators-snrv6\" (UID: \"c27561fe-dbb7-41d1-95de-e1def1e99715\") " pod="openshift-marketplace/certified-operators-snrv6" Nov 27 08:25:55 crc kubenswrapper[4971]: I1127 08:25:55.561069 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c27561fe-dbb7-41d1-95de-e1def1e99715-utilities\") pod \"certified-operators-snrv6\" (UID: \"c27561fe-dbb7-41d1-95de-e1def1e99715\") " pod="openshift-marketplace/certified-operators-snrv6" Nov 27 08:25:55 crc kubenswrapper[4971]: I1127 08:25:55.561211 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c27561fe-dbb7-41d1-95de-e1def1e99715-catalog-content\") pod \"certified-operators-snrv6\" (UID: \"c27561fe-dbb7-41d1-95de-e1def1e99715\") " pod="openshift-marketplace/certified-operators-snrv6" Nov 27 08:25:55 crc kubenswrapper[4971]: I1127 08:25:55.588002 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cq9kl\" (UniqueName: \"kubernetes.io/projected/c27561fe-dbb7-41d1-95de-e1def1e99715-kube-api-access-cq9kl\") pod \"certified-operators-snrv6\" (UID: \"c27561fe-dbb7-41d1-95de-e1def1e99715\") " pod="openshift-marketplace/certified-operators-snrv6" Nov 27 08:25:55 crc kubenswrapper[4971]: I1127 08:25:55.667218 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-snrv6" Nov 27 08:25:55 crc kubenswrapper[4971]: I1127 08:25:55.932242 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2ftfb"] Nov 27 08:25:55 crc kubenswrapper[4971]: I1127 08:25:55.936685 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2ftfb" Nov 27 08:25:55 crc kubenswrapper[4971]: I1127 08:25:55.971767 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2ftfb"] Nov 27 08:25:56 crc kubenswrapper[4971]: I1127 08:25:56.069229 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8ksh\" (UniqueName: \"kubernetes.io/projected/a72e94b8-cad5-4697-9c5e-6c0ba792d155-kube-api-access-f8ksh\") pod \"community-operators-2ftfb\" (UID: \"a72e94b8-cad5-4697-9c5e-6c0ba792d155\") " pod="openshift-marketplace/community-operators-2ftfb" Nov 27 08:25:56 crc kubenswrapper[4971]: I1127 08:25:56.069356 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a72e94b8-cad5-4697-9c5e-6c0ba792d155-catalog-content\") pod \"community-operators-2ftfb\" (UID: \"a72e94b8-cad5-4697-9c5e-6c0ba792d155\") " pod="openshift-marketplace/community-operators-2ftfb" Nov 27 08:25:56 crc kubenswrapper[4971]: I1127 08:25:56.069405 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a72e94b8-cad5-4697-9c5e-6c0ba792d155-utilities\") pod \"community-operators-2ftfb\" (UID: \"a72e94b8-cad5-4697-9c5e-6c0ba792d155\") " pod="openshift-marketplace/community-operators-2ftfb" Nov 27 08:25:56 crc kubenswrapper[4971]: I1127 08:25:56.170667 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a72e94b8-cad5-4697-9c5e-6c0ba792d155-catalog-content\") pod \"community-operators-2ftfb\" (UID: \"a72e94b8-cad5-4697-9c5e-6c0ba792d155\") " pod="openshift-marketplace/community-operators-2ftfb" Nov 27 08:25:56 crc kubenswrapper[4971]: I1127 08:25:56.171177 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a72e94b8-cad5-4697-9c5e-6c0ba792d155-utilities\") pod \"community-operators-2ftfb\" (UID: \"a72e94b8-cad5-4697-9c5e-6c0ba792d155\") " pod="openshift-marketplace/community-operators-2ftfb" Nov 27 08:25:56 crc kubenswrapper[4971]: I1127 08:25:56.171245 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8ksh\" (UniqueName: \"kubernetes.io/projected/a72e94b8-cad5-4697-9c5e-6c0ba792d155-kube-api-access-f8ksh\") pod \"community-operators-2ftfb\" (UID: \"a72e94b8-cad5-4697-9c5e-6c0ba792d155\") " pod="openshift-marketplace/community-operators-2ftfb" Nov 27 08:25:56 crc kubenswrapper[4971]: I1127 08:25:56.171348 4971 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a72e94b8-cad5-4697-9c5e-6c0ba792d155-catalog-content\") pod \"community-operators-2ftfb\" (UID: \"a72e94b8-cad5-4697-9c5e-6c0ba792d155\") " pod="openshift-marketplace/community-operators-2ftfb" Nov 27 08:25:56 crc kubenswrapper[4971]: I1127 08:25:56.171676 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a72e94b8-cad5-4697-9c5e-6c0ba792d155-utilities\") pod \"community-operators-2ftfb\" (UID: \"a72e94b8-cad5-4697-9c5e-6c0ba792d155\") " pod="openshift-marketplace/community-operators-2ftfb" Nov 27 08:25:56 crc kubenswrapper[4971]: I1127 08:25:56.206946 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8ksh\" (UniqueName: \"kubernetes.io/projected/a72e94b8-cad5-4697-9c5e-6c0ba792d155-kube-api-access-f8ksh\") pod \"community-operators-2ftfb\" (UID: \"a72e94b8-cad5-4697-9c5e-6c0ba792d155\") " pod="openshift-marketplace/community-operators-2ftfb" Nov 27 08:25:56 crc kubenswrapper[4971]: I1127 08:25:56.221934 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-snrv6"] Nov 27 08:25:56 crc kubenswrapper[4971]: I1127 08:25:56.268792 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2ftfb" Nov 27 08:25:56 crc kubenswrapper[4971]: I1127 08:25:56.650513 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2ftfb"] Nov 27 08:25:57 crc kubenswrapper[4971]: I1127 08:25:57.115712 4971 generic.go:334] "Generic (PLEG): container finished" podID="a72e94b8-cad5-4697-9c5e-6c0ba792d155" containerID="9ee4144709e3b3815142ddcde5f3e55213992d2a216770fb328891d1e9d1a26f" exitCode=0 Nov 27 08:25:57 crc kubenswrapper[4971]: I1127 08:25:57.115802 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2ftfb" event={"ID":"a72e94b8-cad5-4697-9c5e-6c0ba792d155","Type":"ContainerDied","Data":"9ee4144709e3b3815142ddcde5f3e55213992d2a216770fb328891d1e9d1a26f"} Nov 27 08:25:57 crc kubenswrapper[4971]: I1127 08:25:57.115836 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2ftfb" event={"ID":"a72e94b8-cad5-4697-9c5e-6c0ba792d155","Type":"ContainerStarted","Data":"3cdb33f61792a81ce23e1acf7be31f66e56f09f9678e2357990082ad7760e741"} Nov 27 08:25:57 crc kubenswrapper[4971]: I1127 08:25:57.119408 4971 generic.go:334] "Generic (PLEG): container finished" podID="c27561fe-dbb7-41d1-95de-e1def1e99715" containerID="04bbc82bb853f58d2dd13eefbbcbf6f9d4a8c0deee1fc969d64c39e861a16842" exitCode=0 Nov 27 08:25:57 crc kubenswrapper[4971]: I1127 08:25:57.119547 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-snrv6" event={"ID":"c27561fe-dbb7-41d1-95de-e1def1e99715","Type":"ContainerDied","Data":"04bbc82bb853f58d2dd13eefbbcbf6f9d4a8c0deee1fc969d64c39e861a16842"} Nov 27 08:25:57 crc kubenswrapper[4971]: I1127 08:25:57.119644 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-snrv6" event={"ID":"c27561fe-dbb7-41d1-95de-e1def1e99715","Type":"ContainerStarted","Data":"232a43d8ef39f3ca3fd230f26ab1fe002f2310710ecd286d1e38eccde4c5cd55"} Nov 27 08:25:57 crc kubenswrapper[4971]: I1127 08:25:57.124204 4971 generic.go:334] "Generic (PLEG): container 
finished" podID="f7ef79f9-2133-467c-bcad-e29afcdb340c" containerID="e9c6419359db20905346dd2b4e2e3fb0d223d18b2f6f97102f352f839611600d" exitCode=0 Nov 27 08:25:57 crc kubenswrapper[4971]: I1127 08:25:57.124275 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8ll4c" event={"ID":"f7ef79f9-2133-467c-bcad-e29afcdb340c","Type":"ContainerDied","Data":"e9c6419359db20905346dd2b4e2e3fb0d223d18b2f6f97102f352f839611600d"} Nov 27 08:25:57 crc kubenswrapper[4971]: I1127 08:25:57.888233 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mz9m7" Nov 27 08:25:57 crc kubenswrapper[4971]: I1127 08:25:57.888749 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mz9m7" Nov 27 08:25:57 crc kubenswrapper[4971]: I1127 08:25:57.949095 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mz9m7" Nov 27 08:25:58 crc kubenswrapper[4971]: I1127 08:25:58.139415 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8ll4c" event={"ID":"f7ef79f9-2133-467c-bcad-e29afcdb340c","Type":"ContainerStarted","Data":"f75b8b87c74209d0c677497d4eae7ef6d82aa8091a24df2d9e259cf84e797853"} Nov 27 08:25:58 crc kubenswrapper[4971]: I1127 08:25:58.151813 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2ftfb" event={"ID":"a72e94b8-cad5-4697-9c5e-6c0ba792d155","Type":"ContainerStarted","Data":"19d6032051e20fef9414916aed7b5cd21bed89a347b615b646528ad48bafb2c1"} Nov 27 08:25:58 crc kubenswrapper[4971]: I1127 08:25:58.180633 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8ll4c" podStartSLOduration=2.424348969 podStartE2EDuration="5.180604923s" podCreationTimestamp="2025-11-27 08:25:53 +0000 UTC" firstStartedPulling="2025-11-27 08:25:55.098803623 +0000 UTC m=+5593.290847541" lastFinishedPulling="2025-11-27 08:25:57.855059577 +0000 UTC m=+5596.047103495" observedRunningTime="2025-11-27 08:25:58.176297072 +0000 UTC m=+5596.368341010" watchObservedRunningTime="2025-11-27 08:25:58.180604923 +0000 UTC m=+5596.372648841" Nov 27 08:25:58 crc kubenswrapper[4971]: I1127 08:25:58.209821 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mz9m7" Nov 27 08:25:59 crc kubenswrapper[4971]: I1127 08:25:59.162218 4971 generic.go:334] "Generic (PLEG): container finished" podID="c27561fe-dbb7-41d1-95de-e1def1e99715" containerID="1f18a7168f683dbdfbb9acc3afbf0dcfcb8d62dc7840189ba44cecacda6a78a5" exitCode=0 Nov 27 08:25:59 crc kubenswrapper[4971]: I1127 08:25:59.162313 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-snrv6" event={"ID":"c27561fe-dbb7-41d1-95de-e1def1e99715","Type":"ContainerDied","Data":"1f18a7168f683dbdfbb9acc3afbf0dcfcb8d62dc7840189ba44cecacda6a78a5"} Nov 27 08:25:59 crc kubenswrapper[4971]: I1127 08:25:59.166251 4971 generic.go:334] "Generic (PLEG): container finished" podID="a72e94b8-cad5-4697-9c5e-6c0ba792d155" containerID="19d6032051e20fef9414916aed7b5cd21bed89a347b615b646528ad48bafb2c1" exitCode=0 Nov 27 08:25:59 crc kubenswrapper[4971]: I1127 08:25:59.166312 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2ftfb" 
event={"ID":"a72e94b8-cad5-4697-9c5e-6c0ba792d155","Type":"ContainerDied","Data":"19d6032051e20fef9414916aed7b5cd21bed89a347b615b646528ad48bafb2c1"} Nov 27 08:26:01 crc kubenswrapper[4971]: I1127 08:26:01.890288 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2ftfb" event={"ID":"a72e94b8-cad5-4697-9c5e-6c0ba792d155","Type":"ContainerStarted","Data":"755841fd55002a6a725bf30f86efe91a276890e0dc347c1e56534549caae41a7"} Nov 27 08:26:01 crc kubenswrapper[4971]: I1127 08:26:01.897964 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-snrv6" event={"ID":"c27561fe-dbb7-41d1-95de-e1def1e99715","Type":"ContainerStarted","Data":"59e164888adc861ec684d87be96c6631b55750b9c7abfba2d92962fc785c769b"} Nov 27 08:26:01 crc kubenswrapper[4971]: I1127 08:26:01.950162 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2ftfb" podStartSLOduration=3.103655531 podStartE2EDuration="6.950136144s" podCreationTimestamp="2025-11-27 08:25:55 +0000 UTC" firstStartedPulling="2025-11-27 08:25:57.118778311 +0000 UTC m=+5595.310822229" lastFinishedPulling="2025-11-27 08:26:00.965258924 +0000 UTC m=+5599.157302842" observedRunningTime="2025-11-27 08:26:01.923259979 +0000 UTC m=+5600.115303917" watchObservedRunningTime="2025-11-27 08:26:01.950136144 +0000 UTC m=+5600.142180072" Nov 27 08:26:01 crc kubenswrapper[4971]: I1127 08:26:01.958420 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-snrv6" podStartSLOduration=2.887353064 podStartE2EDuration="6.958395366s" podCreationTimestamp="2025-11-27 08:25:55 +0000 UTC" firstStartedPulling="2025-11-27 08:25:57.121151428 +0000 UTC m=+5595.313195336" lastFinishedPulling="2025-11-27 08:26:01.19219373 +0000 UTC m=+5599.384237638" observedRunningTime="2025-11-27 08:26:01.948253311 +0000 UTC m=+5600.140297229" watchObservedRunningTime="2025-11-27 08:26:01.958395366 +0000 UTC m=+5600.150439284" Nov 27 08:26:02 crc kubenswrapper[4971]: I1127 08:26:02.519831 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mz9m7"] Nov 27 08:26:02 crc kubenswrapper[4971]: I1127 08:26:02.520198 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mz9m7" podUID="a45ad671-96d6-4717-86da-34f906acbbb8" containerName="registry-server" containerID="cri-o://fb07f1b4b16243b319271df334336cc066862190400f4ec36291b27718b5ab82" gracePeriod=2 Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.461212 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mz9m7" Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.498312 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a45ad671-96d6-4717-86da-34f906acbbb8-catalog-content\") pod \"a45ad671-96d6-4717-86da-34f906acbbb8\" (UID: \"a45ad671-96d6-4717-86da-34f906acbbb8\") " Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.498377 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a45ad671-96d6-4717-86da-34f906acbbb8-utilities\") pod \"a45ad671-96d6-4717-86da-34f906acbbb8\" (UID: \"a45ad671-96d6-4717-86da-34f906acbbb8\") " Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.498510 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4rl7\" (UniqueName: \"kubernetes.io/projected/a45ad671-96d6-4717-86da-34f906acbbb8-kube-api-access-t4rl7\") pod \"a45ad671-96d6-4717-86da-34f906acbbb8\" (UID: \"a45ad671-96d6-4717-86da-34f906acbbb8\") " Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.499580 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a45ad671-96d6-4717-86da-34f906acbbb8-utilities" (OuterVolumeSpecName: "utilities") pod "a45ad671-96d6-4717-86da-34f906acbbb8" (UID: "a45ad671-96d6-4717-86da-34f906acbbb8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.506991 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a45ad671-96d6-4717-86da-34f906acbbb8-kube-api-access-t4rl7" (OuterVolumeSpecName: "kube-api-access-t4rl7") pod "a45ad671-96d6-4717-86da-34f906acbbb8" (UID: "a45ad671-96d6-4717-86da-34f906acbbb8"). InnerVolumeSpecName "kube-api-access-t4rl7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.527888 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a45ad671-96d6-4717-86da-34f906acbbb8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a45ad671-96d6-4717-86da-34f906acbbb8" (UID: "a45ad671-96d6-4717-86da-34f906acbbb8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.602043 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a45ad671-96d6-4717-86da-34f906acbbb8-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.602081 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a45ad671-96d6-4717-86da-34f906acbbb8-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.602095 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4rl7\" (UniqueName: \"kubernetes.io/projected/a45ad671-96d6-4717-86da-34f906acbbb8-kube-api-access-t4rl7\") on node \"crc\" DevicePath \"\"" Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.911600 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8ll4c" Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.911689 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8ll4c" Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.914957 4971 generic.go:334] "Generic (PLEG): container finished" podID="a45ad671-96d6-4717-86da-34f906acbbb8" containerID="fb07f1b4b16243b319271df334336cc066862190400f4ec36291b27718b5ab82" exitCode=0 Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.915015 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mz9m7" event={"ID":"a45ad671-96d6-4717-86da-34f906acbbb8","Type":"ContainerDied","Data":"fb07f1b4b16243b319271df334336cc066862190400f4ec36291b27718b5ab82"} Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.915031 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mz9m7" Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.915051 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mz9m7" event={"ID":"a45ad671-96d6-4717-86da-34f906acbbb8","Type":"ContainerDied","Data":"893add830c65b2a5dc7155f53bd73d94e0751e28ae6e5c30bd841af59e97b94b"} Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.915077 4971 scope.go:117] "RemoveContainer" containerID="fb07f1b4b16243b319271df334336cc066862190400f4ec36291b27718b5ab82" Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.943795 4971 scope.go:117] "RemoveContainer" containerID="f558dc44a32d5c79e0374e3c5daa0303d158bfbd9e9a8f01bba6ec7e69f617f1" Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.952051 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mz9m7"] Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.964947 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mz9m7"] Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.970231 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8ll4c" Nov 27 08:26:03 crc kubenswrapper[4971]: I1127 08:26:03.971087 4971 scope.go:117] "RemoveContainer" containerID="19a49d6045c4adf0b747611214d37f7e1bd24759b81da1130c4638ac66f4c723" Nov 27 08:26:04 crc kubenswrapper[4971]: I1127 08:26:04.002462 4971 scope.go:117] "RemoveContainer" containerID="fb07f1b4b16243b319271df334336cc066862190400f4ec36291b27718b5ab82" Nov 27 08:26:04 crc kubenswrapper[4971]: E1127 08:26:04.003055 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb07f1b4b16243b319271df334336cc066862190400f4ec36291b27718b5ab82\": container with ID starting with fb07f1b4b16243b319271df334336cc066862190400f4ec36291b27718b5ab82 not found: ID does not exist" containerID="fb07f1b4b16243b319271df334336cc066862190400f4ec36291b27718b5ab82" Nov 27 08:26:04 crc kubenswrapper[4971]: I1127 08:26:04.003110 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb07f1b4b16243b319271df334336cc066862190400f4ec36291b27718b5ab82"} err="failed to get container status \"fb07f1b4b16243b319271df334336cc066862190400f4ec36291b27718b5ab82\": rpc error: code = NotFound desc = could not find container \"fb07f1b4b16243b319271df334336cc066862190400f4ec36291b27718b5ab82\": container with ID starting with fb07f1b4b16243b319271df334336cc066862190400f4ec36291b27718b5ab82 not found: ID does not exist" Nov 27 08:26:04 crc kubenswrapper[4971]: I1127 08:26:04.003148 4971 scope.go:117] "RemoveContainer" containerID="f558dc44a32d5c79e0374e3c5daa0303d158bfbd9e9a8f01bba6ec7e69f617f1" Nov 27 08:26:04 crc kubenswrapper[4971]: E1127 08:26:04.003559 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f558dc44a32d5c79e0374e3c5daa0303d158bfbd9e9a8f01bba6ec7e69f617f1\": container with ID starting with f558dc44a32d5c79e0374e3c5daa0303d158bfbd9e9a8f01bba6ec7e69f617f1 not found: ID does not exist" containerID="f558dc44a32d5c79e0374e3c5daa0303d158bfbd9e9a8f01bba6ec7e69f617f1" Nov 27 08:26:04 crc kubenswrapper[4971]: I1127 08:26:04.003614 4971 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"f558dc44a32d5c79e0374e3c5daa0303d158bfbd9e9a8f01bba6ec7e69f617f1"} err="failed to get container status \"f558dc44a32d5c79e0374e3c5daa0303d158bfbd9e9a8f01bba6ec7e69f617f1\": rpc error: code = NotFound desc = could not find container \"f558dc44a32d5c79e0374e3c5daa0303d158bfbd9e9a8f01bba6ec7e69f617f1\": container with ID starting with f558dc44a32d5c79e0374e3c5daa0303d158bfbd9e9a8f01bba6ec7e69f617f1 not found: ID does not exist" Nov 27 08:26:04 crc kubenswrapper[4971]: I1127 08:26:04.003652 4971 scope.go:117] "RemoveContainer" containerID="19a49d6045c4adf0b747611214d37f7e1bd24759b81da1130c4638ac66f4c723" Nov 27 08:26:04 crc kubenswrapper[4971]: E1127 08:26:04.004255 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19a49d6045c4adf0b747611214d37f7e1bd24759b81da1130c4638ac66f4c723\": container with ID starting with 19a49d6045c4adf0b747611214d37f7e1bd24759b81da1130c4638ac66f4c723 not found: ID does not exist" containerID="19a49d6045c4adf0b747611214d37f7e1bd24759b81da1130c4638ac66f4c723" Nov 27 08:26:04 crc kubenswrapper[4971]: I1127 08:26:04.004286 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19a49d6045c4adf0b747611214d37f7e1bd24759b81da1130c4638ac66f4c723"} err="failed to get container status \"19a49d6045c4adf0b747611214d37f7e1bd24759b81da1130c4638ac66f4c723\": rpc error: code = NotFound desc = could not find container \"19a49d6045c4adf0b747611214d37f7e1bd24759b81da1130c4638ac66f4c723\": container with ID starting with 19a49d6045c4adf0b747611214d37f7e1bd24759b81da1130c4638ac66f4c723 not found: ID does not exist" Nov 27 08:26:04 crc kubenswrapper[4971]: I1127 08:26:04.569621 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a45ad671-96d6-4717-86da-34f906acbbb8" path="/var/lib/kubelet/pods/a45ad671-96d6-4717-86da-34f906acbbb8/volumes" Nov 27 08:26:04 crc kubenswrapper[4971]: I1127 08:26:04.965514 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8ll4c" Nov 27 08:26:05 crc kubenswrapper[4971]: I1127 08:26:05.668257 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-snrv6" Nov 27 08:26:05 crc kubenswrapper[4971]: I1127 08:26:05.668335 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-snrv6" Nov 27 08:26:05 crc kubenswrapper[4971]: I1127 08:26:05.721760 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-snrv6" Nov 27 08:26:06 crc kubenswrapper[4971]: I1127 08:26:06.269702 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2ftfb" Nov 27 08:26:06 crc kubenswrapper[4971]: I1127 08:26:06.270794 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2ftfb" Nov 27 08:26:06 crc kubenswrapper[4971]: I1127 08:26:06.325584 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2ftfb" Nov 27 08:26:07 crc kubenswrapper[4971]: I1127 08:26:07.000353 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2ftfb" Nov 27 08:26:09 crc kubenswrapper[4971]: I1127 08:26:09.123185 4971 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8ll4c"] Nov 27 08:26:09 crc kubenswrapper[4971]: I1127 08:26:09.123466 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8ll4c" podUID="f7ef79f9-2133-467c-bcad-e29afcdb340c" containerName="registry-server" containerID="cri-o://f75b8b87c74209d0c677497d4eae7ef6d82aa8091a24df2d9e259cf84e797853" gracePeriod=2 Nov 27 08:26:09 crc kubenswrapper[4971]: I1127 08:26:09.971043 4971 generic.go:334] "Generic (PLEG): container finished" podID="f7ef79f9-2133-467c-bcad-e29afcdb340c" containerID="f75b8b87c74209d0c677497d4eae7ef6d82aa8091a24df2d9e259cf84e797853" exitCode=0 Nov 27 08:26:09 crc kubenswrapper[4971]: I1127 08:26:09.971104 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8ll4c" event={"ID":"f7ef79f9-2133-467c-bcad-e29afcdb340c","Type":"ContainerDied","Data":"f75b8b87c74209d0c677497d4eae7ef6d82aa8091a24df2d9e259cf84e797853"} Nov 27 08:26:10 crc kubenswrapper[4971]: I1127 08:26:10.689807 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8ll4c" Nov 27 08:26:10 crc kubenswrapper[4971]: I1127 08:26:10.824760 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqdr4\" (UniqueName: \"kubernetes.io/projected/f7ef79f9-2133-467c-bcad-e29afcdb340c-kube-api-access-gqdr4\") pod \"f7ef79f9-2133-467c-bcad-e29afcdb340c\" (UID: \"f7ef79f9-2133-467c-bcad-e29afcdb340c\") " Nov 27 08:26:10 crc kubenswrapper[4971]: I1127 08:26:10.825298 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7ef79f9-2133-467c-bcad-e29afcdb340c-utilities\") pod \"f7ef79f9-2133-467c-bcad-e29afcdb340c\" (UID: \"f7ef79f9-2133-467c-bcad-e29afcdb340c\") " Nov 27 08:26:10 crc kubenswrapper[4971]: I1127 08:26:10.825460 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7ef79f9-2133-467c-bcad-e29afcdb340c-catalog-content\") pod \"f7ef79f9-2133-467c-bcad-e29afcdb340c\" (UID: \"f7ef79f9-2133-467c-bcad-e29afcdb340c\") " Nov 27 08:26:10 crc kubenswrapper[4971]: I1127 08:26:10.826514 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7ef79f9-2133-467c-bcad-e29afcdb340c-utilities" (OuterVolumeSpecName: "utilities") pod "f7ef79f9-2133-467c-bcad-e29afcdb340c" (UID: "f7ef79f9-2133-467c-bcad-e29afcdb340c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:26:10 crc kubenswrapper[4971]: I1127 08:26:10.832910 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7ef79f9-2133-467c-bcad-e29afcdb340c-kube-api-access-gqdr4" (OuterVolumeSpecName: "kube-api-access-gqdr4") pod "f7ef79f9-2133-467c-bcad-e29afcdb340c" (UID: "f7ef79f9-2133-467c-bcad-e29afcdb340c"). InnerVolumeSpecName "kube-api-access-gqdr4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:26:10 crc kubenswrapper[4971]: I1127 08:26:10.928073 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqdr4\" (UniqueName: \"kubernetes.io/projected/f7ef79f9-2133-467c-bcad-e29afcdb340c-kube-api-access-gqdr4\") on node \"crc\" DevicePath \"\"" Nov 27 08:26:10 crc kubenswrapper[4971]: I1127 08:26:10.928139 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7ef79f9-2133-467c-bcad-e29afcdb340c-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 08:26:10 crc kubenswrapper[4971]: I1127 08:26:10.934338 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7ef79f9-2133-467c-bcad-e29afcdb340c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f7ef79f9-2133-467c-bcad-e29afcdb340c" (UID: "f7ef79f9-2133-467c-bcad-e29afcdb340c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:26:10 crc kubenswrapper[4971]: I1127 08:26:10.984453 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8ll4c" event={"ID":"f7ef79f9-2133-467c-bcad-e29afcdb340c","Type":"ContainerDied","Data":"eccb51b62aa395868dabf1b4a753116837637ad1800aab0866d867d73b4c40cf"} Nov 27 08:26:10 crc kubenswrapper[4971]: I1127 08:26:10.984515 4971 scope.go:117] "RemoveContainer" containerID="f75b8b87c74209d0c677497d4eae7ef6d82aa8091a24df2d9e259cf84e797853" Nov 27 08:26:10 crc kubenswrapper[4971]: I1127 08:26:10.984745 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8ll4c" Nov 27 08:26:11 crc kubenswrapper[4971]: I1127 08:26:11.008016 4971 scope.go:117] "RemoveContainer" containerID="e9c6419359db20905346dd2b4e2e3fb0d223d18b2f6f97102f352f839611600d" Nov 27 08:26:11 crc kubenswrapper[4971]: I1127 08:26:11.025244 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8ll4c"] Nov 27 08:26:11 crc kubenswrapper[4971]: I1127 08:26:11.030756 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7ef79f9-2133-467c-bcad-e29afcdb340c-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 08:26:11 crc kubenswrapper[4971]: I1127 08:26:11.034407 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8ll4c"] Nov 27 08:26:11 crc kubenswrapper[4971]: I1127 08:26:11.038308 4971 scope.go:117] "RemoveContainer" containerID="524f8b979fc9c1a82c6c8be637440ac3b341b1c89e480914d36065efe6f61f7e" Nov 27 08:26:12 crc kubenswrapper[4971]: I1127 08:26:12.565342 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7ef79f9-2133-467c-bcad-e29afcdb340c" path="/var/lib/kubelet/pods/f7ef79f9-2133-467c-bcad-e29afcdb340c/volumes" Nov 27 08:26:13 crc kubenswrapper[4971]: I1127 08:26:13.920849 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2ftfb"] Nov 27 08:26:13 crc kubenswrapper[4971]: I1127 08:26:13.921181 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2ftfb" podUID="a72e94b8-cad5-4697-9c5e-6c0ba792d155" containerName="registry-server" containerID="cri-o://755841fd55002a6a725bf30f86efe91a276890e0dc347c1e56534549caae41a7" gracePeriod=2 Nov 27 08:26:14 crc kubenswrapper[4971]: I1127 
08:26:14.315489 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2ftfb" Nov 27 08:26:14 crc kubenswrapper[4971]: I1127 08:26:14.402297 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a72e94b8-cad5-4697-9c5e-6c0ba792d155-utilities\") pod \"a72e94b8-cad5-4697-9c5e-6c0ba792d155\" (UID: \"a72e94b8-cad5-4697-9c5e-6c0ba792d155\") " Nov 27 08:26:14 crc kubenswrapper[4971]: I1127 08:26:14.402560 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a72e94b8-cad5-4697-9c5e-6c0ba792d155-catalog-content\") pod \"a72e94b8-cad5-4697-9c5e-6c0ba792d155\" (UID: \"a72e94b8-cad5-4697-9c5e-6c0ba792d155\") " Nov 27 08:26:14 crc kubenswrapper[4971]: I1127 08:26:14.402610 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8ksh\" (UniqueName: \"kubernetes.io/projected/a72e94b8-cad5-4697-9c5e-6c0ba792d155-kube-api-access-f8ksh\") pod \"a72e94b8-cad5-4697-9c5e-6c0ba792d155\" (UID: \"a72e94b8-cad5-4697-9c5e-6c0ba792d155\") " Nov 27 08:26:14 crc kubenswrapper[4971]: I1127 08:26:14.404393 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a72e94b8-cad5-4697-9c5e-6c0ba792d155-utilities" (OuterVolumeSpecName: "utilities") pod "a72e94b8-cad5-4697-9c5e-6c0ba792d155" (UID: "a72e94b8-cad5-4697-9c5e-6c0ba792d155"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:26:14 crc kubenswrapper[4971]: I1127 08:26:14.411132 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a72e94b8-cad5-4697-9c5e-6c0ba792d155-kube-api-access-f8ksh" (OuterVolumeSpecName: "kube-api-access-f8ksh") pod "a72e94b8-cad5-4697-9c5e-6c0ba792d155" (UID: "a72e94b8-cad5-4697-9c5e-6c0ba792d155"). InnerVolumeSpecName "kube-api-access-f8ksh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:26:14 crc kubenswrapper[4971]: I1127 08:26:14.465253 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a72e94b8-cad5-4697-9c5e-6c0ba792d155-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a72e94b8-cad5-4697-9c5e-6c0ba792d155" (UID: "a72e94b8-cad5-4697-9c5e-6c0ba792d155"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:26:14 crc kubenswrapper[4971]: I1127 08:26:14.506377 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a72e94b8-cad5-4697-9c5e-6c0ba792d155-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 08:26:14 crc kubenswrapper[4971]: I1127 08:26:14.506741 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8ksh\" (UniqueName: \"kubernetes.io/projected/a72e94b8-cad5-4697-9c5e-6c0ba792d155-kube-api-access-f8ksh\") on node \"crc\" DevicePath \"\"" Nov 27 08:26:14 crc kubenswrapper[4971]: I1127 08:26:14.506834 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a72e94b8-cad5-4697-9c5e-6c0ba792d155-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 08:26:15 crc kubenswrapper[4971]: I1127 08:26:15.030935 4971 generic.go:334] "Generic (PLEG): container finished" podID="a72e94b8-cad5-4697-9c5e-6c0ba792d155" containerID="755841fd55002a6a725bf30f86efe91a276890e0dc347c1e56534549caae41a7" exitCode=0 Nov 27 08:26:15 crc kubenswrapper[4971]: I1127 08:26:15.031016 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2ftfb" event={"ID":"a72e94b8-cad5-4697-9c5e-6c0ba792d155","Type":"ContainerDied","Data":"755841fd55002a6a725bf30f86efe91a276890e0dc347c1e56534549caae41a7"} Nov 27 08:26:15 crc kubenswrapper[4971]: I1127 08:26:15.031063 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2ftfb" event={"ID":"a72e94b8-cad5-4697-9c5e-6c0ba792d155","Type":"ContainerDied","Data":"3cdb33f61792a81ce23e1acf7be31f66e56f09f9678e2357990082ad7760e741"} Nov 27 08:26:15 crc kubenswrapper[4971]: I1127 08:26:15.031094 4971 scope.go:117] "RemoveContainer" containerID="755841fd55002a6a725bf30f86efe91a276890e0dc347c1e56534549caae41a7" Nov 27 08:26:15 crc kubenswrapper[4971]: I1127 08:26:15.031342 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2ftfb" Nov 27 08:26:15 crc kubenswrapper[4971]: I1127 08:26:15.070909 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2ftfb"] Nov 27 08:26:15 crc kubenswrapper[4971]: I1127 08:26:15.083073 4971 scope.go:117] "RemoveContainer" containerID="19d6032051e20fef9414916aed7b5cd21bed89a347b615b646528ad48bafb2c1" Nov 27 08:26:15 crc kubenswrapper[4971]: I1127 08:26:15.086206 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2ftfb"] Nov 27 08:26:15 crc kubenswrapper[4971]: I1127 08:26:15.120194 4971 scope.go:117] "RemoveContainer" containerID="9ee4144709e3b3815142ddcde5f3e55213992d2a216770fb328891d1e9d1a26f" Nov 27 08:26:15 crc kubenswrapper[4971]: I1127 08:26:15.140820 4971 scope.go:117] "RemoveContainer" containerID="755841fd55002a6a725bf30f86efe91a276890e0dc347c1e56534549caae41a7" Nov 27 08:26:15 crc kubenswrapper[4971]: E1127 08:26:15.141525 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"755841fd55002a6a725bf30f86efe91a276890e0dc347c1e56534549caae41a7\": container with ID starting with 755841fd55002a6a725bf30f86efe91a276890e0dc347c1e56534549caae41a7 not found: ID does not exist" containerID="755841fd55002a6a725bf30f86efe91a276890e0dc347c1e56534549caae41a7" Nov 27 08:26:15 crc kubenswrapper[4971]: I1127 08:26:15.141703 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"755841fd55002a6a725bf30f86efe91a276890e0dc347c1e56534549caae41a7"} err="failed to get container status \"755841fd55002a6a725bf30f86efe91a276890e0dc347c1e56534549caae41a7\": rpc error: code = NotFound desc = could not find container \"755841fd55002a6a725bf30f86efe91a276890e0dc347c1e56534549caae41a7\": container with ID starting with 755841fd55002a6a725bf30f86efe91a276890e0dc347c1e56534549caae41a7 not found: ID does not exist" Nov 27 08:26:15 crc kubenswrapper[4971]: I1127 08:26:15.141813 4971 scope.go:117] "RemoveContainer" containerID="19d6032051e20fef9414916aed7b5cd21bed89a347b615b646528ad48bafb2c1" Nov 27 08:26:15 crc kubenswrapper[4971]: E1127 08:26:15.142311 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19d6032051e20fef9414916aed7b5cd21bed89a347b615b646528ad48bafb2c1\": container with ID starting with 19d6032051e20fef9414916aed7b5cd21bed89a347b615b646528ad48bafb2c1 not found: ID does not exist" containerID="19d6032051e20fef9414916aed7b5cd21bed89a347b615b646528ad48bafb2c1" Nov 27 08:26:15 crc kubenswrapper[4971]: I1127 08:26:15.142381 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19d6032051e20fef9414916aed7b5cd21bed89a347b615b646528ad48bafb2c1"} err="failed to get container status \"19d6032051e20fef9414916aed7b5cd21bed89a347b615b646528ad48bafb2c1\": rpc error: code = NotFound desc = could not find container \"19d6032051e20fef9414916aed7b5cd21bed89a347b615b646528ad48bafb2c1\": container with ID starting with 19d6032051e20fef9414916aed7b5cd21bed89a347b615b646528ad48bafb2c1 not found: ID does not exist" Nov 27 08:26:15 crc kubenswrapper[4971]: I1127 08:26:15.142424 4971 scope.go:117] "RemoveContainer" containerID="9ee4144709e3b3815142ddcde5f3e55213992d2a216770fb328891d1e9d1a26f" Nov 27 08:26:15 crc kubenswrapper[4971]: E1127 08:26:15.142823 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"9ee4144709e3b3815142ddcde5f3e55213992d2a216770fb328891d1e9d1a26f\": container with ID starting with 9ee4144709e3b3815142ddcde5f3e55213992d2a216770fb328891d1e9d1a26f not found: ID does not exist" containerID="9ee4144709e3b3815142ddcde5f3e55213992d2a216770fb328891d1e9d1a26f" Nov 27 08:26:15 crc kubenswrapper[4971]: I1127 08:26:15.142865 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ee4144709e3b3815142ddcde5f3e55213992d2a216770fb328891d1e9d1a26f"} err="failed to get container status \"9ee4144709e3b3815142ddcde5f3e55213992d2a216770fb328891d1e9d1a26f\": rpc error: code = NotFound desc = could not find container \"9ee4144709e3b3815142ddcde5f3e55213992d2a216770fb328891d1e9d1a26f\": container with ID starting with 9ee4144709e3b3815142ddcde5f3e55213992d2a216770fb328891d1e9d1a26f not found: ID does not exist" Nov 27 08:26:15 crc kubenswrapper[4971]: I1127 08:26:15.714170 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-snrv6" Nov 27 08:26:16 crc kubenswrapper[4971]: I1127 08:26:16.564753 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a72e94b8-cad5-4697-9c5e-6c0ba792d155" path="/var/lib/kubelet/pods/a72e94b8-cad5-4697-9c5e-6c0ba792d155/volumes" Nov 27 08:26:22 crc kubenswrapper[4971]: I1127 08:26:22.128213 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-snrv6"] Nov 27 08:26:22 crc kubenswrapper[4971]: I1127 08:26:22.129270 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-snrv6" podUID="c27561fe-dbb7-41d1-95de-e1def1e99715" containerName="registry-server" containerID="cri-o://59e164888adc861ec684d87be96c6631b55750b9c7abfba2d92962fc785c769b" gracePeriod=2 Nov 27 08:26:22 crc kubenswrapper[4971]: I1127 08:26:22.606294 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-snrv6" Nov 27 08:26:22 crc kubenswrapper[4971]: I1127 08:26:22.760604 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c27561fe-dbb7-41d1-95de-e1def1e99715-utilities\") pod \"c27561fe-dbb7-41d1-95de-e1def1e99715\" (UID: \"c27561fe-dbb7-41d1-95de-e1def1e99715\") " Nov 27 08:26:22 crc kubenswrapper[4971]: I1127 08:26:22.761370 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c27561fe-dbb7-41d1-95de-e1def1e99715-catalog-content\") pod \"c27561fe-dbb7-41d1-95de-e1def1e99715\" (UID: \"c27561fe-dbb7-41d1-95de-e1def1e99715\") " Nov 27 08:26:22 crc kubenswrapper[4971]: I1127 08:26:22.761602 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cq9kl\" (UniqueName: \"kubernetes.io/projected/c27561fe-dbb7-41d1-95de-e1def1e99715-kube-api-access-cq9kl\") pod \"c27561fe-dbb7-41d1-95de-e1def1e99715\" (UID: \"c27561fe-dbb7-41d1-95de-e1def1e99715\") " Nov 27 08:26:22 crc kubenswrapper[4971]: I1127 08:26:22.761608 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c27561fe-dbb7-41d1-95de-e1def1e99715-utilities" (OuterVolumeSpecName: "utilities") pod "c27561fe-dbb7-41d1-95de-e1def1e99715" (UID: "c27561fe-dbb7-41d1-95de-e1def1e99715"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:26:22 crc kubenswrapper[4971]: I1127 08:26:22.761981 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c27561fe-dbb7-41d1-95de-e1def1e99715-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 08:26:22 crc kubenswrapper[4971]: I1127 08:26:22.769362 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c27561fe-dbb7-41d1-95de-e1def1e99715-kube-api-access-cq9kl" (OuterVolumeSpecName: "kube-api-access-cq9kl") pod "c27561fe-dbb7-41d1-95de-e1def1e99715" (UID: "c27561fe-dbb7-41d1-95de-e1def1e99715"). InnerVolumeSpecName "kube-api-access-cq9kl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:26:22 crc kubenswrapper[4971]: I1127 08:26:22.814116 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c27561fe-dbb7-41d1-95de-e1def1e99715-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c27561fe-dbb7-41d1-95de-e1def1e99715" (UID: "c27561fe-dbb7-41d1-95de-e1def1e99715"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:26:22 crc kubenswrapper[4971]: I1127 08:26:22.864238 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c27561fe-dbb7-41d1-95de-e1def1e99715-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 08:26:22 crc kubenswrapper[4971]: I1127 08:26:22.864303 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cq9kl\" (UniqueName: \"kubernetes.io/projected/c27561fe-dbb7-41d1-95de-e1def1e99715-kube-api-access-cq9kl\") on node \"crc\" DevicePath \"\"" Nov 27 08:26:23 crc kubenswrapper[4971]: I1127 08:26:23.107094 4971 generic.go:334] "Generic (PLEG): container finished" podID="c27561fe-dbb7-41d1-95de-e1def1e99715" containerID="59e164888adc861ec684d87be96c6631b55750b9c7abfba2d92962fc785c769b" exitCode=0 Nov 27 08:26:23 crc kubenswrapper[4971]: I1127 08:26:23.107155 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-snrv6" event={"ID":"c27561fe-dbb7-41d1-95de-e1def1e99715","Type":"ContainerDied","Data":"59e164888adc861ec684d87be96c6631b55750b9c7abfba2d92962fc785c769b"} Nov 27 08:26:23 crc kubenswrapper[4971]: I1127 08:26:23.107195 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-snrv6" event={"ID":"c27561fe-dbb7-41d1-95de-e1def1e99715","Type":"ContainerDied","Data":"232a43d8ef39f3ca3fd230f26ab1fe002f2310710ecd286d1e38eccde4c5cd55"} Nov 27 08:26:23 crc kubenswrapper[4971]: I1127 08:26:23.107222 4971 scope.go:117] "RemoveContainer" containerID="59e164888adc861ec684d87be96c6631b55750b9c7abfba2d92962fc785c769b" Nov 27 08:26:23 crc kubenswrapper[4971]: I1127 08:26:23.107778 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-snrv6" Nov 27 08:26:23 crc kubenswrapper[4971]: I1127 08:26:23.146617 4971 scope.go:117] "RemoveContainer" containerID="1f18a7168f683dbdfbb9acc3afbf0dcfcb8d62dc7840189ba44cecacda6a78a5" Nov 27 08:26:23 crc kubenswrapper[4971]: I1127 08:26:23.150025 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-snrv6"] Nov 27 08:26:23 crc kubenswrapper[4971]: I1127 08:26:23.155913 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-snrv6"] Nov 27 08:26:23 crc kubenswrapper[4971]: I1127 08:26:23.190603 4971 scope.go:117] "RemoveContainer" containerID="04bbc82bb853f58d2dd13eefbbcbf6f9d4a8c0deee1fc969d64c39e861a16842" Nov 27 08:26:23 crc kubenswrapper[4971]: I1127 08:26:23.233133 4971 scope.go:117] "RemoveContainer" containerID="59e164888adc861ec684d87be96c6631b55750b9c7abfba2d92962fc785c769b" Nov 27 08:26:23 crc kubenswrapper[4971]: E1127 08:26:23.233715 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59e164888adc861ec684d87be96c6631b55750b9c7abfba2d92962fc785c769b\": container with ID starting with 59e164888adc861ec684d87be96c6631b55750b9c7abfba2d92962fc785c769b not found: ID does not exist" containerID="59e164888adc861ec684d87be96c6631b55750b9c7abfba2d92962fc785c769b" Nov 27 08:26:23 crc kubenswrapper[4971]: I1127 08:26:23.233769 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59e164888adc861ec684d87be96c6631b55750b9c7abfba2d92962fc785c769b"} err="failed to get container status \"59e164888adc861ec684d87be96c6631b55750b9c7abfba2d92962fc785c769b\": rpc error: code = NotFound desc = could not find container \"59e164888adc861ec684d87be96c6631b55750b9c7abfba2d92962fc785c769b\": container with ID starting with 59e164888adc861ec684d87be96c6631b55750b9c7abfba2d92962fc785c769b not found: ID does not exist" Nov 27 08:26:23 crc kubenswrapper[4971]: I1127 08:26:23.233802 4971 scope.go:117] "RemoveContainer" containerID="1f18a7168f683dbdfbb9acc3afbf0dcfcb8d62dc7840189ba44cecacda6a78a5" Nov 27 08:26:23 crc kubenswrapper[4971]: E1127 08:26:23.234210 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f18a7168f683dbdfbb9acc3afbf0dcfcb8d62dc7840189ba44cecacda6a78a5\": container with ID starting with 1f18a7168f683dbdfbb9acc3afbf0dcfcb8d62dc7840189ba44cecacda6a78a5 not found: ID does not exist" containerID="1f18a7168f683dbdfbb9acc3afbf0dcfcb8d62dc7840189ba44cecacda6a78a5" Nov 27 08:26:23 crc kubenswrapper[4971]: I1127 08:26:23.234236 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f18a7168f683dbdfbb9acc3afbf0dcfcb8d62dc7840189ba44cecacda6a78a5"} err="failed to get container status \"1f18a7168f683dbdfbb9acc3afbf0dcfcb8d62dc7840189ba44cecacda6a78a5\": rpc error: code = NotFound desc = could not find container \"1f18a7168f683dbdfbb9acc3afbf0dcfcb8d62dc7840189ba44cecacda6a78a5\": container with ID starting with 1f18a7168f683dbdfbb9acc3afbf0dcfcb8d62dc7840189ba44cecacda6a78a5 not found: ID does not exist" Nov 27 08:26:23 crc kubenswrapper[4971]: I1127 08:26:23.234253 4971 scope.go:117] "RemoveContainer" containerID="04bbc82bb853f58d2dd13eefbbcbf6f9d4a8c0deee1fc969d64c39e861a16842" Nov 27 08:26:23 crc kubenswrapper[4971]: E1127 08:26:23.234599 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"04bbc82bb853f58d2dd13eefbbcbf6f9d4a8c0deee1fc969d64c39e861a16842\": container with ID starting with 04bbc82bb853f58d2dd13eefbbcbf6f9d4a8c0deee1fc969d64c39e861a16842 not found: ID does not exist" containerID="04bbc82bb853f58d2dd13eefbbcbf6f9d4a8c0deee1fc969d64c39e861a16842" Nov 27 08:26:23 crc kubenswrapper[4971]: I1127 08:26:23.234644 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04bbc82bb853f58d2dd13eefbbcbf6f9d4a8c0deee1fc969d64c39e861a16842"} err="failed to get container status \"04bbc82bb853f58d2dd13eefbbcbf6f9d4a8c0deee1fc969d64c39e861a16842\": rpc error: code = NotFound desc = could not find container \"04bbc82bb853f58d2dd13eefbbcbf6f9d4a8c0deee1fc969d64c39e861a16842\": container with ID starting with 04bbc82bb853f58d2dd13eefbbcbf6f9d4a8c0deee1fc969d64c39e861a16842 not found: ID does not exist" Nov 27 08:26:24 crc kubenswrapper[4971]: I1127 08:26:24.561464 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c27561fe-dbb7-41d1-95de-e1def1e99715" path="/var/lib/kubelet/pods/c27561fe-dbb7-41d1-95de-e1def1e99715/volumes" Nov 27 08:26:26 crc kubenswrapper[4971]: I1127 08:26:26.413125 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:26:26 crc kubenswrapper[4971]: I1127 08:26:26.413240 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:26:56 crc kubenswrapper[4971]: I1127 08:26:56.413435 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:26:56 crc kubenswrapper[4971]: I1127 08:26:56.414363 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:27:26 crc kubenswrapper[4971]: I1127 08:27:26.413575 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:27:26 crc kubenswrapper[4971]: I1127 08:27:26.414177 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:27:26 crc kubenswrapper[4971]: I1127 08:27:26.414218 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 08:27:26 crc kubenswrapper[4971]: I1127 08:27:26.415016 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 08:27:26 crc kubenswrapper[4971]: I1127 08:27:26.415075 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" gracePeriod=600 Nov 27 08:27:26 crc kubenswrapper[4971]: E1127 08:27:26.551340 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:27:26 crc kubenswrapper[4971]: I1127 08:27:26.703443 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" exitCode=0 Nov 27 08:27:26 crc kubenswrapper[4971]: I1127 08:27:26.703484 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a"} Nov 27 08:27:26 crc kubenswrapper[4971]: I1127 08:27:26.703573 4971 scope.go:117] "RemoveContainer" containerID="191774ed9bf782651f467ab8c8b78eb518cc6f5ac5f6914842a64746bf293caf" Nov 27 08:27:26 crc kubenswrapper[4971]: I1127 08:27:26.704584 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:27:26 crc kubenswrapper[4971]: E1127 08:27:26.705075 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:27:37 crc kubenswrapper[4971]: I1127 08:27:37.550799 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:27:37 crc kubenswrapper[4971]: E1127 08:27:37.552029 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:27:51 crc 
kubenswrapper[4971]: I1127 08:27:51.551583 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:27:51 crc kubenswrapper[4971]: E1127 08:27:51.552725 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:28:02 crc kubenswrapper[4971]: I1127 08:28:02.550253 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:28:02 crc kubenswrapper[4971]: E1127 08:28:02.551042 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:28:15 crc kubenswrapper[4971]: I1127 08:28:15.551579 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:28:15 crc kubenswrapper[4971]: E1127 08:28:15.554980 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:28:26 crc kubenswrapper[4971]: I1127 08:28:26.550979 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:28:26 crc kubenswrapper[4971]: E1127 08:28:26.552013 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:28:37 crc kubenswrapper[4971]: I1127 08:28:37.551509 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:28:37 crc kubenswrapper[4971]: E1127 08:28:37.554477 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:28:50 crc kubenswrapper[4971]: I1127 08:28:50.550827 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:28:50 crc 
kubenswrapper[4971]: E1127 08:28:50.551814 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:29:01 crc kubenswrapper[4971]: I1127 08:29:01.551597 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:29:01 crc kubenswrapper[4971]: E1127 08:29:01.552642 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:29:13 crc kubenswrapper[4971]: I1127 08:29:13.551185 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:29:13 crc kubenswrapper[4971]: E1127 08:29:13.552174 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:29:28 crc kubenswrapper[4971]: I1127 08:29:28.551279 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:29:28 crc kubenswrapper[4971]: E1127 08:29:28.552464 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:29:39 crc kubenswrapper[4971]: I1127 08:29:39.550835 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:29:39 crc kubenswrapper[4971]: E1127 08:29:39.551628 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:29:52 crc kubenswrapper[4971]: I1127 08:29:52.558083 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:29:52 crc kubenswrapper[4971]: E1127 08:29:52.558982 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.161211 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r"] Nov 27 08:30:00 crc kubenswrapper[4971]: E1127 08:30:00.162705 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a72e94b8-cad5-4697-9c5e-6c0ba792d155" containerName="registry-server" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.162734 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="a72e94b8-cad5-4697-9c5e-6c0ba792d155" containerName="registry-server" Nov 27 08:30:00 crc kubenswrapper[4971]: E1127 08:30:00.162754 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c27561fe-dbb7-41d1-95de-e1def1e99715" containerName="registry-server" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.162762 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c27561fe-dbb7-41d1-95de-e1def1e99715" containerName="registry-server" Nov 27 08:30:00 crc kubenswrapper[4971]: E1127 08:30:00.162775 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7ef79f9-2133-467c-bcad-e29afcdb340c" containerName="extract-content" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.162783 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7ef79f9-2133-467c-bcad-e29afcdb340c" containerName="extract-content" Nov 27 08:30:00 crc kubenswrapper[4971]: E1127 08:30:00.162793 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a72e94b8-cad5-4697-9c5e-6c0ba792d155" containerName="extract-utilities" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.162801 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="a72e94b8-cad5-4697-9c5e-6c0ba792d155" containerName="extract-utilities" Nov 27 08:30:00 crc kubenswrapper[4971]: E1127 08:30:00.162815 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a45ad671-96d6-4717-86da-34f906acbbb8" containerName="extract-content" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.162823 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="a45ad671-96d6-4717-86da-34f906acbbb8" containerName="extract-content" Nov 27 08:30:00 crc kubenswrapper[4971]: E1127 08:30:00.162856 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a45ad671-96d6-4717-86da-34f906acbbb8" containerName="registry-server" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.162865 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="a45ad671-96d6-4717-86da-34f906acbbb8" containerName="registry-server" Nov 27 08:30:00 crc kubenswrapper[4971]: E1127 08:30:00.162875 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7ef79f9-2133-467c-bcad-e29afcdb340c" containerName="registry-server" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.162883 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7ef79f9-2133-467c-bcad-e29afcdb340c" containerName="registry-server" Nov 27 08:30:00 crc kubenswrapper[4971]: E1127 08:30:00.162896 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c27561fe-dbb7-41d1-95de-e1def1e99715" containerName="extract-content" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.162904 4971 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="c27561fe-dbb7-41d1-95de-e1def1e99715" containerName="extract-content" Nov 27 08:30:00 crc kubenswrapper[4971]: E1127 08:30:00.162922 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a45ad671-96d6-4717-86da-34f906acbbb8" containerName="extract-utilities" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.162949 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="a45ad671-96d6-4717-86da-34f906acbbb8" containerName="extract-utilities" Nov 27 08:30:00 crc kubenswrapper[4971]: E1127 08:30:00.162963 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c27561fe-dbb7-41d1-95de-e1def1e99715" containerName="extract-utilities" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.162972 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c27561fe-dbb7-41d1-95de-e1def1e99715" containerName="extract-utilities" Nov 27 08:30:00 crc kubenswrapper[4971]: E1127 08:30:00.162992 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7ef79f9-2133-467c-bcad-e29afcdb340c" containerName="extract-utilities" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.162999 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7ef79f9-2133-467c-bcad-e29afcdb340c" containerName="extract-utilities" Nov 27 08:30:00 crc kubenswrapper[4971]: E1127 08:30:00.163010 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a72e94b8-cad5-4697-9c5e-6c0ba792d155" containerName="extract-content" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.163017 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="a72e94b8-cad5-4697-9c5e-6c0ba792d155" containerName="extract-content" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.163252 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="a45ad671-96d6-4717-86da-34f906acbbb8" containerName="registry-server" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.163274 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7ef79f9-2133-467c-bcad-e29afcdb340c" containerName="registry-server" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.163292 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="a72e94b8-cad5-4697-9c5e-6c0ba792d155" containerName="registry-server" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.163307 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c27561fe-dbb7-41d1-95de-e1def1e99715" containerName="registry-server" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.164164 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.167797 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r"] Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.192693 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.192771 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.246171 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5c99677c-3c5d-410f-af02-63380ec76457-config-volume\") pod \"collect-profiles-29403870-pw78r\" (UID: \"5c99677c-3c5d-410f-af02-63380ec76457\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.246296 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pmb2\" (UniqueName: \"kubernetes.io/projected/5c99677c-3c5d-410f-af02-63380ec76457-kube-api-access-2pmb2\") pod \"collect-profiles-29403870-pw78r\" (UID: \"5c99677c-3c5d-410f-af02-63380ec76457\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.246352 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5c99677c-3c5d-410f-af02-63380ec76457-secret-volume\") pod \"collect-profiles-29403870-pw78r\" (UID: \"5c99677c-3c5d-410f-af02-63380ec76457\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.348258 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5c99677c-3c5d-410f-af02-63380ec76457-secret-volume\") pod \"collect-profiles-29403870-pw78r\" (UID: \"5c99677c-3c5d-410f-af02-63380ec76457\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.348808 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5c99677c-3c5d-410f-af02-63380ec76457-config-volume\") pod \"collect-profiles-29403870-pw78r\" (UID: \"5c99677c-3c5d-410f-af02-63380ec76457\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.348886 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pmb2\" (UniqueName: \"kubernetes.io/projected/5c99677c-3c5d-410f-af02-63380ec76457-kube-api-access-2pmb2\") pod \"collect-profiles-29403870-pw78r\" (UID: \"5c99677c-3c5d-410f-af02-63380ec76457\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.349921 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5c99677c-3c5d-410f-af02-63380ec76457-config-volume\") pod 
\"collect-profiles-29403870-pw78r\" (UID: \"5c99677c-3c5d-410f-af02-63380ec76457\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.356015 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5c99677c-3c5d-410f-af02-63380ec76457-secret-volume\") pod \"collect-profiles-29403870-pw78r\" (UID: \"5c99677c-3c5d-410f-af02-63380ec76457\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.382297 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pmb2\" (UniqueName: \"kubernetes.io/projected/5c99677c-3c5d-410f-af02-63380ec76457-kube-api-access-2pmb2\") pod \"collect-profiles-29403870-pw78r\" (UID: \"5c99677c-3c5d-410f-af02-63380ec76457\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.510036 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r" Nov 27 08:30:00 crc kubenswrapper[4971]: I1127 08:30:00.978441 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r"] Nov 27 08:30:01 crc kubenswrapper[4971]: I1127 08:30:01.146692 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r" event={"ID":"5c99677c-3c5d-410f-af02-63380ec76457","Type":"ContainerStarted","Data":"3355ef0013f2cbd8c31f4d2e4c0c166b808ac1a58637fd6f326b2f81ed45e8c4"} Nov 27 08:30:01 crc kubenswrapper[4971]: I1127 08:30:01.146759 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r" event={"ID":"5c99677c-3c5d-410f-af02-63380ec76457","Type":"ContainerStarted","Data":"a54cd4c3ad38768daa07ba9819747a3d8ddbacc19fe650bceb7c1f5fa16e737b"} Nov 27 08:30:02 crc kubenswrapper[4971]: I1127 08:30:02.155379 4971 generic.go:334] "Generic (PLEG): container finished" podID="5c99677c-3c5d-410f-af02-63380ec76457" containerID="3355ef0013f2cbd8c31f4d2e4c0c166b808ac1a58637fd6f326b2f81ed45e8c4" exitCode=0 Nov 27 08:30:02 crc kubenswrapper[4971]: I1127 08:30:02.155890 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r" event={"ID":"5c99677c-3c5d-410f-af02-63380ec76457","Type":"ContainerDied","Data":"3355ef0013f2cbd8c31f4d2e4c0c166b808ac1a58637fd6f326b2f81ed45e8c4"} Nov 27 08:30:03 crc kubenswrapper[4971]: I1127 08:30:03.464865 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r" Nov 27 08:30:03 crc kubenswrapper[4971]: I1127 08:30:03.606221 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5c99677c-3c5d-410f-af02-63380ec76457-config-volume\") pod \"5c99677c-3c5d-410f-af02-63380ec76457\" (UID: \"5c99677c-3c5d-410f-af02-63380ec76457\") " Nov 27 08:30:03 crc kubenswrapper[4971]: I1127 08:30:03.606498 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5c99677c-3c5d-410f-af02-63380ec76457-secret-volume\") pod \"5c99677c-3c5d-410f-af02-63380ec76457\" (UID: \"5c99677c-3c5d-410f-af02-63380ec76457\") " Nov 27 08:30:03 crc kubenswrapper[4971]: I1127 08:30:03.606616 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2pmb2\" (UniqueName: \"kubernetes.io/projected/5c99677c-3c5d-410f-af02-63380ec76457-kube-api-access-2pmb2\") pod \"5c99677c-3c5d-410f-af02-63380ec76457\" (UID: \"5c99677c-3c5d-410f-af02-63380ec76457\") " Nov 27 08:30:03 crc kubenswrapper[4971]: I1127 08:30:03.607230 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c99677c-3c5d-410f-af02-63380ec76457-config-volume" (OuterVolumeSpecName: "config-volume") pod "5c99677c-3c5d-410f-af02-63380ec76457" (UID: "5c99677c-3c5d-410f-af02-63380ec76457"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:30:03 crc kubenswrapper[4971]: I1127 08:30:03.612206 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c99677c-3c5d-410f-af02-63380ec76457-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5c99677c-3c5d-410f-af02-63380ec76457" (UID: "5c99677c-3c5d-410f-af02-63380ec76457"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:30:03 crc kubenswrapper[4971]: I1127 08:30:03.612790 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c99677c-3c5d-410f-af02-63380ec76457-kube-api-access-2pmb2" (OuterVolumeSpecName: "kube-api-access-2pmb2") pod "5c99677c-3c5d-410f-af02-63380ec76457" (UID: "5c99677c-3c5d-410f-af02-63380ec76457"). InnerVolumeSpecName "kube-api-access-2pmb2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:30:03 crc kubenswrapper[4971]: I1127 08:30:03.709365 4971 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5c99677c-3c5d-410f-af02-63380ec76457-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 27 08:30:03 crc kubenswrapper[4971]: I1127 08:30:03.709417 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2pmb2\" (UniqueName: \"kubernetes.io/projected/5c99677c-3c5d-410f-af02-63380ec76457-kube-api-access-2pmb2\") on node \"crc\" DevicePath \"\"" Nov 27 08:30:03 crc kubenswrapper[4971]: I1127 08:30:03.709432 4971 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5c99677c-3c5d-410f-af02-63380ec76457-config-volume\") on node \"crc\" DevicePath \"\"" Nov 27 08:30:04 crc kubenswrapper[4971]: I1127 08:30:04.171440 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r" event={"ID":"5c99677c-3c5d-410f-af02-63380ec76457","Type":"ContainerDied","Data":"a54cd4c3ad38768daa07ba9819747a3d8ddbacc19fe650bceb7c1f5fa16e737b"} Nov 27 08:30:04 crc kubenswrapper[4971]: I1127 08:30:04.171493 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a54cd4c3ad38768daa07ba9819747a3d8ddbacc19fe650bceb7c1f5fa16e737b" Nov 27 08:30:04 crc kubenswrapper[4971]: I1127 08:30:04.171495 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r" Nov 27 08:30:04 crc kubenswrapper[4971]: I1127 08:30:04.580266 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b"] Nov 27 08:30:04 crc kubenswrapper[4971]: I1127 08:30:04.580331 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403825-8f48b"] Nov 27 08:30:05 crc kubenswrapper[4971]: I1127 08:30:05.550206 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:30:05 crc kubenswrapper[4971]: E1127 08:30:05.551045 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:30:06 crc kubenswrapper[4971]: I1127 08:30:06.568239 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8562a4b0-3958-4e0d-bb1b-f98624c8e0a0" path="/var/lib/kubelet/pods/8562a4b0-3958-4e0d-bb1b-f98624c8e0a0/volumes" Nov 27 08:30:17 crc kubenswrapper[4971]: I1127 08:30:17.552175 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:30:17 crc kubenswrapper[4971]: E1127 08:30:17.553151 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:30:29 crc kubenswrapper[4971]: I1127 08:30:29.551052 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:30:29 crc kubenswrapper[4971]: E1127 08:30:29.552009 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:30:43 crc kubenswrapper[4971]: I1127 08:30:43.550365 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:30:43 crc kubenswrapper[4971]: E1127 08:30:43.551264 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:30:52 crc kubenswrapper[4971]: I1127 08:30:52.198515 4971 scope.go:117] "RemoveContainer" containerID="c28e1eb4431d10612d0132b96a91d3622d0af46c80f61fcc774274ce1fc4a912" Nov 27 08:30:57 crc kubenswrapper[4971]: I1127 08:30:57.551181 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:30:57 crc kubenswrapper[4971]: E1127 08:30:57.552039 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:31:11 crc kubenswrapper[4971]: I1127 08:31:11.550884 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:31:11 crc kubenswrapper[4971]: E1127 08:31:11.551922 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:31:22 crc kubenswrapper[4971]: I1127 08:31:22.555330 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:31:22 crc kubenswrapper[4971]: E1127 08:31:22.556319 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.116506 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-9dxfl"] Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.123017 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-9dxfl"] Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.237029 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-zzv4h"] Nov 27 08:31:35 crc kubenswrapper[4971]: E1127 08:31:35.237447 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c99677c-3c5d-410f-af02-63380ec76457" containerName="collect-profiles" Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.237473 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c99677c-3c5d-410f-af02-63380ec76457" containerName="collect-profiles" Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.237728 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c99677c-3c5d-410f-af02-63380ec76457" containerName="collect-profiles" Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.238277 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-zzv4h" Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.242042 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.242447 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.242477 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.242729 4971 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-887xw" Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.248748 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-zzv4h"] Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.352114 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/4f7c22b6-f59e-44a1-b697-5a20473af06f-node-mnt\") pod \"crc-storage-crc-zzv4h\" (UID: \"4f7c22b6-f59e-44a1-b697-5a20473af06f\") " pod="crc-storage/crc-storage-crc-zzv4h" Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.352209 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/4f7c22b6-f59e-44a1-b697-5a20473af06f-crc-storage\") pod \"crc-storage-crc-zzv4h\" (UID: \"4f7c22b6-f59e-44a1-b697-5a20473af06f\") " pod="crc-storage/crc-storage-crc-zzv4h" Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.352266 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6txq\" (UniqueName: \"kubernetes.io/projected/4f7c22b6-f59e-44a1-b697-5a20473af06f-kube-api-access-v6txq\") pod \"crc-storage-crc-zzv4h\" (UID: \"4f7c22b6-f59e-44a1-b697-5a20473af06f\") " pod="crc-storage/crc-storage-crc-zzv4h" Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.454203 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" 
(UniqueName: \"kubernetes.io/host-path/4f7c22b6-f59e-44a1-b697-5a20473af06f-node-mnt\") pod \"crc-storage-crc-zzv4h\" (UID: \"4f7c22b6-f59e-44a1-b697-5a20473af06f\") " pod="crc-storage/crc-storage-crc-zzv4h" Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.454345 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/4f7c22b6-f59e-44a1-b697-5a20473af06f-crc-storage\") pod \"crc-storage-crc-zzv4h\" (UID: \"4f7c22b6-f59e-44a1-b697-5a20473af06f\") " pod="crc-storage/crc-storage-crc-zzv4h" Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.454439 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6txq\" (UniqueName: \"kubernetes.io/projected/4f7c22b6-f59e-44a1-b697-5a20473af06f-kube-api-access-v6txq\") pod \"crc-storage-crc-zzv4h\" (UID: \"4f7c22b6-f59e-44a1-b697-5a20473af06f\") " pod="crc-storage/crc-storage-crc-zzv4h" Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.454659 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/4f7c22b6-f59e-44a1-b697-5a20473af06f-node-mnt\") pod \"crc-storage-crc-zzv4h\" (UID: \"4f7c22b6-f59e-44a1-b697-5a20473af06f\") " pod="crc-storage/crc-storage-crc-zzv4h" Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.455443 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/4f7c22b6-f59e-44a1-b697-5a20473af06f-crc-storage\") pod \"crc-storage-crc-zzv4h\" (UID: \"4f7c22b6-f59e-44a1-b697-5a20473af06f\") " pod="crc-storage/crc-storage-crc-zzv4h" Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.475689 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6txq\" (UniqueName: \"kubernetes.io/projected/4f7c22b6-f59e-44a1-b697-5a20473af06f-kube-api-access-v6txq\") pod \"crc-storage-crc-zzv4h\" (UID: \"4f7c22b6-f59e-44a1-b697-5a20473af06f\") " pod="crc-storage/crc-storage-crc-zzv4h" Nov 27 08:31:35 crc kubenswrapper[4971]: I1127 08:31:35.565299 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-zzv4h" Nov 27 08:31:36 crc kubenswrapper[4971]: I1127 08:31:36.035280 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-zzv4h"] Nov 27 08:31:36 crc kubenswrapper[4971]: I1127 08:31:36.058730 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 27 08:31:36 crc kubenswrapper[4971]: I1127 08:31:36.568660 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a1c4271-d673-41e7-a19d-76717f9f9c31" path="/var/lib/kubelet/pods/5a1c4271-d673-41e7-a19d-76717f9f9c31/volumes" Nov 27 08:31:37 crc kubenswrapper[4971]: I1127 08:31:37.031013 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zzv4h" event={"ID":"4f7c22b6-f59e-44a1-b697-5a20473af06f","Type":"ContainerStarted","Data":"8ac2d014b9a82b9f59b6be52733fb364ce456cf2299c4380fa51a61f23a8b7c2"} Nov 27 08:31:37 crc kubenswrapper[4971]: I1127 08:31:37.031624 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zzv4h" event={"ID":"4f7c22b6-f59e-44a1-b697-5a20473af06f","Type":"ContainerStarted","Data":"b8046f2fd4a900ce0f75129e8a4b863ded59254df50ed872fdb1ce67f1b32db1"} Nov 27 08:31:37 crc kubenswrapper[4971]: I1127 08:31:37.059345 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="crc-storage/crc-storage-crc-zzv4h" podStartSLOduration=1.415446426 podStartE2EDuration="2.059322925s" podCreationTimestamp="2025-11-27 08:31:35 +0000 UTC" firstStartedPulling="2025-11-27 08:31:36.058445536 +0000 UTC m=+5934.250489454" lastFinishedPulling="2025-11-27 08:31:36.702322025 +0000 UTC m=+5934.894365953" observedRunningTime="2025-11-27 08:31:37.056897137 +0000 UTC m=+5935.248941065" watchObservedRunningTime="2025-11-27 08:31:37.059322925 +0000 UTC m=+5935.251366843" Nov 27 08:31:37 crc kubenswrapper[4971]: I1127 08:31:37.550911 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:31:37 crc kubenswrapper[4971]: E1127 08:31:37.551581 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:31:38 crc kubenswrapper[4971]: I1127 08:31:38.042305 4971 generic.go:334] "Generic (PLEG): container finished" podID="4f7c22b6-f59e-44a1-b697-5a20473af06f" containerID="8ac2d014b9a82b9f59b6be52733fb364ce456cf2299c4380fa51a61f23a8b7c2" exitCode=0 Nov 27 08:31:38 crc kubenswrapper[4971]: I1127 08:31:38.042366 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zzv4h" event={"ID":"4f7c22b6-f59e-44a1-b697-5a20473af06f","Type":"ContainerDied","Data":"8ac2d014b9a82b9f59b6be52733fb364ce456cf2299c4380fa51a61f23a8b7c2"} Nov 27 08:31:39 crc kubenswrapper[4971]: I1127 08:31:39.365425 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-zzv4h" Nov 27 08:31:39 crc kubenswrapper[4971]: I1127 08:31:39.533004 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6txq\" (UniqueName: \"kubernetes.io/projected/4f7c22b6-f59e-44a1-b697-5a20473af06f-kube-api-access-v6txq\") pod \"4f7c22b6-f59e-44a1-b697-5a20473af06f\" (UID: \"4f7c22b6-f59e-44a1-b697-5a20473af06f\") " Nov 27 08:31:39 crc kubenswrapper[4971]: I1127 08:31:39.533249 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/4f7c22b6-f59e-44a1-b697-5a20473af06f-node-mnt\") pod \"4f7c22b6-f59e-44a1-b697-5a20473af06f\" (UID: \"4f7c22b6-f59e-44a1-b697-5a20473af06f\") " Nov 27 08:31:39 crc kubenswrapper[4971]: I1127 08:31:39.533322 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/4f7c22b6-f59e-44a1-b697-5a20473af06f-crc-storage\") pod \"4f7c22b6-f59e-44a1-b697-5a20473af06f\" (UID: \"4f7c22b6-f59e-44a1-b697-5a20473af06f\") " Nov 27 08:31:39 crc kubenswrapper[4971]: I1127 08:31:39.533455 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4f7c22b6-f59e-44a1-b697-5a20473af06f-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "4f7c22b6-f59e-44a1-b697-5a20473af06f" (UID: "4f7c22b6-f59e-44a1-b697-5a20473af06f"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 08:31:39 crc kubenswrapper[4971]: I1127 08:31:39.533693 4971 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/4f7c22b6-f59e-44a1-b697-5a20473af06f-node-mnt\") on node \"crc\" DevicePath \"\"" Nov 27 08:31:39 crc kubenswrapper[4971]: I1127 08:31:39.540552 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f7c22b6-f59e-44a1-b697-5a20473af06f-kube-api-access-v6txq" (OuterVolumeSpecName: "kube-api-access-v6txq") pod "4f7c22b6-f59e-44a1-b697-5a20473af06f" (UID: "4f7c22b6-f59e-44a1-b697-5a20473af06f"). InnerVolumeSpecName "kube-api-access-v6txq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:31:39 crc kubenswrapper[4971]: I1127 08:31:39.571073 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f7c22b6-f59e-44a1-b697-5a20473af06f-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "4f7c22b6-f59e-44a1-b697-5a20473af06f" (UID: "4f7c22b6-f59e-44a1-b697-5a20473af06f"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:31:39 crc kubenswrapper[4971]: I1127 08:31:39.636089 4971 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/4f7c22b6-f59e-44a1-b697-5a20473af06f-crc-storage\") on node \"crc\" DevicePath \"\"" Nov 27 08:31:39 crc kubenswrapper[4971]: I1127 08:31:39.636160 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6txq\" (UniqueName: \"kubernetes.io/projected/4f7c22b6-f59e-44a1-b697-5a20473af06f-kube-api-access-v6txq\") on node \"crc\" DevicePath \"\"" Nov 27 08:31:40 crc kubenswrapper[4971]: I1127 08:31:40.062166 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zzv4h" event={"ID":"4f7c22b6-f59e-44a1-b697-5a20473af06f","Type":"ContainerDied","Data":"b8046f2fd4a900ce0f75129e8a4b863ded59254df50ed872fdb1ce67f1b32db1"} Nov 27 08:31:40 crc kubenswrapper[4971]: I1127 08:31:40.062671 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8046f2fd4a900ce0f75129e8a4b863ded59254df50ed872fdb1ce67f1b32db1" Nov 27 08:31:40 crc kubenswrapper[4971]: I1127 08:31:40.062223 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-zzv4h" Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.361469 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-zzv4h"] Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.369516 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-zzv4h"] Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.501829 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-s7t9d"] Nov 27 08:31:41 crc kubenswrapper[4971]: E1127 08:31:41.502401 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f7c22b6-f59e-44a1-b697-5a20473af06f" containerName="storage" Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.502436 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f7c22b6-f59e-44a1-b697-5a20473af06f" containerName="storage" Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.502734 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f7c22b6-f59e-44a1-b697-5a20473af06f" containerName="storage" Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.503689 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-s7t9d" Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.506598 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.507056 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.507413 4971 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-887xw" Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.509757 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.513141 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-s7t9d"] Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.674024 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b6d6a1b3-660f-4de4-9439-d1477bac5eb2-crc-storage\") pod \"crc-storage-crc-s7t9d\" (UID: \"b6d6a1b3-660f-4de4-9439-d1477bac5eb2\") " pod="crc-storage/crc-storage-crc-s7t9d" Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.674106 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b6d6a1b3-660f-4de4-9439-d1477bac5eb2-node-mnt\") pod \"crc-storage-crc-s7t9d\" (UID: \"b6d6a1b3-660f-4de4-9439-d1477bac5eb2\") " pod="crc-storage/crc-storage-crc-s7t9d" Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.674284 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8mqd\" (UniqueName: \"kubernetes.io/projected/b6d6a1b3-660f-4de4-9439-d1477bac5eb2-kube-api-access-v8mqd\") pod \"crc-storage-crc-s7t9d\" (UID: \"b6d6a1b3-660f-4de4-9439-d1477bac5eb2\") " pod="crc-storage/crc-storage-crc-s7t9d" Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.776243 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b6d6a1b3-660f-4de4-9439-d1477bac5eb2-crc-storage\") pod \"crc-storage-crc-s7t9d\" (UID: \"b6d6a1b3-660f-4de4-9439-d1477bac5eb2\") " pod="crc-storage/crc-storage-crc-s7t9d" Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.776324 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b6d6a1b3-660f-4de4-9439-d1477bac5eb2-node-mnt\") pod \"crc-storage-crc-s7t9d\" (UID: \"b6d6a1b3-660f-4de4-9439-d1477bac5eb2\") " pod="crc-storage/crc-storage-crc-s7t9d" Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.776379 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8mqd\" (UniqueName: \"kubernetes.io/projected/b6d6a1b3-660f-4de4-9439-d1477bac5eb2-kube-api-access-v8mqd\") pod \"crc-storage-crc-s7t9d\" (UID: \"b6d6a1b3-660f-4de4-9439-d1477bac5eb2\") " pod="crc-storage/crc-storage-crc-s7t9d" Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.777250 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b6d6a1b3-660f-4de4-9439-d1477bac5eb2-crc-storage\") pod \"crc-storage-crc-s7t9d\" (UID: \"b6d6a1b3-660f-4de4-9439-d1477bac5eb2\") " 
pod="crc-storage/crc-storage-crc-s7t9d" Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.777591 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b6d6a1b3-660f-4de4-9439-d1477bac5eb2-node-mnt\") pod \"crc-storage-crc-s7t9d\" (UID: \"b6d6a1b3-660f-4de4-9439-d1477bac5eb2\") " pod="crc-storage/crc-storage-crc-s7t9d" Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.798790 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8mqd\" (UniqueName: \"kubernetes.io/projected/b6d6a1b3-660f-4de4-9439-d1477bac5eb2-kube-api-access-v8mqd\") pod \"crc-storage-crc-s7t9d\" (UID: \"b6d6a1b3-660f-4de4-9439-d1477bac5eb2\") " pod="crc-storage/crc-storage-crc-s7t9d" Nov 27 08:31:41 crc kubenswrapper[4971]: I1127 08:31:41.834465 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-s7t9d" Nov 27 08:31:42 crc kubenswrapper[4971]: I1127 08:31:42.112417 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-s7t9d"] Nov 27 08:31:42 crc kubenswrapper[4971]: I1127 08:31:42.565017 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f7c22b6-f59e-44a1-b697-5a20473af06f" path="/var/lib/kubelet/pods/4f7c22b6-f59e-44a1-b697-5a20473af06f/volumes" Nov 27 08:31:43 crc kubenswrapper[4971]: I1127 08:31:43.103989 4971 generic.go:334] "Generic (PLEG): container finished" podID="b6d6a1b3-660f-4de4-9439-d1477bac5eb2" containerID="d0e266abb3503d2a978950018d1fe9f2e4d9bcdfa7498daa043c0819d4c2f0ca" exitCode=0 Nov 27 08:31:43 crc kubenswrapper[4971]: I1127 08:31:43.104420 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-s7t9d" event={"ID":"b6d6a1b3-660f-4de4-9439-d1477bac5eb2","Type":"ContainerDied","Data":"d0e266abb3503d2a978950018d1fe9f2e4d9bcdfa7498daa043c0819d4c2f0ca"} Nov 27 08:31:43 crc kubenswrapper[4971]: I1127 08:31:43.104460 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-s7t9d" event={"ID":"b6d6a1b3-660f-4de4-9439-d1477bac5eb2","Type":"ContainerStarted","Data":"08d2127236f273b2b2277703ce81089fc37f8cd5faf735ff377f1b3eb7849347"} Nov 27 08:31:44 crc kubenswrapper[4971]: I1127 08:31:44.403333 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-s7t9d" Nov 27 08:31:44 crc kubenswrapper[4971]: I1127 08:31:44.526058 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b6d6a1b3-660f-4de4-9439-d1477bac5eb2-node-mnt\") pod \"b6d6a1b3-660f-4de4-9439-d1477bac5eb2\" (UID: \"b6d6a1b3-660f-4de4-9439-d1477bac5eb2\") " Nov 27 08:31:44 crc kubenswrapper[4971]: I1127 08:31:44.526148 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b6d6a1b3-660f-4de4-9439-d1477bac5eb2-crc-storage\") pod \"b6d6a1b3-660f-4de4-9439-d1477bac5eb2\" (UID: \"b6d6a1b3-660f-4de4-9439-d1477bac5eb2\") " Nov 27 08:31:44 crc kubenswrapper[4971]: I1127 08:31:44.526381 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8mqd\" (UniqueName: \"kubernetes.io/projected/b6d6a1b3-660f-4de4-9439-d1477bac5eb2-kube-api-access-v8mqd\") pod \"b6d6a1b3-660f-4de4-9439-d1477bac5eb2\" (UID: \"b6d6a1b3-660f-4de4-9439-d1477bac5eb2\") " Nov 27 08:31:44 crc kubenswrapper[4971]: I1127 08:31:44.526628 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b6d6a1b3-660f-4de4-9439-d1477bac5eb2-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "b6d6a1b3-660f-4de4-9439-d1477bac5eb2" (UID: "b6d6a1b3-660f-4de4-9439-d1477bac5eb2"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 08:31:44 crc kubenswrapper[4971]: I1127 08:31:44.534692 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6d6a1b3-660f-4de4-9439-d1477bac5eb2-kube-api-access-v8mqd" (OuterVolumeSpecName: "kube-api-access-v8mqd") pod "b6d6a1b3-660f-4de4-9439-d1477bac5eb2" (UID: "b6d6a1b3-660f-4de4-9439-d1477bac5eb2"). InnerVolumeSpecName "kube-api-access-v8mqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:31:44 crc kubenswrapper[4971]: I1127 08:31:44.549246 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6d6a1b3-660f-4de4-9439-d1477bac5eb2-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "b6d6a1b3-660f-4de4-9439-d1477bac5eb2" (UID: "b6d6a1b3-660f-4de4-9439-d1477bac5eb2"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:31:44 crc kubenswrapper[4971]: I1127 08:31:44.629034 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8mqd\" (UniqueName: \"kubernetes.io/projected/b6d6a1b3-660f-4de4-9439-d1477bac5eb2-kube-api-access-v8mqd\") on node \"crc\" DevicePath \"\"" Nov 27 08:31:44 crc kubenswrapper[4971]: I1127 08:31:44.629082 4971 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b6d6a1b3-660f-4de4-9439-d1477bac5eb2-node-mnt\") on node \"crc\" DevicePath \"\"" Nov 27 08:31:44 crc kubenswrapper[4971]: I1127 08:31:44.629095 4971 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b6d6a1b3-660f-4de4-9439-d1477bac5eb2-crc-storage\") on node \"crc\" DevicePath \"\"" Nov 27 08:31:45 crc kubenswrapper[4971]: I1127 08:31:45.126900 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-s7t9d" event={"ID":"b6d6a1b3-660f-4de4-9439-d1477bac5eb2","Type":"ContainerDied","Data":"08d2127236f273b2b2277703ce81089fc37f8cd5faf735ff377f1b3eb7849347"} Nov 27 08:31:45 crc kubenswrapper[4971]: I1127 08:31:45.126969 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08d2127236f273b2b2277703ce81089fc37f8cd5faf735ff377f1b3eb7849347" Nov 27 08:31:45 crc kubenswrapper[4971]: I1127 08:31:45.126985 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-s7t9d" Nov 27 08:31:49 crc kubenswrapper[4971]: I1127 08:31:49.551580 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:31:49 crc kubenswrapper[4971]: E1127 08:31:49.554947 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:31:52 crc kubenswrapper[4971]: I1127 08:31:52.272283 4971 scope.go:117] "RemoveContainer" containerID="6aec07e7dbacd67ae3399b7961202c2d21558e123ae9b8b31a484ba1ea981319" Nov 27 08:32:04 crc kubenswrapper[4971]: I1127 08:32:04.550703 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:32:04 crc kubenswrapper[4971]: E1127 08:32:04.551650 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:32:16 crc kubenswrapper[4971]: I1127 08:32:16.550805 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:32:16 crc kubenswrapper[4971]: E1127 08:32:16.551453 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:32:30 crc kubenswrapper[4971]: I1127 08:32:30.550631 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a" Nov 27 08:32:31 crc kubenswrapper[4971]: I1127 08:32:31.530562 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"c5fdcd7a26114da98d92fa593d9bd5735d2520f363dead48d0a133e41557b50b"} Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.226208 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-68b66b9b9f-q975r"] Nov 27 08:33:50 crc kubenswrapper[4971]: E1127 08:33:50.227659 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6d6a1b3-660f-4de4-9439-d1477bac5eb2" containerName="storage" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.227715 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6d6a1b3-660f-4de4-9439-d1477bac5eb2" containerName="storage" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.227963 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6d6a1b3-660f-4de4-9439-d1477bac5eb2" containerName="storage" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.229520 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.232954 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.233219 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-6djqt" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.233909 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.236326 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.236499 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.248135 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68b66b9b9f-q975r"] Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.389758 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9brp\" (UniqueName: \"kubernetes.io/projected/952e4885-4771-4c1d-a2d9-4b5e772edb9b-kube-api-access-s9brp\") pod \"dnsmasq-dns-68b66b9b9f-q975r\" (UID: \"952e4885-4771-4c1d-a2d9-4b5e772edb9b\") " pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.389890 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/952e4885-4771-4c1d-a2d9-4b5e772edb9b-config\") pod \"dnsmasq-dns-68b66b9b9f-q975r\" (UID: \"952e4885-4771-4c1d-a2d9-4b5e772edb9b\") " pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.390542 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/952e4885-4771-4c1d-a2d9-4b5e772edb9b-dns-svc\") pod \"dnsmasq-dns-68b66b9b9f-q975r\" (UID: \"952e4885-4771-4c1d-a2d9-4b5e772edb9b\") " pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.491853 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/952e4885-4771-4c1d-a2d9-4b5e772edb9b-config\") pod \"dnsmasq-dns-68b66b9b9f-q975r\" (UID: \"952e4885-4771-4c1d-a2d9-4b5e772edb9b\") " pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.491935 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/952e4885-4771-4c1d-a2d9-4b5e772edb9b-dns-svc\") pod \"dnsmasq-dns-68b66b9b9f-q975r\" (UID: \"952e4885-4771-4c1d-a2d9-4b5e772edb9b\") " pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.491995 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9brp\" (UniqueName: \"kubernetes.io/projected/952e4885-4771-4c1d-a2d9-4b5e772edb9b-kube-api-access-s9brp\") pod \"dnsmasq-dns-68b66b9b9f-q975r\" (UID: \"952e4885-4771-4c1d-a2d9-4b5e772edb9b\") " pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.493068 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/952e4885-4771-4c1d-a2d9-4b5e772edb9b-dns-svc\") pod \"dnsmasq-dns-68b66b9b9f-q975r\" (UID: \"952e4885-4771-4c1d-a2d9-4b5e772edb9b\") " pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.493120 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/952e4885-4771-4c1d-a2d9-4b5e772edb9b-config\") pod \"dnsmasq-dns-68b66b9b9f-q975r\" (UID: \"952e4885-4771-4c1d-a2d9-4b5e772edb9b\") " pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.531936 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9brp\" (UniqueName: \"kubernetes.io/projected/952e4885-4771-4c1d-a2d9-4b5e772edb9b-kube-api-access-s9brp\") pod \"dnsmasq-dns-68b66b9b9f-q975r\" (UID: \"952e4885-4771-4c1d-a2d9-4b5e772edb9b\") " pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.539565 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86766db695-js9qx"] Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.543261 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86766db695-js9qx" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.557664 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.595269 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86766db695-js9qx"] Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.694813 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e733141-e096-49ff-8b70-6d73873ae19d-config\") pod \"dnsmasq-dns-86766db695-js9qx\" (UID: \"5e733141-e096-49ff-8b70-6d73873ae19d\") " pod="openstack/dnsmasq-dns-86766db695-js9qx" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.694891 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m47zk\" (UniqueName: \"kubernetes.io/projected/5e733141-e096-49ff-8b70-6d73873ae19d-kube-api-access-m47zk\") pod \"dnsmasq-dns-86766db695-js9qx\" (UID: \"5e733141-e096-49ff-8b70-6d73873ae19d\") " pod="openstack/dnsmasq-dns-86766db695-js9qx" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.694942 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e733141-e096-49ff-8b70-6d73873ae19d-dns-svc\") pod \"dnsmasq-dns-86766db695-js9qx\" (UID: \"5e733141-e096-49ff-8b70-6d73873ae19d\") " pod="openstack/dnsmasq-dns-86766db695-js9qx" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.796764 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e733141-e096-49ff-8b70-6d73873ae19d-dns-svc\") pod \"dnsmasq-dns-86766db695-js9qx\" (UID: \"5e733141-e096-49ff-8b70-6d73873ae19d\") " pod="openstack/dnsmasq-dns-86766db695-js9qx" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.797208 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e733141-e096-49ff-8b70-6d73873ae19d-config\") pod \"dnsmasq-dns-86766db695-js9qx\" (UID: \"5e733141-e096-49ff-8b70-6d73873ae19d\") " pod="openstack/dnsmasq-dns-86766db695-js9qx" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.797244 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m47zk\" (UniqueName: \"kubernetes.io/projected/5e733141-e096-49ff-8b70-6d73873ae19d-kube-api-access-m47zk\") pod \"dnsmasq-dns-86766db695-js9qx\" (UID: \"5e733141-e096-49ff-8b70-6d73873ae19d\") " pod="openstack/dnsmasq-dns-86766db695-js9qx" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.797857 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e733141-e096-49ff-8b70-6d73873ae19d-dns-svc\") pod \"dnsmasq-dns-86766db695-js9qx\" (UID: \"5e733141-e096-49ff-8b70-6d73873ae19d\") " pod="openstack/dnsmasq-dns-86766db695-js9qx" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.798426 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e733141-e096-49ff-8b70-6d73873ae19d-config\") pod \"dnsmasq-dns-86766db695-js9qx\" (UID: \"5e733141-e096-49ff-8b70-6d73873ae19d\") " pod="openstack/dnsmasq-dns-86766db695-js9qx" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.819096 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m47zk\" (UniqueName: 
\"kubernetes.io/projected/5e733141-e096-49ff-8b70-6d73873ae19d-kube-api-access-m47zk\") pod \"dnsmasq-dns-86766db695-js9qx\" (UID: \"5e733141-e096-49ff-8b70-6d73873ae19d\") " pod="openstack/dnsmasq-dns-86766db695-js9qx" Nov 27 08:33:50 crc kubenswrapper[4971]: I1127 08:33:50.889888 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86766db695-js9qx" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.092774 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68b66b9b9f-q975r"] Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.291429 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" event={"ID":"952e4885-4771-4c1d-a2d9-4b5e772edb9b","Type":"ContainerStarted","Data":"d02b80f287d4bc5b7a2f335d84ab37620ff6c365a5801ff57e63697637761dea"} Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.376819 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.380095 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.385286 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.385501 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.385566 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-f4zwm" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.386739 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.386958 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.390939 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.434491 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86766db695-js9qx"] Nov 27 08:33:51 crc kubenswrapper[4971]: W1127 08:33:51.441068 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e733141_e096_49ff_8b70_6d73873ae19d.slice/crio-a04280c558ed9c12b9959855f37b67e8639ce53315f89bfded0c9b5aca3e139e WatchSource:0}: Error finding container a04280c558ed9c12b9959855f37b67e8639ce53315f89bfded0c9b5aca3e139e: Status 404 returned error can't find the container with id a04280c558ed9c12b9959855f37b67e8639ce53315f89bfded0c9b5aca3e139e Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.514675 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5398c358-10a0-4867-93bd-6b039b04584d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.514723 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8npc2\" (UniqueName: 
\"kubernetes.io/projected/5398c358-10a0-4867-93bd-6b039b04584d-kube-api-access-8npc2\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.514979 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5398c358-10a0-4867-93bd-6b039b04584d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.515037 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5398c358-10a0-4867-93bd-6b039b04584d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.515285 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5398c358-10a0-4867-93bd-6b039b04584d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.515663 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5398c358-10a0-4867-93bd-6b039b04584d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.515813 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5398c358-10a0-4867-93bd-6b039b04584d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.515914 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5398c358-10a0-4867-93bd-6b039b04584d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.516052 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.619210 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5398c358-10a0-4867-93bd-6b039b04584d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.619272 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8npc2\" (UniqueName: 
\"kubernetes.io/projected/5398c358-10a0-4867-93bd-6b039b04584d-kube-api-access-8npc2\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.619318 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5398c358-10a0-4867-93bd-6b039b04584d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.619341 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5398c358-10a0-4867-93bd-6b039b04584d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.619929 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5398c358-10a0-4867-93bd-6b039b04584d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.619971 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5398c358-10a0-4867-93bd-6b039b04584d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.619996 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5398c358-10a0-4867-93bd-6b039b04584d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.620029 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5398c358-10a0-4867-93bd-6b039b04584d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.620090 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.620440 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5398c358-10a0-4867-93bd-6b039b04584d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.620789 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5398c358-10a0-4867-93bd-6b039b04584d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 
08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.621014 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5398c358-10a0-4867-93bd-6b039b04584d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.624392 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5398c358-10a0-4867-93bd-6b039b04584d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.625478 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5398c358-10a0-4867-93bd-6b039b04584d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.627230 4971 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.627653 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/7b1a9f31da3f0acffca006ca5b1627887bf4693f5bc53509f19c460deb4900cb/globalmount\"" pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.629134 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5398c358-10a0-4867-93bd-6b039b04584d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.638516 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8npc2\" (UniqueName: \"kubernetes.io/projected/5398c358-10a0-4867-93bd-6b039b04584d-kube-api-access-8npc2\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.646486 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5398c358-10a0-4867-93bd-6b039b04584d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.675721 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\") pod \"rabbitmq-server-0\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.691208 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.692602 4971 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.696803 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-jj442" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.696839 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.696803 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.698785 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.707896 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.712588 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.726480 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.823863 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.823947 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.823984 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.824013 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.824068 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.824165 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5g6c5\" (UniqueName: 
\"kubernetes.io/projected/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-kube-api-access-5g6c5\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.824203 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-490752b4-17e2-4c20-8854-ab1db27ae229\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-490752b4-17e2-4c20-8854-ab1db27ae229\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.824228 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.824283 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.925893 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.926446 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.926478 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.926657 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.927022 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5g6c5\" (UniqueName: \"kubernetes.io/projected/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-kube-api-access-5g6c5\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.927061 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-490752b4-17e2-4c20-8854-ab1db27ae229\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-490752b4-17e2-4c20-8854-ab1db27ae229\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.927120 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.927294 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.927331 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.928674 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.929731 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.931554 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.931637 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.940083 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.940887 4971 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.940938 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-490752b4-17e2-4c20-8854-ab1db27ae229\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-490752b4-17e2-4c20-8854-ab1db27ae229\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a55fc649a39f621dfcc767b4b595ecc4fa4f3cc28a66de70c3352c90166a0312/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.943469 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.946116 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.948794 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5g6c5\" (UniqueName: \"kubernetes.io/projected/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-kube-api-access-5g6c5\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:51 crc kubenswrapper[4971]: I1127 08:33:51.975348 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-490752b4-17e2-4c20-8854-ab1db27ae229\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-490752b4-17e2-4c20-8854-ab1db27ae229\") pod \"rabbitmq-cell1-server-0\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:52 crc kubenswrapper[4971]: I1127 08:33:52.016519 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:33:52 crc kubenswrapper[4971]: I1127 08:33:52.211501 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 27 08:33:52 crc kubenswrapper[4971]: W1127 08:33:52.247695 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5398c358_10a0_4867_93bd_6b039b04584d.slice/crio-0338b1101e518f5d10a70b939e91e7242af8c45949b6a4ea2e8bd791ee801c83 WatchSource:0}: Error finding container 0338b1101e518f5d10a70b939e91e7242af8c45949b6a4ea2e8bd791ee801c83: Status 404 returned error can't find the container with id 0338b1101e518f5d10a70b939e91e7242af8c45949b6a4ea2e8bd791ee801c83 Nov 27 08:33:52 crc kubenswrapper[4971]: I1127 08:33:52.305856 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86766db695-js9qx" event={"ID":"5e733141-e096-49ff-8b70-6d73873ae19d","Type":"ContainerStarted","Data":"a04280c558ed9c12b9959855f37b67e8639ce53315f89bfded0c9b5aca3e139e"} Nov 27 08:33:52 crc kubenswrapper[4971]: I1127 08:33:52.307568 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"5398c358-10a0-4867-93bd-6b039b04584d","Type":"ContainerStarted","Data":"0338b1101e518f5d10a70b939e91e7242af8c45949b6a4ea2e8bd791ee801c83"} Nov 27 08:33:52 crc kubenswrapper[4971]: I1127 08:33:52.533086 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 27 08:33:52 crc kubenswrapper[4971]: W1127 08:33:52.535759 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5b6c13cc_56c7_4f02_a51f_b6aab2e77525.slice/crio-50eac806b03094ba83e095ccb48f3328c5e85ec5484a101912570fbceae229e2 WatchSource:0}: Error finding container 50eac806b03094ba83e095ccb48f3328c5e85ec5484a101912570fbceae229e2: Status 404 returned error can't find the container with id 50eac806b03094ba83e095ccb48f3328c5e85ec5484a101912570fbceae229e2 Nov 27 08:33:53 crc kubenswrapper[4971]: I1127 08:33:53.320376 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5b6c13cc-56c7-4f02-a51f-b6aab2e77525","Type":"ContainerStarted","Data":"50eac806b03094ba83e095ccb48f3328c5e85ec5484a101912570fbceae229e2"} Nov 27 08:33:53 crc kubenswrapper[4971]: I1127 08:33:53.904308 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Nov 27 08:33:53 crc kubenswrapper[4971]: I1127 08:33:53.906629 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Nov 27 08:33:53 crc kubenswrapper[4971]: I1127 08:33:53.915364 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-72jj2" Nov 27 08:33:53 crc kubenswrapper[4971]: I1127 08:33:53.915695 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Nov 27 08:33:53 crc kubenswrapper[4971]: I1127 08:33:53.915847 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Nov 27 08:33:53 crc kubenswrapper[4971]: I1127 08:33:53.916337 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Nov 27 08:33:53 crc kubenswrapper[4971]: I1127 08:33:53.919916 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Nov 27 08:33:53 crc kubenswrapper[4971]: I1127 08:33:53.944948 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.066281 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-operator-scripts\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.066368 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-kolla-config\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.066492 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-258zg\" (UniqueName: \"kubernetes.io/projected/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-kube-api-access-258zg\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.066731 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-config-data-default\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.066791 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.066916 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-config-data-generated\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.066987 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.067037 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4eb39111-812e-4c2c-8fda-d292a4310560\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4eb39111-812e-4c2c-8fda-d292a4310560\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.168703 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-config-data-generated\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.168760 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.168790 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4eb39111-812e-4c2c-8fda-d292a4310560\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4eb39111-812e-4c2c-8fda-d292a4310560\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.168859 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-operator-scripts\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.170224 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-kolla-config\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.169348 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-config-data-generated\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.170737 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-operator-scripts\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.170962 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-kolla-config\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.170251 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-258zg\" (UniqueName: \"kubernetes.io/projected/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-kube-api-access-258zg\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.171089 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-config-data-default\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.171138 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.172207 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-config-data-default\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.172935 4971 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.173003 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4eb39111-812e-4c2c-8fda-d292a4310560\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4eb39111-812e-4c2c-8fda-d292a4310560\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b9c1877a740bec20e2ee5603e5470419f823967cb237fe360938e217eda4a50b/globalmount\"" pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.177108 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.189311 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-258zg\" (UniqueName: \"kubernetes.io/projected/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-kube-api-access-258zg\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.189708 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.253227 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4eb39111-812e-4c2c-8fda-d292a4310560\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4eb39111-812e-4c2c-8fda-d292a4310560\") pod \"openstack-galera-0\" (UID: \"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0\") " pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.359126 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.367799 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.371548 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.372602 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-c7n5s" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.372681 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.477080 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgbtg\" (UniqueName: \"kubernetes.io/projected/55c82719-efe6-4d3a-b9c4-1def29b2c252-kube-api-access-wgbtg\") pod \"memcached-0\" (UID: \"55c82719-efe6-4d3a-b9c4-1def29b2c252\") " pod="openstack/memcached-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.477163 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/55c82719-efe6-4d3a-b9c4-1def29b2c252-kolla-config\") pod \"memcached-0\" (UID: \"55c82719-efe6-4d3a-b9c4-1def29b2c252\") " pod="openstack/memcached-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.477224 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/55c82719-efe6-4d3a-b9c4-1def29b2c252-config-data\") pod \"memcached-0\" (UID: \"55c82719-efe6-4d3a-b9c4-1def29b2c252\") " pod="openstack/memcached-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.542614 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.579424 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/55c82719-efe6-4d3a-b9c4-1def29b2c252-kolla-config\") pod \"memcached-0\" (UID: \"55c82719-efe6-4d3a-b9c4-1def29b2c252\") " pod="openstack/memcached-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.579525 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/55c82719-efe6-4d3a-b9c4-1def29b2c252-config-data\") pod \"memcached-0\" (UID: \"55c82719-efe6-4d3a-b9c4-1def29b2c252\") " pod="openstack/memcached-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.579637 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgbtg\" (UniqueName: \"kubernetes.io/projected/55c82719-efe6-4d3a-b9c4-1def29b2c252-kube-api-access-wgbtg\") pod \"memcached-0\" (UID: \"55c82719-efe6-4d3a-b9c4-1def29b2c252\") " pod="openstack/memcached-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.581841 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/55c82719-efe6-4d3a-b9c4-1def29b2c252-kolla-config\") pod \"memcached-0\" (UID: \"55c82719-efe6-4d3a-b9c4-1def29b2c252\") " pod="openstack/memcached-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.582425 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/55c82719-efe6-4d3a-b9c4-1def29b2c252-config-data\") pod \"memcached-0\" (UID: \"55c82719-efe6-4d3a-b9c4-1def29b2c252\") " pod="openstack/memcached-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.600984 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgbtg\" (UniqueName: \"kubernetes.io/projected/55c82719-efe6-4d3a-b9c4-1def29b2c252-kube-api-access-wgbtg\") pod \"memcached-0\" (UID: \"55c82719-efe6-4d3a-b9c4-1def29b2c252\") " pod="openstack/memcached-0" Nov 27 08:33:54 crc kubenswrapper[4971]: I1127 08:33:54.702395 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.342172 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.343696 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.351772 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.351976 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.401189 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-ggg22" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.410564 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.421932 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.515819 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.515911 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmzl8\" (UniqueName: \"kubernetes.io/projected/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-kube-api-access-qmzl8\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.515945 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.516410 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.516619 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-bf99b4ce-1ca8-4a24-9953-ca70447b9a23\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bf99b4ce-1ca8-4a24-9953-ca70447b9a23\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.516655 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.516747 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.516850 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.618040 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.618106 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.618146 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-bf99b4ce-1ca8-4a24-9953-ca70447b9a23\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bf99b4ce-1ca8-4a24-9953-ca70447b9a23\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.618163 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.618197 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.618224 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.618260 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.618297 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-qmzl8\" (UniqueName: \"kubernetes.io/projected/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-kube-api-access-qmzl8\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.620840 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.620842 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.620976 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.624033 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.624488 4971 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.624634 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-bf99b4ce-1ca8-4a24-9953-ca70447b9a23\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bf99b4ce-1ca8-4a24-9953-ca70447b9a23\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/ad91ce3c2e7f3c9e5badb1ec3db52e091f4f239fd2062799823ed1291bc1a7d0/globalmount\"" pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.628917 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.630019 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.637421 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmzl8\" (UniqueName: \"kubernetes.io/projected/30e1cda1-3bb1-4df3-9a75-998e7a88cab1-kube-api-access-qmzl8\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.656702 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-bf99b4ce-1ca8-4a24-9953-ca70447b9a23\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bf99b4ce-1ca8-4a24-9953-ca70447b9a23\") pod \"openstack-cell1-galera-0\" (UID: \"30e1cda1-3bb1-4df3-9a75-998e7a88cab1\") " pod="openstack/openstack-cell1-galera-0" Nov 27 08:33:55 crc kubenswrapper[4971]: I1127 08:33:55.722631 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 27 08:34:04 crc kubenswrapper[4971]: I1127 08:34:04.376127 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Nov 27 08:34:04 crc kubenswrapper[4971]: W1127 08:34:04.379664 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30e1cda1_3bb1_4df3_9a75_998e7a88cab1.slice/crio-fb42be9c965046482d547c2db2f6b5358b0f3d0b8097dad7804f9bdebcb5f5e8 WatchSource:0}: Error finding container fb42be9c965046482d547c2db2f6b5358b0f3d0b8097dad7804f9bdebcb5f5e8: Status 404 returned error can't find the container with id fb42be9c965046482d547c2db2f6b5358b0f3d0b8097dad7804f9bdebcb5f5e8 Nov 27 08:34:04 crc kubenswrapper[4971]: I1127 08:34:04.382843 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 27 08:34:04 crc kubenswrapper[4971]: I1127 08:34:04.495159 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Nov 27 08:34:04 crc kubenswrapper[4971]: W1127 08:34:04.496997 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbeec80f_deb5_4f4c_adb2_36eb45f4c7e0.slice/crio-42df7144d5bfef9c4b7b6dd199b73ae79a0211d8431b6b45fba0dd9739fc45bb WatchSource:0}: Error finding container 42df7144d5bfef9c4b7b6dd199b73ae79a0211d8431b6b45fba0dd9739fc45bb: Status 404 returned error can't find the container with id 42df7144d5bfef9c4b7b6dd199b73ae79a0211d8431b6b45fba0dd9739fc45bb Nov 27 08:34:04 crc kubenswrapper[4971]: I1127 08:34:04.506390 4971 generic.go:334] "Generic (PLEG): container finished" podID="5e733141-e096-49ff-8b70-6d73873ae19d" containerID="29e74990cf246a65b3b32392144c6b545d89182f0a786624446f71d002a71c12" exitCode=0 Nov 27 08:34:04 crc kubenswrapper[4971]: I1127 08:34:04.506584 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86766db695-js9qx" event={"ID":"5e733141-e096-49ff-8b70-6d73873ae19d","Type":"ContainerDied","Data":"29e74990cf246a65b3b32392144c6b545d89182f0a786624446f71d002a71c12"} Nov 27 08:34:04 crc kubenswrapper[4971]: I1127 08:34:04.512818 4971 generic.go:334] "Generic (PLEG): container finished" podID="952e4885-4771-4c1d-a2d9-4b5e772edb9b" containerID="ebce061fe842e1457d1d513061a329a8b958802fdaa41489833eea6f64bb4a03" exitCode=0 Nov 27 08:34:04 crc kubenswrapper[4971]: I1127 08:34:04.513015 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" event={"ID":"952e4885-4771-4c1d-a2d9-4b5e772edb9b","Type":"ContainerDied","Data":"ebce061fe842e1457d1d513061a329a8b958802fdaa41489833eea6f64bb4a03"} Nov 27 08:34:04 crc kubenswrapper[4971]: I1127 08:34:04.516449 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"55c82719-efe6-4d3a-b9c4-1def29b2c252","Type":"ContainerStarted","Data":"f9421a47988c4f634b1c86f8b745eec84d9150e19fccdd4f667eddea2130f2d9"} Nov 27 08:34:04 crc kubenswrapper[4971]: I1127 08:34:04.519523 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"30e1cda1-3bb1-4df3-9a75-998e7a88cab1","Type":"ContainerStarted","Data":"fb42be9c965046482d547c2db2f6b5358b0f3d0b8097dad7804f9bdebcb5f5e8"} Nov 27 08:34:05 crc kubenswrapper[4971]: I1127 08:34:05.528667 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86766db695-js9qx" 
event={"ID":"5e733141-e096-49ff-8b70-6d73873ae19d","Type":"ContainerStarted","Data":"8191b26d822be3eb20cbc202685741ab47ffb1c7f6ff76734f7afa06d3d9a6a0"} Nov 27 08:34:05 crc kubenswrapper[4971]: I1127 08:34:05.529994 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86766db695-js9qx" Nov 27 08:34:05 crc kubenswrapper[4971]: I1127 08:34:05.532580 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" event={"ID":"952e4885-4771-4c1d-a2d9-4b5e772edb9b","Type":"ContainerStarted","Data":"1185e052adcf3b9421cd2d32d2a1e51602249a0c565c317aea0ff94de6dc5193"} Nov 27 08:34:05 crc kubenswrapper[4971]: I1127 08:34:05.532713 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" Nov 27 08:34:05 crc kubenswrapper[4971]: I1127 08:34:05.535874 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"5398c358-10a0-4867-93bd-6b039b04584d","Type":"ContainerStarted","Data":"f1d968e923d782f250dc5cbf7fd29f72ed832d86378bd23727043b538a1534d2"} Nov 27 08:34:05 crc kubenswrapper[4971]: I1127 08:34:05.538690 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0","Type":"ContainerStarted","Data":"42df7144d5bfef9c4b7b6dd199b73ae79a0211d8431b6b45fba0dd9739fc45bb"} Nov 27 08:34:05 crc kubenswrapper[4971]: I1127 08:34:05.561460 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86766db695-js9qx" podStartSLOduration=3.07990495 podStartE2EDuration="15.561436364s" podCreationTimestamp="2025-11-27 08:33:50 +0000 UTC" firstStartedPulling="2025-11-27 08:33:51.445690578 +0000 UTC m=+6069.637734496" lastFinishedPulling="2025-11-27 08:34:03.927221992 +0000 UTC m=+6082.119265910" observedRunningTime="2025-11-27 08:34:05.553390868 +0000 UTC m=+6083.745434806" watchObservedRunningTime="2025-11-27 08:34:05.561436364 +0000 UTC m=+6083.753480302" Nov 27 08:34:05 crc kubenswrapper[4971]: I1127 08:34:05.575180 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" podStartSLOduration=2.783579455 podStartE2EDuration="15.575160499s" podCreationTimestamp="2025-11-27 08:33:50 +0000 UTC" firstStartedPulling="2025-11-27 08:33:51.11327728 +0000 UTC m=+6069.305321208" lastFinishedPulling="2025-11-27 08:34:03.904858334 +0000 UTC m=+6082.096902252" observedRunningTime="2025-11-27 08:34:05.573335268 +0000 UTC m=+6083.765379206" watchObservedRunningTime="2025-11-27 08:34:05.575160499 +0000 UTC m=+6083.767204417" Nov 27 08:34:06 crc kubenswrapper[4971]: I1127 08:34:06.566847 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5b6c13cc-56c7-4f02-a51f-b6aab2e77525","Type":"ContainerStarted","Data":"95155c3d100e3fb70f6a985c53fa95c4b2e7540bbe073ec542a3e0164f1da148"} Nov 27 08:34:07 crc kubenswrapper[4971]: I1127 08:34:07.567656 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"55c82719-efe6-4d3a-b9c4-1def29b2c252","Type":"ContainerStarted","Data":"c81e8ad6d0f1f2ad19717f6e172a134c1668cc6e12b2e686a903f74af6a51c43"} Nov 27 08:34:07 crc kubenswrapper[4971]: I1127 08:34:07.568702 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Nov 27 08:34:07 crc kubenswrapper[4971]: I1127 08:34:07.594496 4971 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=11.5071969 podStartE2EDuration="13.594469639s" podCreationTimestamp="2025-11-27 08:33:54 +0000 UTC" firstStartedPulling="2025-11-27 08:34:04.379658523 +0000 UTC m=+6082.571702441" lastFinishedPulling="2025-11-27 08:34:06.466931262 +0000 UTC m=+6084.658975180" observedRunningTime="2025-11-27 08:34:07.592950666 +0000 UTC m=+6085.784994584" watchObservedRunningTime="2025-11-27 08:34:07.594469639 +0000 UTC m=+6085.786513557" Nov 27 08:34:09 crc kubenswrapper[4971]: I1127 08:34:09.587601 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"30e1cda1-3bb1-4df3-9a75-998e7a88cab1","Type":"ContainerStarted","Data":"d92712f8e447e52b05a58b42b0d0fdddc024350d2bff062080f8429d792f5c19"} Nov 27 08:34:09 crc kubenswrapper[4971]: I1127 08:34:09.589742 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0","Type":"ContainerStarted","Data":"492718be9fe884a870dc2c479a8ad36f20b05d722669b2593439c7b5869448c7"} Nov 27 08:34:10 crc kubenswrapper[4971]: I1127 08:34:10.560279 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" Nov 27 08:34:10 crc kubenswrapper[4971]: I1127 08:34:10.891876 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86766db695-js9qx" Nov 27 08:34:10 crc kubenswrapper[4971]: I1127 08:34:10.963784 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68b66b9b9f-q975r"] Nov 27 08:34:10 crc kubenswrapper[4971]: I1127 08:34:10.964166 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" podUID="952e4885-4771-4c1d-a2d9-4b5e772edb9b" containerName="dnsmasq-dns" containerID="cri-o://1185e052adcf3b9421cd2d32d2a1e51602249a0c565c317aea0ff94de6dc5193" gracePeriod=10 Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.476338 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.608034 4971 generic.go:334] "Generic (PLEG): container finished" podID="952e4885-4771-4c1d-a2d9-4b5e772edb9b" containerID="1185e052adcf3b9421cd2d32d2a1e51602249a0c565c317aea0ff94de6dc5193" exitCode=0 Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.608103 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" event={"ID":"952e4885-4771-4c1d-a2d9-4b5e772edb9b","Type":"ContainerDied","Data":"1185e052adcf3b9421cd2d32d2a1e51602249a0c565c317aea0ff94de6dc5193"} Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.608123 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.608161 4971 scope.go:117] "RemoveContainer" containerID="1185e052adcf3b9421cd2d32d2a1e51602249a0c565c317aea0ff94de6dc5193" Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.608143 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68b66b9b9f-q975r" event={"ID":"952e4885-4771-4c1d-a2d9-4b5e772edb9b","Type":"ContainerDied","Data":"d02b80f287d4bc5b7a2f335d84ab37620ff6c365a5801ff57e63697637761dea"} Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.621352 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/952e4885-4771-4c1d-a2d9-4b5e772edb9b-dns-svc\") pod \"952e4885-4771-4c1d-a2d9-4b5e772edb9b\" (UID: \"952e4885-4771-4c1d-a2d9-4b5e772edb9b\") " Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.621850 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/952e4885-4771-4c1d-a2d9-4b5e772edb9b-config\") pod \"952e4885-4771-4c1d-a2d9-4b5e772edb9b\" (UID: \"952e4885-4771-4c1d-a2d9-4b5e772edb9b\") " Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.622072 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s9brp\" (UniqueName: \"kubernetes.io/projected/952e4885-4771-4c1d-a2d9-4b5e772edb9b-kube-api-access-s9brp\") pod \"952e4885-4771-4c1d-a2d9-4b5e772edb9b\" (UID: \"952e4885-4771-4c1d-a2d9-4b5e772edb9b\") " Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.627266 4971 scope.go:117] "RemoveContainer" containerID="ebce061fe842e1457d1d513061a329a8b958802fdaa41489833eea6f64bb4a03" Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.632097 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/952e4885-4771-4c1d-a2d9-4b5e772edb9b-kube-api-access-s9brp" (OuterVolumeSpecName: "kube-api-access-s9brp") pod "952e4885-4771-4c1d-a2d9-4b5e772edb9b" (UID: "952e4885-4771-4c1d-a2d9-4b5e772edb9b"). InnerVolumeSpecName "kube-api-access-s9brp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.670336 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/952e4885-4771-4c1d-a2d9-4b5e772edb9b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "952e4885-4771-4c1d-a2d9-4b5e772edb9b" (UID: "952e4885-4771-4c1d-a2d9-4b5e772edb9b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.671344 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/952e4885-4771-4c1d-a2d9-4b5e772edb9b-config" (OuterVolumeSpecName: "config") pod "952e4885-4771-4c1d-a2d9-4b5e772edb9b" (UID: "952e4885-4771-4c1d-a2d9-4b5e772edb9b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.711797 4971 scope.go:117] "RemoveContainer" containerID="1185e052adcf3b9421cd2d32d2a1e51602249a0c565c317aea0ff94de6dc5193" Nov 27 08:34:11 crc kubenswrapper[4971]: E1127 08:34:11.712405 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1185e052adcf3b9421cd2d32d2a1e51602249a0c565c317aea0ff94de6dc5193\": container with ID starting with 1185e052adcf3b9421cd2d32d2a1e51602249a0c565c317aea0ff94de6dc5193 not found: ID does not exist" containerID="1185e052adcf3b9421cd2d32d2a1e51602249a0c565c317aea0ff94de6dc5193" Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.712481 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1185e052adcf3b9421cd2d32d2a1e51602249a0c565c317aea0ff94de6dc5193"} err="failed to get container status \"1185e052adcf3b9421cd2d32d2a1e51602249a0c565c317aea0ff94de6dc5193\": rpc error: code = NotFound desc = could not find container \"1185e052adcf3b9421cd2d32d2a1e51602249a0c565c317aea0ff94de6dc5193\": container with ID starting with 1185e052adcf3b9421cd2d32d2a1e51602249a0c565c317aea0ff94de6dc5193 not found: ID does not exist" Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.712523 4971 scope.go:117] "RemoveContainer" containerID="ebce061fe842e1457d1d513061a329a8b958802fdaa41489833eea6f64bb4a03" Nov 27 08:34:11 crc kubenswrapper[4971]: E1127 08:34:11.712953 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ebce061fe842e1457d1d513061a329a8b958802fdaa41489833eea6f64bb4a03\": container with ID starting with ebce061fe842e1457d1d513061a329a8b958802fdaa41489833eea6f64bb4a03 not found: ID does not exist" containerID="ebce061fe842e1457d1d513061a329a8b958802fdaa41489833eea6f64bb4a03" Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.712998 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebce061fe842e1457d1d513061a329a8b958802fdaa41489833eea6f64bb4a03"} err="failed to get container status \"ebce061fe842e1457d1d513061a329a8b958802fdaa41489833eea6f64bb4a03\": rpc error: code = NotFound desc = could not find container \"ebce061fe842e1457d1d513061a329a8b958802fdaa41489833eea6f64bb4a03\": container with ID starting with ebce061fe842e1457d1d513061a329a8b958802fdaa41489833eea6f64bb4a03 not found: ID does not exist" Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.724442 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s9brp\" (UniqueName: \"kubernetes.io/projected/952e4885-4771-4c1d-a2d9-4b5e772edb9b-kube-api-access-s9brp\") on node \"crc\" DevicePath \"\"" Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.724506 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/952e4885-4771-4c1d-a2d9-4b5e772edb9b-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.724517 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/952e4885-4771-4c1d-a2d9-4b5e772edb9b-config\") on node \"crc\" DevicePath \"\"" Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.946744 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68b66b9b9f-q975r"] Nov 27 08:34:11 crc kubenswrapper[4971]: I1127 08:34:11.956843 4971 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-68b66b9b9f-q975r"] Nov 27 08:34:12 crc kubenswrapper[4971]: I1127 08:34:12.560751 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="952e4885-4771-4c1d-a2d9-4b5e772edb9b" path="/var/lib/kubelet/pods/952e4885-4771-4c1d-a2d9-4b5e772edb9b/volumes" Nov 27 08:34:13 crc kubenswrapper[4971]: I1127 08:34:13.630790 4971 generic.go:334] "Generic (PLEG): container finished" podID="bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0" containerID="492718be9fe884a870dc2c479a8ad36f20b05d722669b2593439c7b5869448c7" exitCode=0 Nov 27 08:34:13 crc kubenswrapper[4971]: I1127 08:34:13.630918 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0","Type":"ContainerDied","Data":"492718be9fe884a870dc2c479a8ad36f20b05d722669b2593439c7b5869448c7"} Nov 27 08:34:13 crc kubenswrapper[4971]: I1127 08:34:13.635214 4971 generic.go:334] "Generic (PLEG): container finished" podID="30e1cda1-3bb1-4df3-9a75-998e7a88cab1" containerID="d92712f8e447e52b05a58b42b0d0fdddc024350d2bff062080f8429d792f5c19" exitCode=0 Nov 27 08:34:13 crc kubenswrapper[4971]: I1127 08:34:13.635280 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"30e1cda1-3bb1-4df3-9a75-998e7a88cab1","Type":"ContainerDied","Data":"d92712f8e447e52b05a58b42b0d0fdddc024350d2bff062080f8429d792f5c19"} Nov 27 08:34:14 crc kubenswrapper[4971]: I1127 08:34:14.649218 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0","Type":"ContainerStarted","Data":"fda631eef6a7cf626b2c66e2d9be06a3fbb9b54cb54b6fab7f764d4edbe87ee9"} Nov 27 08:34:14 crc kubenswrapper[4971]: I1127 08:34:14.652132 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"30e1cda1-3bb1-4df3-9a75-998e7a88cab1","Type":"ContainerStarted","Data":"4fffe885747ffbb764cc7f6a624e417169522526b4228e8231e0b74dee33706d"} Nov 27 08:34:14 crc kubenswrapper[4971]: I1127 08:34:14.679977 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=17.934098399 podStartE2EDuration="22.679952907s" podCreationTimestamp="2025-11-27 08:33:52 +0000 UTC" firstStartedPulling="2025-11-27 08:34:04.499569062 +0000 UTC m=+6082.691612980" lastFinishedPulling="2025-11-27 08:34:09.24542357 +0000 UTC m=+6087.437467488" observedRunningTime="2025-11-27 08:34:14.6754182 +0000 UTC m=+6092.867462148" watchObservedRunningTime="2025-11-27 08:34:14.679952907 +0000 UTC m=+6092.871996825" Nov 27 08:34:14 crc kubenswrapper[4971]: I1127 08:34:14.707965 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Nov 27 08:34:14 crc kubenswrapper[4971]: I1127 08:34:14.718035 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=15.846081735 podStartE2EDuration="20.718010316s" podCreationTimestamp="2025-11-27 08:33:54 +0000 UTC" firstStartedPulling="2025-11-27 08:34:04.382012249 +0000 UTC m=+6082.574056167" lastFinishedPulling="2025-11-27 08:34:09.25394083 +0000 UTC m=+6087.445984748" observedRunningTime="2025-11-27 08:34:14.707206153 +0000 UTC m=+6092.899250081" watchObservedRunningTime="2025-11-27 08:34:14.718010316 +0000 UTC m=+6092.910054254" Nov 27 08:34:15 crc kubenswrapper[4971]: I1127 08:34:15.723558 4971 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Nov 27 08:34:15 crc kubenswrapper[4971]: I1127 08:34:15.723611 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Nov 27 08:34:18 crc kubenswrapper[4971]: E1127 08:34:18.488057 4971 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.50:46342->38.102.83.50:35357: write tcp 38.102.83.50:46342->38.102.83.50:35357: write: broken pipe Nov 27 08:34:19 crc kubenswrapper[4971]: I1127 08:34:19.802152 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Nov 27 08:34:19 crc kubenswrapper[4971]: I1127 08:34:19.894382 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Nov 27 08:34:24 crc kubenswrapper[4971]: I1127 08:34:24.543643 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Nov 27 08:34:24 crc kubenswrapper[4971]: I1127 08:34:24.544085 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Nov 27 08:34:24 crc kubenswrapper[4971]: I1127 08:34:24.622912 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Nov 27 08:34:24 crc kubenswrapper[4971]: I1127 08:34:24.864384 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Nov 27 08:34:37 crc kubenswrapper[4971]: I1127 08:34:37.935253 4971 generic.go:334] "Generic (PLEG): container finished" podID="5b6c13cc-56c7-4f02-a51f-b6aab2e77525" containerID="95155c3d100e3fb70f6a985c53fa95c4b2e7540bbe073ec542a3e0164f1da148" exitCode=0 Nov 27 08:34:37 crc kubenswrapper[4971]: I1127 08:34:37.935326 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5b6c13cc-56c7-4f02-a51f-b6aab2e77525","Type":"ContainerDied","Data":"95155c3d100e3fb70f6a985c53fa95c4b2e7540bbe073ec542a3e0164f1da148"} Nov 27 08:34:37 crc kubenswrapper[4971]: I1127 08:34:37.940027 4971 generic.go:334] "Generic (PLEG): container finished" podID="5398c358-10a0-4867-93bd-6b039b04584d" containerID="f1d968e923d782f250dc5cbf7fd29f72ed832d86378bd23727043b538a1534d2" exitCode=0 Nov 27 08:34:37 crc kubenswrapper[4971]: I1127 08:34:37.940069 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"5398c358-10a0-4867-93bd-6b039b04584d","Type":"ContainerDied","Data":"f1d968e923d782f250dc5cbf7fd29f72ed832d86378bd23727043b538a1534d2"} Nov 27 08:34:38 crc kubenswrapper[4971]: I1127 08:34:38.952400 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"5398c358-10a0-4867-93bd-6b039b04584d","Type":"ContainerStarted","Data":"a891b44553b504dd182eb8a4d3c5a0bdb878ea43596d7f476d8d07aa87fb0025"} Nov 27 08:34:38 crc kubenswrapper[4971]: I1127 08:34:38.953435 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Nov 27 08:34:38 crc kubenswrapper[4971]: I1127 08:34:38.954419 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5b6c13cc-56c7-4f02-a51f-b6aab2e77525","Type":"ContainerStarted","Data":"ac16b3eb4879c86b49853a89ee78cc9d33a43ac3f6a5b04f2314442188b50f3d"} Nov 27 08:34:38 crc kubenswrapper[4971]: I1127 08:34:38.954936 4971 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:34:38 crc kubenswrapper[4971]: I1127 08:34:38.978714 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.342220719 podStartE2EDuration="48.978691362s" podCreationTimestamp="2025-11-27 08:33:50 +0000 UTC" firstStartedPulling="2025-11-27 08:33:52.252966378 +0000 UTC m=+6070.445010296" lastFinishedPulling="2025-11-27 08:34:03.889437021 +0000 UTC m=+6082.081480939" observedRunningTime="2025-11-27 08:34:38.976637204 +0000 UTC m=+6117.168681132" watchObservedRunningTime="2025-11-27 08:34:38.978691362 +0000 UTC m=+6117.170735280" Nov 27 08:34:39 crc kubenswrapper[4971]: I1127 08:34:39.014248 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.662577109 podStartE2EDuration="49.01420852s" podCreationTimestamp="2025-11-27 08:33:50 +0000 UTC" firstStartedPulling="2025-11-27 08:33:52.540691151 +0000 UTC m=+6070.732735069" lastFinishedPulling="2025-11-27 08:34:03.892322562 +0000 UTC m=+6082.084366480" observedRunningTime="2025-11-27 08:34:39.005610058 +0000 UTC m=+6117.197653976" watchObservedRunningTime="2025-11-27 08:34:39.01420852 +0000 UTC m=+6117.206252428" Nov 27 08:34:51 crc kubenswrapper[4971]: I1127 08:34:51.730709 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Nov 27 08:34:52 crc kubenswrapper[4971]: I1127 08:34:52.019728 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:34:56 crc kubenswrapper[4971]: I1127 08:34:56.414142 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:34:56 crc kubenswrapper[4971]: I1127 08:34:56.414757 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:34:57 crc kubenswrapper[4971]: I1127 08:34:57.853699 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76cb69558f-l5qnx"] Nov 27 08:34:57 crc kubenswrapper[4971]: E1127 08:34:57.854572 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="952e4885-4771-4c1d-a2d9-4b5e772edb9b" containerName="init" Nov 27 08:34:57 crc kubenswrapper[4971]: I1127 08:34:57.854592 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="952e4885-4771-4c1d-a2d9-4b5e772edb9b" containerName="init" Nov 27 08:34:57 crc kubenswrapper[4971]: E1127 08:34:57.854610 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="952e4885-4771-4c1d-a2d9-4b5e772edb9b" containerName="dnsmasq-dns" Nov 27 08:34:57 crc kubenswrapper[4971]: I1127 08:34:57.854618 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="952e4885-4771-4c1d-a2d9-4b5e772edb9b" containerName="dnsmasq-dns" Nov 27 08:34:57 crc kubenswrapper[4971]: I1127 08:34:57.854806 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="952e4885-4771-4c1d-a2d9-4b5e772edb9b" 
containerName="dnsmasq-dns" Nov 27 08:34:57 crc kubenswrapper[4971]: I1127 08:34:57.856236 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" Nov 27 08:34:57 crc kubenswrapper[4971]: I1127 08:34:57.869840 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76cb69558f-l5qnx"] Nov 27 08:34:57 crc kubenswrapper[4971]: I1127 08:34:57.968069 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dae38fc6-dc8c-4958-a470-69de89a0e908-dns-svc\") pod \"dnsmasq-dns-76cb69558f-l5qnx\" (UID: \"dae38fc6-dc8c-4958-a470-69de89a0e908\") " pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" Nov 27 08:34:57 crc kubenswrapper[4971]: I1127 08:34:57.968154 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dae38fc6-dc8c-4958-a470-69de89a0e908-config\") pod \"dnsmasq-dns-76cb69558f-l5qnx\" (UID: \"dae38fc6-dc8c-4958-a470-69de89a0e908\") " pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" Nov 27 08:34:57 crc kubenswrapper[4971]: I1127 08:34:57.968615 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xzsd\" (UniqueName: \"kubernetes.io/projected/dae38fc6-dc8c-4958-a470-69de89a0e908-kube-api-access-6xzsd\") pod \"dnsmasq-dns-76cb69558f-l5qnx\" (UID: \"dae38fc6-dc8c-4958-a470-69de89a0e908\") " pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" Nov 27 08:34:58 crc kubenswrapper[4971]: I1127 08:34:58.070833 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dae38fc6-dc8c-4958-a470-69de89a0e908-dns-svc\") pod \"dnsmasq-dns-76cb69558f-l5qnx\" (UID: \"dae38fc6-dc8c-4958-a470-69de89a0e908\") " pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" Nov 27 08:34:58 crc kubenswrapper[4971]: I1127 08:34:58.070909 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dae38fc6-dc8c-4958-a470-69de89a0e908-config\") pod \"dnsmasq-dns-76cb69558f-l5qnx\" (UID: \"dae38fc6-dc8c-4958-a470-69de89a0e908\") " pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" Nov 27 08:34:58 crc kubenswrapper[4971]: I1127 08:34:58.070998 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xzsd\" (UniqueName: \"kubernetes.io/projected/dae38fc6-dc8c-4958-a470-69de89a0e908-kube-api-access-6xzsd\") pod \"dnsmasq-dns-76cb69558f-l5qnx\" (UID: \"dae38fc6-dc8c-4958-a470-69de89a0e908\") " pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" Nov 27 08:34:58 crc kubenswrapper[4971]: I1127 08:34:58.072422 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dae38fc6-dc8c-4958-a470-69de89a0e908-config\") pod \"dnsmasq-dns-76cb69558f-l5qnx\" (UID: \"dae38fc6-dc8c-4958-a470-69de89a0e908\") " pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" Nov 27 08:34:58 crc kubenswrapper[4971]: I1127 08:34:58.072419 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dae38fc6-dc8c-4958-a470-69de89a0e908-dns-svc\") pod \"dnsmasq-dns-76cb69558f-l5qnx\" (UID: \"dae38fc6-dc8c-4958-a470-69de89a0e908\") " pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" Nov 27 08:34:58 crc kubenswrapper[4971]: I1127 08:34:58.094954 4971 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xzsd\" (UniqueName: \"kubernetes.io/projected/dae38fc6-dc8c-4958-a470-69de89a0e908-kube-api-access-6xzsd\") pod \"dnsmasq-dns-76cb69558f-l5qnx\" (UID: \"dae38fc6-dc8c-4958-a470-69de89a0e908\") " pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" Nov 27 08:34:58 crc kubenswrapper[4971]: I1127 08:34:58.182333 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" Nov 27 08:34:58 crc kubenswrapper[4971]: I1127 08:34:58.434466 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76cb69558f-l5qnx"] Nov 27 08:34:58 crc kubenswrapper[4971]: I1127 08:34:58.840513 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 27 08:34:59 crc kubenswrapper[4971]: I1127 08:34:59.122686 4971 generic.go:334] "Generic (PLEG): container finished" podID="dae38fc6-dc8c-4958-a470-69de89a0e908" containerID="553f443adf450b65b4198bfee2afe73ceb17950e1d0742bf60d40cc687c6fd78" exitCode=0 Nov 27 08:34:59 crc kubenswrapper[4971]: I1127 08:34:59.122752 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" event={"ID":"dae38fc6-dc8c-4958-a470-69de89a0e908","Type":"ContainerDied","Data":"553f443adf450b65b4198bfee2afe73ceb17950e1d0742bf60d40cc687c6fd78"} Nov 27 08:34:59 crc kubenswrapper[4971]: I1127 08:34:59.122845 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" event={"ID":"dae38fc6-dc8c-4958-a470-69de89a0e908","Type":"ContainerStarted","Data":"bd56ed579974d7ef649501f3b2e65a865bd86914bb45581c8e367cb9abc6822f"} Nov 27 08:34:59 crc kubenswrapper[4971]: I1127 08:34:59.582284 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 27 08:35:00 crc kubenswrapper[4971]: I1127 08:35:00.136227 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" event={"ID":"dae38fc6-dc8c-4958-a470-69de89a0e908","Type":"ContainerStarted","Data":"219cf0599a216103fa5f9a3d6a8cd13bfd2bf0149a211f5f98cc4a53530ff558"} Nov 27 08:35:00 crc kubenswrapper[4971]: I1127 08:35:00.137634 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" Nov 27 08:35:00 crc kubenswrapper[4971]: I1127 08:35:00.163202 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" podStartSLOduration=3.163171055 podStartE2EDuration="3.163171055s" podCreationTimestamp="2025-11-27 08:34:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:35:00.157917598 +0000 UTC m=+6138.349961526" watchObservedRunningTime="2025-11-27 08:35:00.163171055 +0000 UTC m=+6138.355214973" Nov 27 08:35:00 crc kubenswrapper[4971]: I1127 08:35:00.822729 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="5398c358-10a0-4867-93bd-6b039b04584d" containerName="rabbitmq" containerID="cri-o://a891b44553b504dd182eb8a4d3c5a0bdb878ea43596d7f476d8d07aa87fb0025" gracePeriod=604799 Nov 27 08:35:01 crc kubenswrapper[4971]: I1127 08:35:01.728274 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="5398c358-10a0-4867-93bd-6b039b04584d" containerName="rabbitmq" probeResult="failure" output="dial tcp 
10.217.0.249:5672: connect: connection refused" Nov 27 08:35:01 crc kubenswrapper[4971]: I1127 08:35:01.847684 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="5b6c13cc-56c7-4f02-a51f-b6aab2e77525" containerName="rabbitmq" containerID="cri-o://ac16b3eb4879c86b49853a89ee78cc9d33a43ac3f6a5b04f2314442188b50f3d" gracePeriod=604798 Nov 27 08:35:02 crc kubenswrapper[4971]: I1127 08:35:02.018168 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="5b6c13cc-56c7-4f02-a51f-b6aab2e77525" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.250:5672: connect: connection refused" Nov 27 08:35:07 crc kubenswrapper[4971]: I1127 08:35:07.210105 4971 generic.go:334] "Generic (PLEG): container finished" podID="5398c358-10a0-4867-93bd-6b039b04584d" containerID="a891b44553b504dd182eb8a4d3c5a0bdb878ea43596d7f476d8d07aa87fb0025" exitCode=0 Nov 27 08:35:07 crc kubenswrapper[4971]: I1127 08:35:07.210225 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"5398c358-10a0-4867-93bd-6b039b04584d","Type":"ContainerDied","Data":"a891b44553b504dd182eb8a4d3c5a0bdb878ea43596d7f476d8d07aa87fb0025"} Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.477472 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.532651 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5398c358-10a0-4867-93bd-6b039b04584d-plugins-conf\") pod \"5398c358-10a0-4867-93bd-6b039b04584d\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.532948 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\") pod \"5398c358-10a0-4867-93bd-6b039b04584d\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.532997 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5398c358-10a0-4867-93bd-6b039b04584d-rabbitmq-erlang-cookie\") pod \"5398c358-10a0-4867-93bd-6b039b04584d\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.533025 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5398c358-10a0-4867-93bd-6b039b04584d-rabbitmq-confd\") pod \"5398c358-10a0-4867-93bd-6b039b04584d\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.533045 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5398c358-10a0-4867-93bd-6b039b04584d-erlang-cookie-secret\") pod \"5398c358-10a0-4867-93bd-6b039b04584d\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.533122 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5398c358-10a0-4867-93bd-6b039b04584d-rabbitmq-plugins\") pod 
\"5398c358-10a0-4867-93bd-6b039b04584d\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.533144 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8npc2\" (UniqueName: \"kubernetes.io/projected/5398c358-10a0-4867-93bd-6b039b04584d-kube-api-access-8npc2\") pod \"5398c358-10a0-4867-93bd-6b039b04584d\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.533179 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5398c358-10a0-4867-93bd-6b039b04584d-pod-info\") pod \"5398c358-10a0-4867-93bd-6b039b04584d\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.533253 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5398c358-10a0-4867-93bd-6b039b04584d-server-conf\") pod \"5398c358-10a0-4867-93bd-6b039b04584d\" (UID: \"5398c358-10a0-4867-93bd-6b039b04584d\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.533568 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5398c358-10a0-4867-93bd-6b039b04584d-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "5398c358-10a0-4867-93bd-6b039b04584d" (UID: "5398c358-10a0-4867-93bd-6b039b04584d"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.534417 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5398c358-10a0-4867-93bd-6b039b04584d-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "5398c358-10a0-4867-93bd-6b039b04584d" (UID: "5398c358-10a0-4867-93bd-6b039b04584d"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.536311 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5398c358-10a0-4867-93bd-6b039b04584d-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "5398c358-10a0-4867-93bd-6b039b04584d" (UID: "5398c358-10a0-4867-93bd-6b039b04584d"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.546839 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5398c358-10a0-4867-93bd-6b039b04584d-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "5398c358-10a0-4867-93bd-6b039b04584d" (UID: "5398c358-10a0-4867-93bd-6b039b04584d"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.547114 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5398c358-10a0-4867-93bd-6b039b04584d-kube-api-access-8npc2" (OuterVolumeSpecName: "kube-api-access-8npc2") pod "5398c358-10a0-4867-93bd-6b039b04584d" (UID: "5398c358-10a0-4867-93bd-6b039b04584d"). InnerVolumeSpecName "kube-api-access-8npc2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.564609 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cd7e20d9-a658-41ab-97c4-78b17177d549" (OuterVolumeSpecName: "persistence") pod "5398c358-10a0-4867-93bd-6b039b04584d" (UID: "5398c358-10a0-4867-93bd-6b039b04584d"). InnerVolumeSpecName "pvc-cd7e20d9-a658-41ab-97c4-78b17177d549". PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.574733 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/5398c358-10a0-4867-93bd-6b039b04584d-pod-info" (OuterVolumeSpecName: "pod-info") pod "5398c358-10a0-4867-93bd-6b039b04584d" (UID: "5398c358-10a0-4867-93bd-6b039b04584d"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.590709 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5398c358-10a0-4867-93bd-6b039b04584d-server-conf" (OuterVolumeSpecName: "server-conf") pod "5398c358-10a0-4867-93bd-6b039b04584d" (UID: "5398c358-10a0-4867-93bd-6b039b04584d"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.633468 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5398c358-10a0-4867-93bd-6b039b04584d-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "5398c358-10a0-4867-93bd-6b039b04584d" (UID: "5398c358-10a0-4867-93bd-6b039b04584d"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.634770 4971 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5398c358-10a0-4867-93bd-6b039b04584d-server-conf\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.634787 4971 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5398c358-10a0-4867-93bd-6b039b04584d-plugins-conf\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.634833 4971 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\") on node \"crc\" " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.634845 4971 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5398c358-10a0-4867-93bd-6b039b04584d-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.634857 4971 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5398c358-10a0-4867-93bd-6b039b04584d-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.634868 4971 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5398c358-10a0-4867-93bd-6b039b04584d-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.634877 4971 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5398c358-10a0-4867-93bd-6b039b04584d-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.634889 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8npc2\" (UniqueName: \"kubernetes.io/projected/5398c358-10a0-4867-93bd-6b039b04584d-kube-api-access-8npc2\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.634896 4971 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5398c358-10a0-4867-93bd-6b039b04584d-pod-info\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.665349 4971 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.665618 4971 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-cd7e20d9-a658-41ab-97c4-78b17177d549" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cd7e20d9-a658-41ab-97c4-78b17177d549") on node "crc" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:07.736193 4971 reconciler_common.go:293] "Volume detached for volume \"pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.184855 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.244009 4971 generic.go:334] "Generic (PLEG): container finished" podID="5b6c13cc-56c7-4f02-a51f-b6aab2e77525" containerID="ac16b3eb4879c86b49853a89ee78cc9d33a43ac3f6a5b04f2314442188b50f3d" exitCode=0 Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.244101 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5b6c13cc-56c7-4f02-a51f-b6aab2e77525","Type":"ContainerDied","Data":"ac16b3eb4879c86b49853a89ee78cc9d33a43ac3f6a5b04f2314442188b50f3d"} Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.276920 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"5398c358-10a0-4867-93bd-6b039b04584d","Type":"ContainerDied","Data":"0338b1101e518f5d10a70b939e91e7242af8c45949b6a4ea2e8bd791ee801c83"} Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.276992 4971 scope.go:117] "RemoveContainer" containerID="a891b44553b504dd182eb8a4d3c5a0bdb878ea43596d7f476d8d07aa87fb0025" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.277211 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.282460 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86766db695-js9qx"] Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.282802 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86766db695-js9qx" podUID="5e733141-e096-49ff-8b70-6d73873ae19d" containerName="dnsmasq-dns" containerID="cri-o://8191b26d822be3eb20cbc202685741ab47ffb1c7f6ff76734f7afa06d3d9a6a0" gracePeriod=10 Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.309741 4971 scope.go:117] "RemoveContainer" containerID="f1d968e923d782f250dc5cbf7fd29f72ed832d86378bd23727043b538a1534d2" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.356628 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.374052 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.406728 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Nov 27 08:35:08 crc kubenswrapper[4971]: E1127 08:35:08.407627 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5398c358-10a0-4867-93bd-6b039b04584d" containerName="setup-container" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.407757 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5398c358-10a0-4867-93bd-6b039b04584d" containerName="setup-container" Nov 27 08:35:08 crc kubenswrapper[4971]: E1127 08:35:08.407833 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5398c358-10a0-4867-93bd-6b039b04584d" containerName="rabbitmq" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.407907 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5398c358-10a0-4867-93bd-6b039b04584d" containerName="rabbitmq" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.408131 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="5398c358-10a0-4867-93bd-6b039b04584d" containerName="rabbitmq" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.420735 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.420944 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.427953 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.428179 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.428382 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-f4zwm" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.428508 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.429442 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.551563 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/90837c23-3be4-4046-8b88-b328e3ddd9e9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.551627 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/90837c23-3be4-4046-8b88-b328e3ddd9e9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.551659 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/90837c23-3be4-4046-8b88-b328e3ddd9e9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.551697 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/90837c23-3be4-4046-8b88-b328e3ddd9e9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.551753 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.551782 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/90837c23-3be4-4046-8b88-b328e3ddd9e9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.551804 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/90837c23-3be4-4046-8b88-b328e3ddd9e9-rabbitmq-plugins\") pod 
\"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.551824 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/90837c23-3be4-4046-8b88-b328e3ddd9e9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.551842 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smws9\" (UniqueName: \"kubernetes.io/projected/90837c23-3be4-4046-8b88-b328e3ddd9e9-kube-api-access-smws9\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.565006 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5398c358-10a0-4867-93bd-6b039b04584d" path="/var/lib/kubelet/pods/5398c358-10a0-4867-93bd-6b039b04584d/volumes" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.622647 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.653651 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-rabbitmq-plugins\") pod \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.654300 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-rabbitmq-erlang-cookie\") pod \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.654530 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-490752b4-17e2-4c20-8854-ab1db27ae229\") pod \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.654629 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-pod-info\") pod \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.654687 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5g6c5\" (UniqueName: \"kubernetes.io/projected/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-kube-api-access-5g6c5\") pod \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.654755 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-rabbitmq-confd\") pod \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.654822 
4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-erlang-cookie-secret\") pod \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.654879 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-plugins-conf\") pod \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.654967 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-server-conf\") pod \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\" (UID: \"5b6c13cc-56c7-4f02-a51f-b6aab2e77525\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.654977 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "5b6c13cc-56c7-4f02-a51f-b6aab2e77525" (UID: "5b6c13cc-56c7-4f02-a51f-b6aab2e77525"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.655281 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/90837c23-3be4-4046-8b88-b328e3ddd9e9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.655402 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.655449 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/90837c23-3be4-4046-8b88-b328e3ddd9e9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.655486 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/90837c23-3be4-4046-8b88-b328e3ddd9e9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.655526 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/90837c23-3be4-4046-8b88-b328e3ddd9e9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.655823 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smws9\" (UniqueName: 
\"kubernetes.io/projected/90837c23-3be4-4046-8b88-b328e3ddd9e9-kube-api-access-smws9\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.655925 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/90837c23-3be4-4046-8b88-b328e3ddd9e9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.655970 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/90837c23-3be4-4046-8b88-b328e3ddd9e9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.656047 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/90837c23-3be4-4046-8b88-b328e3ddd9e9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.656114 4971 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.656840 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/90837c23-3be4-4046-8b88-b328e3ddd9e9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.657707 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "5b6c13cc-56c7-4f02-a51f-b6aab2e77525" (UID: "5b6c13cc-56c7-4f02-a51f-b6aab2e77525"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.658272 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "5b6c13cc-56c7-4f02-a51f-b6aab2e77525" (UID: "5b6c13cc-56c7-4f02-a51f-b6aab2e77525"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.666037 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/90837c23-3be4-4046-8b88-b328e3ddd9e9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.666994 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/90837c23-3be4-4046-8b88-b328e3ddd9e9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.668998 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/90837c23-3be4-4046-8b88-b328e3ddd9e9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.670007 4971 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.670892 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/7b1a9f31da3f0acffca006ca5b1627887bf4693f5bc53509f19c460deb4900cb/globalmount\"" pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.671968 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/90837c23-3be4-4046-8b88-b328e3ddd9e9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.678120 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/90837c23-3be4-4046-8b88-b328e3ddd9e9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.678864 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "5b6c13cc-56c7-4f02-a51f-b6aab2e77525" (UID: "5b6c13cc-56c7-4f02-a51f-b6aab2e77525"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.679013 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-pod-info" (OuterVolumeSpecName: "pod-info") pod "5b6c13cc-56c7-4f02-a51f-b6aab2e77525" (UID: "5b6c13cc-56c7-4f02-a51f-b6aab2e77525"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.680051 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/90837c23-3be4-4046-8b88-b328e3ddd9e9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.688228 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-server-conf" (OuterVolumeSpecName: "server-conf") pod "5b6c13cc-56c7-4f02-a51f-b6aab2e77525" (UID: "5b6c13cc-56c7-4f02-a51f-b6aab2e77525"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.691683 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-kube-api-access-5g6c5" (OuterVolumeSpecName: "kube-api-access-5g6c5") pod "5b6c13cc-56c7-4f02-a51f-b6aab2e77525" (UID: "5b6c13cc-56c7-4f02-a51f-b6aab2e77525"). InnerVolumeSpecName "kube-api-access-5g6c5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.702207 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-490752b4-17e2-4c20-8854-ab1db27ae229" (OuterVolumeSpecName: "persistence") pod "5b6c13cc-56c7-4f02-a51f-b6aab2e77525" (UID: "5b6c13cc-56c7-4f02-a51f-b6aab2e77525"). InnerVolumeSpecName "pvc-490752b4-17e2-4c20-8854-ab1db27ae229". PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.705712 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smws9\" (UniqueName: \"kubernetes.io/projected/90837c23-3be4-4046-8b88-b328e3ddd9e9-kube-api-access-smws9\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.750910 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cd7e20d9-a658-41ab-97c4-78b17177d549\") pod \"rabbitmq-server-0\" (UID: \"90837c23-3be4-4046-8b88-b328e3ddd9e9\") " pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.759326 4971 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.759400 4971 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-490752b4-17e2-4c20-8854-ab1db27ae229\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-490752b4-17e2-4c20-8854-ab1db27ae229\") on node \"crc\" " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.759417 4971 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-pod-info\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.759427 4971 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-5g6c5\" (UniqueName: \"kubernetes.io/projected/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-kube-api-access-5g6c5\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.759443 4971 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.759453 4971 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-plugins-conf\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.759461 4971 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-server-conf\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.784289 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.804141 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "5b6c13cc-56c7-4f02-a51f-b6aab2e77525" (UID: "5b6c13cc-56c7-4f02-a51f-b6aab2e77525"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.832810 4971 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.833029 4971 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-490752b4-17e2-4c20-8854-ab1db27ae229" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-490752b4-17e2-4c20-8854-ab1db27ae229") on node "crc" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.861465 4971 reconciler_common.go:293] "Volume detached for volume \"pvc-490752b4-17e2-4c20-8854-ab1db27ae229\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-490752b4-17e2-4c20-8854-ab1db27ae229\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.861523 4971 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5b6c13cc-56c7-4f02-a51f-b6aab2e77525-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.862445 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86766db695-js9qx" Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.962824 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e733141-e096-49ff-8b70-6d73873ae19d-config\") pod \"5e733141-e096-49ff-8b70-6d73873ae19d\" (UID: \"5e733141-e096-49ff-8b70-6d73873ae19d\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.963045 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m47zk\" (UniqueName: \"kubernetes.io/projected/5e733141-e096-49ff-8b70-6d73873ae19d-kube-api-access-m47zk\") pod \"5e733141-e096-49ff-8b70-6d73873ae19d\" (UID: \"5e733141-e096-49ff-8b70-6d73873ae19d\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.963136 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e733141-e096-49ff-8b70-6d73873ae19d-dns-svc\") pod \"5e733141-e096-49ff-8b70-6d73873ae19d\" (UID: \"5e733141-e096-49ff-8b70-6d73873ae19d\") " Nov 27 08:35:08 crc kubenswrapper[4971]: I1127 08:35:08.973137 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e733141-e096-49ff-8b70-6d73873ae19d-kube-api-access-m47zk" (OuterVolumeSpecName: "kube-api-access-m47zk") pod "5e733141-e096-49ff-8b70-6d73873ae19d" (UID: "5e733141-e096-49ff-8b70-6d73873ae19d"). InnerVolumeSpecName "kube-api-access-m47zk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.012021 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e733141-e096-49ff-8b70-6d73873ae19d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5e733141-e096-49ff-8b70-6d73873ae19d" (UID: "5e733141-e096-49ff-8b70-6d73873ae19d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.012659 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e733141-e096-49ff-8b70-6d73873ae19d-config" (OuterVolumeSpecName: "config") pod "5e733141-e096-49ff-8b70-6d73873ae19d" (UID: "5e733141-e096-49ff-8b70-6d73873ae19d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.064792 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e733141-e096-49ff-8b70-6d73873ae19d-config\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.064835 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m47zk\" (UniqueName: \"kubernetes.io/projected/5e733141-e096-49ff-8b70-6d73873ae19d-kube-api-access-m47zk\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.064851 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e733141-e096-49ff-8b70-6d73873ae19d-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.270974 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.297953 4971 generic.go:334] "Generic (PLEG): container finished" podID="5e733141-e096-49ff-8b70-6d73873ae19d" containerID="8191b26d822be3eb20cbc202685741ab47ffb1c7f6ff76734f7afa06d3d9a6a0" exitCode=0 Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.298029 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86766db695-js9qx" event={"ID":"5e733141-e096-49ff-8b70-6d73873ae19d","Type":"ContainerDied","Data":"8191b26d822be3eb20cbc202685741ab47ffb1c7f6ff76734f7afa06d3d9a6a0"} Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.298096 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86766db695-js9qx" event={"ID":"5e733141-e096-49ff-8b70-6d73873ae19d","Type":"ContainerDied","Data":"a04280c558ed9c12b9959855f37b67e8639ce53315f89bfded0c9b5aca3e139e"} Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.298099 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86766db695-js9qx" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.298129 4971 scope.go:117] "RemoveContainer" containerID="8191b26d822be3eb20cbc202685741ab47ffb1c7f6ff76734f7afa06d3d9a6a0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.304984 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5b6c13cc-56c7-4f02-a51f-b6aab2e77525","Type":"ContainerDied","Data":"50eac806b03094ba83e095ccb48f3328c5e85ec5484a101912570fbceae229e2"} Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.305120 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.363236 4971 scope.go:117] "RemoveContainer" containerID="29e74990cf246a65b3b32392144c6b545d89182f0a786624446f71d002a71c12" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.405649 4971 scope.go:117] "RemoveContainer" containerID="8191b26d822be3eb20cbc202685741ab47ffb1c7f6ff76734f7afa06d3d9a6a0" Nov 27 08:35:09 crc kubenswrapper[4971]: E1127 08:35:09.406275 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8191b26d822be3eb20cbc202685741ab47ffb1c7f6ff76734f7afa06d3d9a6a0\": container with ID starting with 8191b26d822be3eb20cbc202685741ab47ffb1c7f6ff76734f7afa06d3d9a6a0 not found: ID does not exist" containerID="8191b26d822be3eb20cbc202685741ab47ffb1c7f6ff76734f7afa06d3d9a6a0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.406317 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8191b26d822be3eb20cbc202685741ab47ffb1c7f6ff76734f7afa06d3d9a6a0"} err="failed to get container status \"8191b26d822be3eb20cbc202685741ab47ffb1c7f6ff76734f7afa06d3d9a6a0\": rpc error: code = NotFound desc = could not find container \"8191b26d822be3eb20cbc202685741ab47ffb1c7f6ff76734f7afa06d3d9a6a0\": container with ID starting with 8191b26d822be3eb20cbc202685741ab47ffb1c7f6ff76734f7afa06d3d9a6a0 not found: ID does not exist" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.406352 4971 scope.go:117] "RemoveContainer" containerID="29e74990cf246a65b3b32392144c6b545d89182f0a786624446f71d002a71c12" Nov 27 08:35:09 crc kubenswrapper[4971]: E1127 08:35:09.406821 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29e74990cf246a65b3b32392144c6b545d89182f0a786624446f71d002a71c12\": container with ID starting with 29e74990cf246a65b3b32392144c6b545d89182f0a786624446f71d002a71c12 not found: ID does not exist" containerID="29e74990cf246a65b3b32392144c6b545d89182f0a786624446f71d002a71c12" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.406869 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29e74990cf246a65b3b32392144c6b545d89182f0a786624446f71d002a71c12"} err="failed to get container status \"29e74990cf246a65b3b32392144c6b545d89182f0a786624446f71d002a71c12\": rpc error: code = NotFound desc = could not find container \"29e74990cf246a65b3b32392144c6b545d89182f0a786624446f71d002a71c12\": container with ID starting with 29e74990cf246a65b3b32392144c6b545d89182f0a786624446f71d002a71c12 not found: ID does not exist" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.406905 4971 scope.go:117] "RemoveContainer" containerID="ac16b3eb4879c86b49853a89ee78cc9d33a43ac3f6a5b04f2314442188b50f3d" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.458044 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86766db695-js9qx"] Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.459324 4971 scope.go:117] "RemoveContainer" containerID="95155c3d100e3fb70f6a985c53fa95c4b2e7540bbe073ec542a3e0164f1da148" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.466158 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86766db695-js9qx"] Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.477455 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/rabbitmq-cell1-server-0"] Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.483789 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.500800 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 27 08:35:09 crc kubenswrapper[4971]: E1127 08:35:09.501227 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e733141-e096-49ff-8b70-6d73873ae19d" containerName="init" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.501246 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e733141-e096-49ff-8b70-6d73873ae19d" containerName="init" Nov 27 08:35:09 crc kubenswrapper[4971]: E1127 08:35:09.501266 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e733141-e096-49ff-8b70-6d73873ae19d" containerName="dnsmasq-dns" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.501275 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e733141-e096-49ff-8b70-6d73873ae19d" containerName="dnsmasq-dns" Nov 27 08:35:09 crc kubenswrapper[4971]: E1127 08:35:09.501292 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b6c13cc-56c7-4f02-a51f-b6aab2e77525" containerName="setup-container" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.501299 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b6c13cc-56c7-4f02-a51f-b6aab2e77525" containerName="setup-container" Nov 27 08:35:09 crc kubenswrapper[4971]: E1127 08:35:09.501331 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b6c13cc-56c7-4f02-a51f-b6aab2e77525" containerName="rabbitmq" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.501337 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b6c13cc-56c7-4f02-a51f-b6aab2e77525" containerName="rabbitmq" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.501501 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b6c13cc-56c7-4f02-a51f-b6aab2e77525" containerName="rabbitmq" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.501547 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e733141-e096-49ff-8b70-6d73873ae19d" containerName="dnsmasq-dns" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.502627 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.505779 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.505902 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.505958 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-jj442" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.505779 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.505906 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.518762 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.572981 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/15383cc2-2672-447c-8556-62f1b78de474-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.573066 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-490752b4-17e2-4c20-8854-ab1db27ae229\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-490752b4-17e2-4c20-8854-ab1db27ae229\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.573101 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/15383cc2-2672-447c-8556-62f1b78de474-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.573130 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-452c7\" (UniqueName: \"kubernetes.io/projected/15383cc2-2672-447c-8556-62f1b78de474-kube-api-access-452c7\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.573170 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/15383cc2-2672-447c-8556-62f1b78de474-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.573192 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/15383cc2-2672-447c-8556-62f1b78de474-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 
crc kubenswrapper[4971]: I1127 08:35:09.573212 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/15383cc2-2672-447c-8556-62f1b78de474-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.573248 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/15383cc2-2672-447c-8556-62f1b78de474-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.573271 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/15383cc2-2672-447c-8556-62f1b78de474-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.674552 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/15383cc2-2672-447c-8556-62f1b78de474-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.674616 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/15383cc2-2672-447c-8556-62f1b78de474-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.674664 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/15383cc2-2672-447c-8556-62f1b78de474-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.674713 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-490752b4-17e2-4c20-8854-ab1db27ae229\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-490752b4-17e2-4c20-8854-ab1db27ae229\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.674740 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/15383cc2-2672-447c-8556-62f1b78de474-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.674765 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-452c7\" (UniqueName: \"kubernetes.io/projected/15383cc2-2672-447c-8556-62f1b78de474-kube-api-access-452c7\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc 
kubenswrapper[4971]: I1127 08:35:09.674798 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/15383cc2-2672-447c-8556-62f1b78de474-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.674817 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/15383cc2-2672-447c-8556-62f1b78de474-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.674838 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/15383cc2-2672-447c-8556-62f1b78de474-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.675592 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/15383cc2-2672-447c-8556-62f1b78de474-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.675782 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/15383cc2-2672-447c-8556-62f1b78de474-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.676455 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/15383cc2-2672-447c-8556-62f1b78de474-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.676693 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/15383cc2-2672-447c-8556-62f1b78de474-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.678514 4971 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.678555 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-490752b4-17e2-4c20-8854-ab1db27ae229\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-490752b4-17e2-4c20-8854-ab1db27ae229\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a55fc649a39f621dfcc767b4b595ecc4fa4f3cc28a66de70c3352c90166a0312/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.679112 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/15383cc2-2672-447c-8556-62f1b78de474-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.679592 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/15383cc2-2672-447c-8556-62f1b78de474-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.680139 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/15383cc2-2672-447c-8556-62f1b78de474-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.697465 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-452c7\" (UniqueName: \"kubernetes.io/projected/15383cc2-2672-447c-8556-62f1b78de474-kube-api-access-452c7\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.716614 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-490752b4-17e2-4c20-8854-ab1db27ae229\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-490752b4-17e2-4c20-8854-ab1db27ae229\") pod \"rabbitmq-cell1-server-0\" (UID: \"15383cc2-2672-447c-8556-62f1b78de474\") " pod="openstack/rabbitmq-cell1-server-0" Nov 27 08:35:09 crc kubenswrapper[4971]: I1127 08:35:09.825273 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Nov 27 08:35:10 crc kubenswrapper[4971]: I1127 08:35:10.233666 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 27 08:35:10 crc kubenswrapper[4971]: I1127 08:35:10.317387 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"90837c23-3be4-4046-8b88-b328e3ddd9e9","Type":"ContainerStarted","Data":"a8f1ce09e13332110d307718cc8e85fd3e7dd28707ef988207ecf514102b89ad"}
Nov 27 08:35:10 crc kubenswrapper[4971]: I1127 08:35:10.319489 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"15383cc2-2672-447c-8556-62f1b78de474","Type":"ContainerStarted","Data":"d837bb4d565f666489f175f8e9d062454c7836d813fa3bf5538c3e3d9587d53f"}
Nov 27 08:35:10 crc kubenswrapper[4971]: I1127 08:35:10.567364 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b6c13cc-56c7-4f02-a51f-b6aab2e77525" path="/var/lib/kubelet/pods/5b6c13cc-56c7-4f02-a51f-b6aab2e77525/volumes"
Nov 27 08:35:10 crc kubenswrapper[4971]: I1127 08:35:10.568337 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e733141-e096-49ff-8b70-6d73873ae19d" path="/var/lib/kubelet/pods/5e733141-e096-49ff-8b70-6d73873ae19d/volumes"
Nov 27 08:35:11 crc kubenswrapper[4971]: I1127 08:35:11.329892 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"90837c23-3be4-4046-8b88-b328e3ddd9e9","Type":"ContainerStarted","Data":"c8780683eb037ed556575d4364c2f1de61786120f797d6a27265235673eb687b"}
Nov 27 08:35:12 crc kubenswrapper[4971]: I1127 08:35:12.340273 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"15383cc2-2672-447c-8556-62f1b78de474","Type":"ContainerStarted","Data":"ee903146bf6a46c159669f5a6f25e8ff3c4c867aaee224201bb8668d81db4a58"}
Nov 27 08:35:26 crc kubenswrapper[4971]: I1127 08:35:26.413130 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 08:35:26 crc kubenswrapper[4971]: I1127 08:35:26.413932 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 08:35:43 crc kubenswrapper[4971]: I1127 08:35:43.653747 4971 generic.go:334] "Generic (PLEG): container finished" podID="90837c23-3be4-4046-8b88-b328e3ddd9e9" containerID="c8780683eb037ed556575d4364c2f1de61786120f797d6a27265235673eb687b" exitCode=0
Nov 27 08:35:43 crc kubenswrapper[4971]: I1127 08:35:43.653872 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"90837c23-3be4-4046-8b88-b328e3ddd9e9","Type":"ContainerDied","Data":"c8780683eb037ed556575d4364c2f1de61786120f797d6a27265235673eb687b"}
Nov 27 08:35:44 crc kubenswrapper[4971]: I1127 08:35:44.666874 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"90837c23-3be4-4046-8b88-b328e3ddd9e9","Type":"ContainerStarted","Data":"2b23b9eb252a2d359e1e86b2cbee76b1e75e9e1030aee3c68bc2a4611604bdbc"}
Nov 27 08:35:44 crc kubenswrapper[4971]: I1127 08:35:44.667687 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Nov 27 08:35:44 crc kubenswrapper[4971]: I1127 08:35:44.669093 4971 generic.go:334] "Generic (PLEG): container finished" podID="15383cc2-2672-447c-8556-62f1b78de474" containerID="ee903146bf6a46c159669f5a6f25e8ff3c4c867aaee224201bb8668d81db4a58" exitCode=0
Nov 27 08:35:44 crc kubenswrapper[4971]: I1127 08:35:44.669137 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"15383cc2-2672-447c-8556-62f1b78de474","Type":"ContainerDied","Data":"ee903146bf6a46c159669f5a6f25e8ff3c4c867aaee224201bb8668d81db4a58"}
Nov 27 08:35:44 crc kubenswrapper[4971]: I1127 08:35:44.706500 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.706471108 podStartE2EDuration="36.706471108s" podCreationTimestamp="2025-11-27 08:35:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:35:44.691905038 +0000 UTC m=+6182.883948956" watchObservedRunningTime="2025-11-27 08:35:44.706471108 +0000 UTC m=+6182.898515026"
Nov 27 08:35:45 crc kubenswrapper[4971]: I1127 08:35:45.679715 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"15383cc2-2672-447c-8556-62f1b78de474","Type":"ContainerStarted","Data":"3fdc25fdf0380db55b92e40ad25c5bfb4652f4476179bfdde341a77cdf89b681"}
Nov 27 08:35:45 crc kubenswrapper[4971]: I1127 08:35:45.680710 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Nov 27 08:35:45 crc kubenswrapper[4971]: I1127 08:35:45.718645 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.718620533 podStartE2EDuration="36.718620533s" podCreationTimestamp="2025-11-27 08:35:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:35:45.713928881 +0000 UTC m=+6183.905972819" watchObservedRunningTime="2025-11-27 08:35:45.718620533 +0000 UTC m=+6183.910664451"
Nov 27 08:35:56 crc kubenswrapper[4971]: I1127 08:35:56.413495 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 08:35:56 crc kubenswrapper[4971]: I1127 08:35:56.414404 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 08:35:56 crc kubenswrapper[4971]: I1127 08:35:56.414476 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h"
Nov 27 08:35:56 crc kubenswrapper[4971]: I1127 08:35:56.415524 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c5fdcd7a26114da98d92fa593d9bd5735d2520f363dead48d0a133e41557b50b"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 27 08:35:56 crc kubenswrapper[4971]: I1127 08:35:56.415637 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://c5fdcd7a26114da98d92fa593d9bd5735d2520f363dead48d0a133e41557b50b" gracePeriod=600
Nov 27 08:35:56 crc kubenswrapper[4971]: I1127 08:35:56.770570 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="c5fdcd7a26114da98d92fa593d9bd5735d2520f363dead48d0a133e41557b50b" exitCode=0
Nov 27 08:35:56 crc kubenswrapper[4971]: I1127 08:35:56.770974 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"c5fdcd7a26114da98d92fa593d9bd5735d2520f363dead48d0a133e41557b50b"}
Nov 27 08:35:56 crc kubenswrapper[4971]: I1127 08:35:56.771027 4971 scope.go:117] "RemoveContainer" containerID="93cd60eaf9341477b0a46d5e899a633f82782eec17c2d7bb6388b091c64ffa3a"
Nov 27 08:35:57 crc kubenswrapper[4971]: I1127 08:35:57.782938 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d"}
Nov 27 08:35:58 crc kubenswrapper[4971]: I1127 08:35:58.787569 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Nov 27 08:35:59 crc kubenswrapper[4971]: I1127 08:35:59.829868 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Nov 27 08:36:11 crc kubenswrapper[4971]: I1127 08:36:11.634804 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1-default"]
Nov 27 08:36:11 crc kubenswrapper[4971]: I1127 08:36:11.636973 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default"
Nov 27 08:36:11 crc kubenswrapper[4971]: I1127 08:36:11.639136 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-8nz9c"
Nov 27 08:36:11 crc kubenswrapper[4971]: I1127 08:36:11.646379 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"]
Nov 27 08:36:11 crc kubenswrapper[4971]: I1127 08:36:11.821358 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76ths\" (UniqueName: \"kubernetes.io/projected/c1753496-a420-4bdd-a51b-03b270f7edaa-kube-api-access-76ths\") pod \"mariadb-client-1-default\" (UID: \"c1753496-a420-4bdd-a51b-03b270f7edaa\") " pod="openstack/mariadb-client-1-default"
Nov 27 08:36:11 crc kubenswrapper[4971]: I1127 08:36:11.922833 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76ths\" (UniqueName: \"kubernetes.io/projected/c1753496-a420-4bdd-a51b-03b270f7edaa-kube-api-access-76ths\") pod \"mariadb-client-1-default\" (UID: \"c1753496-a420-4bdd-a51b-03b270f7edaa\") " pod="openstack/mariadb-client-1-default"
Nov 27 08:36:11 crc kubenswrapper[4971]: I1127 08:36:11.948341 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76ths\" (UniqueName: \"kubernetes.io/projected/c1753496-a420-4bdd-a51b-03b270f7edaa-kube-api-access-76ths\") pod \"mariadb-client-1-default\" (UID: \"c1753496-a420-4bdd-a51b-03b270f7edaa\") " pod="openstack/mariadb-client-1-default"
Nov 27 08:36:11 crc kubenswrapper[4971]: I1127 08:36:11.961338 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default"
Nov 27 08:36:12 crc kubenswrapper[4971]: I1127 08:36:12.500395 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"]
Nov 27 08:36:12 crc kubenswrapper[4971]: I1127 08:36:12.918548 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"c1753496-a420-4bdd-a51b-03b270f7edaa","Type":"ContainerStarted","Data":"054c80298afb9cadaf4556aee375d40c913f3424d2d30a3ea546cd28c9d2a1c7"}
Nov 27 08:36:13 crc kubenswrapper[4971]: I1127 08:36:13.937816 4971 generic.go:334] "Generic (PLEG): container finished" podID="c1753496-a420-4bdd-a51b-03b270f7edaa" containerID="3b907a1368309dda8e7331ab54c94892e08c5bcf69b28266b2f72d0fb973f099" exitCode=0
Nov 27 08:36:13 crc kubenswrapper[4971]: I1127 08:36:13.937904 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"c1753496-a420-4bdd-a51b-03b270f7edaa","Type":"ContainerDied","Data":"3b907a1368309dda8e7331ab54c94892e08c5bcf69b28266b2f72d0fb973f099"}
Nov 27 08:36:15 crc kubenswrapper[4971]: I1127 08:36:15.329388 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default"
Nov 27 08:36:15 crc kubenswrapper[4971]: I1127 08:36:15.366696 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1-default_c1753496-a420-4bdd-a51b-03b270f7edaa/mariadb-client-1-default/0.log"
Nov 27 08:36:15 crc kubenswrapper[4971]: I1127 08:36:15.400804 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1-default"]
Nov 27 08:36:15 crc kubenswrapper[4971]: I1127 08:36:15.409866 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1-default"]
Nov 27 08:36:15 crc kubenswrapper[4971]: I1127 08:36:15.494057 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76ths\" (UniqueName: \"kubernetes.io/projected/c1753496-a420-4bdd-a51b-03b270f7edaa-kube-api-access-76ths\") pod \"c1753496-a420-4bdd-a51b-03b270f7edaa\" (UID: \"c1753496-a420-4bdd-a51b-03b270f7edaa\") "
Nov 27 08:36:15 crc kubenswrapper[4971]: I1127 08:36:15.501631 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1753496-a420-4bdd-a51b-03b270f7edaa-kube-api-access-76ths" (OuterVolumeSpecName: "kube-api-access-76ths") pod "c1753496-a420-4bdd-a51b-03b270f7edaa" (UID: "c1753496-a420-4bdd-a51b-03b270f7edaa"). InnerVolumeSpecName "kube-api-access-76ths". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:36:15 crc kubenswrapper[4971]: I1127 08:36:15.597350 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76ths\" (UniqueName: \"kubernetes.io/projected/c1753496-a420-4bdd-a51b-03b270f7edaa-kube-api-access-76ths\") on node \"crc\" DevicePath \"\""
Nov 27 08:36:15 crc kubenswrapper[4971]: I1127 08:36:15.800550 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2-default"]
Nov 27 08:36:15 crc kubenswrapper[4971]: E1127 08:36:15.800982 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1753496-a420-4bdd-a51b-03b270f7edaa" containerName="mariadb-client-1-default"
Nov 27 08:36:15 crc kubenswrapper[4971]: I1127 08:36:15.800995 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1753496-a420-4bdd-a51b-03b270f7edaa" containerName="mariadb-client-1-default"
Nov 27 08:36:15 crc kubenswrapper[4971]: I1127 08:36:15.801216 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1753496-a420-4bdd-a51b-03b270f7edaa" containerName="mariadb-client-1-default"
Nov 27 08:36:15 crc kubenswrapper[4971]: I1127 08:36:15.802401 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default"
Nov 27 08:36:15 crc kubenswrapper[4971]: I1127 08:36:15.812938 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"]
Nov 27 08:36:15 crc kubenswrapper[4971]: I1127 08:36:15.954740 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="054c80298afb9cadaf4556aee375d40c913f3424d2d30a3ea546cd28c9d2a1c7"
Nov 27 08:36:15 crc kubenswrapper[4971]: I1127 08:36:15.954826 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default"
Nov 27 08:36:16 crc kubenswrapper[4971]: I1127 08:36:16.004726 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2ndz\" (UniqueName: \"kubernetes.io/projected/aa0ec078-683c-485e-aef9-5b31577ab8c7-kube-api-access-h2ndz\") pod \"mariadb-client-2-default\" (UID: \"aa0ec078-683c-485e-aef9-5b31577ab8c7\") " pod="openstack/mariadb-client-2-default"
Nov 27 08:36:16 crc kubenswrapper[4971]: I1127 08:36:16.106433 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2ndz\" (UniqueName: \"kubernetes.io/projected/aa0ec078-683c-485e-aef9-5b31577ab8c7-kube-api-access-h2ndz\") pod \"mariadb-client-2-default\" (UID: \"aa0ec078-683c-485e-aef9-5b31577ab8c7\") " pod="openstack/mariadb-client-2-default"
Nov 27 08:36:16 crc kubenswrapper[4971]: I1127 08:36:16.128964 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2ndz\" (UniqueName: \"kubernetes.io/projected/aa0ec078-683c-485e-aef9-5b31577ab8c7-kube-api-access-h2ndz\") pod \"mariadb-client-2-default\" (UID: \"aa0ec078-683c-485e-aef9-5b31577ab8c7\") " pod="openstack/mariadb-client-2-default"
Nov 27 08:36:16 crc kubenswrapper[4971]: I1127 08:36:16.129329 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default"
Nov 27 08:36:16 crc kubenswrapper[4971]: I1127 08:36:16.561338 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1753496-a420-4bdd-a51b-03b270f7edaa" path="/var/lib/kubelet/pods/c1753496-a420-4bdd-a51b-03b270f7edaa/volumes"
Nov 27 08:36:16 crc kubenswrapper[4971]: I1127 08:36:16.680423 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"]
Nov 27 08:36:16 crc kubenswrapper[4971]: W1127 08:36:16.689907 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa0ec078_683c_485e_aef9_5b31577ab8c7.slice/crio-9696cdc1f69e6d08c11a5eeaff26dde74c91bcbbeef88a855c462a75957b07dd WatchSource:0}: Error finding container 9696cdc1f69e6d08c11a5eeaff26dde74c91bcbbeef88a855c462a75957b07dd: Status 404 returned error can't find the container with id 9696cdc1f69e6d08c11a5eeaff26dde74c91bcbbeef88a855c462a75957b07dd
Nov 27 08:36:16 crc kubenswrapper[4971]: I1127 08:36:16.971121 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"aa0ec078-683c-485e-aef9-5b31577ab8c7","Type":"ContainerStarted","Data":"664a17b3bc26c68302e7f441f65dee5328f8e5fd8bec7b6ec07f8801977387dc"}
Nov 27 08:36:16 crc kubenswrapper[4971]: I1127 08:36:16.971213 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"aa0ec078-683c-485e-aef9-5b31577ab8c7","Type":"ContainerStarted","Data":"9696cdc1f69e6d08c11a5eeaff26dde74c91bcbbeef88a855c462a75957b07dd"}
Nov 27 08:36:16 crc kubenswrapper[4971]: I1127 08:36:16.996877 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-2-default" podStartSLOduration=1.9968459790000002 podStartE2EDuration="1.996845979s" podCreationTimestamp="2025-11-27 08:36:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:36:16.988885796 +0000 UTC m=+6215.180929714" watchObservedRunningTime="2025-11-27 08:36:16.996845979 +0000 UTC m=+6215.188889897"
Nov 27 08:36:17 crc kubenswrapper[4971]: I1127 08:36:17.983866 4971 generic.go:334] "Generic (PLEG): container finished" podID="aa0ec078-683c-485e-aef9-5b31577ab8c7" containerID="664a17b3bc26c68302e7f441f65dee5328f8e5fd8bec7b6ec07f8801977387dc" exitCode=1
Nov 27 08:36:17 crc kubenswrapper[4971]: I1127 08:36:17.984047 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"aa0ec078-683c-485e-aef9-5b31577ab8c7","Type":"ContainerDied","Data":"664a17b3bc26c68302e7f441f65dee5328f8e5fd8bec7b6ec07f8801977387dc"}
Nov 27 08:36:19 crc kubenswrapper[4971]: I1127 08:36:19.417277 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default"
Nov 27 08:36:19 crc kubenswrapper[4971]: I1127 08:36:19.458849 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2-default"]
Nov 27 08:36:19 crc kubenswrapper[4971]: I1127 08:36:19.468456 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2-default"]
Nov 27 08:36:19 crc kubenswrapper[4971]: I1127 08:36:19.475008 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2ndz\" (UniqueName: \"kubernetes.io/projected/aa0ec078-683c-485e-aef9-5b31577ab8c7-kube-api-access-h2ndz\") pod \"aa0ec078-683c-485e-aef9-5b31577ab8c7\" (UID: \"aa0ec078-683c-485e-aef9-5b31577ab8c7\") "
Nov 27 08:36:19 crc kubenswrapper[4971]: I1127 08:36:19.484072 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa0ec078-683c-485e-aef9-5b31577ab8c7-kube-api-access-h2ndz" (OuterVolumeSpecName: "kube-api-access-h2ndz") pod "aa0ec078-683c-485e-aef9-5b31577ab8c7" (UID: "aa0ec078-683c-485e-aef9-5b31577ab8c7"). InnerVolumeSpecName "kube-api-access-h2ndz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:36:19 crc kubenswrapper[4971]: I1127 08:36:19.577850 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2ndz\" (UniqueName: \"kubernetes.io/projected/aa0ec078-683c-485e-aef9-5b31577ab8c7-kube-api-access-h2ndz\") on node \"crc\" DevicePath \"\""
Nov 27 08:36:19 crc kubenswrapper[4971]: I1127 08:36:19.860314 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1"]
Nov 27 08:36:19 crc kubenswrapper[4971]: E1127 08:36:19.860865 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa0ec078-683c-485e-aef9-5b31577ab8c7" containerName="mariadb-client-2-default"
Nov 27 08:36:19 crc kubenswrapper[4971]: I1127 08:36:19.860889 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa0ec078-683c-485e-aef9-5b31577ab8c7" containerName="mariadb-client-2-default"
Nov 27 08:36:19 crc kubenswrapper[4971]: I1127 08:36:19.861098 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa0ec078-683c-485e-aef9-5b31577ab8c7" containerName="mariadb-client-2-default"
Nov 27 08:36:19 crc kubenswrapper[4971]: I1127 08:36:19.861782 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1"
Nov 27 08:36:19 crc kubenswrapper[4971]: I1127 08:36:19.873784 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"]
Nov 27 08:36:19 crc kubenswrapper[4971]: I1127 08:36:19.881453 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lw4ss\" (UniqueName: \"kubernetes.io/projected/679c8f95-db75-4a2f-ae1c-e451c6b7449f-kube-api-access-lw4ss\") pod \"mariadb-client-1\" (UID: \"679c8f95-db75-4a2f-ae1c-e451c6b7449f\") " pod="openstack/mariadb-client-1"
Nov 27 08:36:19 crc kubenswrapper[4971]: I1127 08:36:19.982999 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lw4ss\" (UniqueName: \"kubernetes.io/projected/679c8f95-db75-4a2f-ae1c-e451c6b7449f-kube-api-access-lw4ss\") pod \"mariadb-client-1\" (UID: \"679c8f95-db75-4a2f-ae1c-e451c6b7449f\") " pod="openstack/mariadb-client-1"
Nov 27 08:36:20 crc kubenswrapper[4971]: I1127 08:36:20.000832 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lw4ss\" (UniqueName: \"kubernetes.io/projected/679c8f95-db75-4a2f-ae1c-e451c6b7449f-kube-api-access-lw4ss\") pod \"mariadb-client-1\" (UID: \"679c8f95-db75-4a2f-ae1c-e451c6b7449f\") " pod="openstack/mariadb-client-1"
Nov 27 08:36:20 crc kubenswrapper[4971]: I1127 08:36:20.005781 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9696cdc1f69e6d08c11a5eeaff26dde74c91bcbbeef88a855c462a75957b07dd"
Nov 27 08:36:20 crc kubenswrapper[4971]: I1127 08:36:20.005968 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default"
Nov 27 08:36:20 crc kubenswrapper[4971]: I1127 08:36:20.191518 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1"
Nov 27 08:36:20 crc kubenswrapper[4971]: I1127 08:36:20.563438 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa0ec078-683c-485e-aef9-5b31577ab8c7" path="/var/lib/kubelet/pods/aa0ec078-683c-485e-aef9-5b31577ab8c7/volumes"
Nov 27 08:36:20 crc kubenswrapper[4971]: I1127 08:36:20.803091 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"]
Nov 27 08:36:20 crc kubenswrapper[4971]: W1127 08:36:20.812850 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod679c8f95_db75_4a2f_ae1c_e451c6b7449f.slice/crio-f665594c3f6abe61bc74af983a3bcb02a9aafc7948a5fc63eb358ae120d3301d WatchSource:0}: Error finding container f665594c3f6abe61bc74af983a3bcb02a9aafc7948a5fc63eb358ae120d3301d: Status 404 returned error can't find the container with id f665594c3f6abe61bc74af983a3bcb02a9aafc7948a5fc63eb358ae120d3301d
Nov 27 08:36:21 crc kubenswrapper[4971]: I1127 08:36:21.020165 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"679c8f95-db75-4a2f-ae1c-e451c6b7449f","Type":"ContainerStarted","Data":"6a2a95472c871c99c30f2a6bdcff16430decb8294899f5a45a9abf90fb5e5e1c"}
Nov 27 08:36:21 crc kubenswrapper[4971]: I1127 08:36:21.020659 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"679c8f95-db75-4a2f-ae1c-e451c6b7449f","Type":"ContainerStarted","Data":"f665594c3f6abe61bc74af983a3bcb02a9aafc7948a5fc63eb358ae120d3301d"}
Nov 27 08:36:21 crc kubenswrapper[4971]: I1127 08:36:21.046449 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-1" podStartSLOduration=2.046425708 podStartE2EDuration="2.046425708s" podCreationTimestamp="2025-11-27 08:36:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:36:21.03829141 +0000 UTC m=+6219.230335338" watchObservedRunningTime="2025-11-27 08:36:21.046425708 +0000 UTC m=+6219.238469626"
Nov 27 08:36:21 crc kubenswrapper[4971]: I1127 08:36:21.091300 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1_679c8f95-db75-4a2f-ae1c-e451c6b7449f/mariadb-client-1/0.log"
Nov 27 08:36:22 crc kubenswrapper[4971]: I1127 08:36:22.032948 4971 generic.go:334] "Generic (PLEG): container finished" podID="679c8f95-db75-4a2f-ae1c-e451c6b7449f" containerID="6a2a95472c871c99c30f2a6bdcff16430decb8294899f5a45a9abf90fb5e5e1c" exitCode=0
Nov 27 08:36:22 crc kubenswrapper[4971]: I1127 08:36:22.032988 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"679c8f95-db75-4a2f-ae1c-e451c6b7449f","Type":"ContainerDied","Data":"6a2a95472c871c99c30f2a6bdcff16430decb8294899f5a45a9abf90fb5e5e1c"}
Nov 27 08:36:23 crc kubenswrapper[4971]: I1127 08:36:23.459246 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1"
Nov 27 08:36:23 crc kubenswrapper[4971]: I1127 08:36:23.508054 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1"]
Nov 27 08:36:23 crc kubenswrapper[4971]: I1127 08:36:23.516132 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1"]
Nov 27 08:36:23 crc kubenswrapper[4971]: I1127 08:36:23.648609 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lw4ss\" (UniqueName: \"kubernetes.io/projected/679c8f95-db75-4a2f-ae1c-e451c6b7449f-kube-api-access-lw4ss\") pod \"679c8f95-db75-4a2f-ae1c-e451c6b7449f\" (UID: \"679c8f95-db75-4a2f-ae1c-e451c6b7449f\") "
Nov 27 08:36:23 crc kubenswrapper[4971]: I1127 08:36:23.660813 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/679c8f95-db75-4a2f-ae1c-e451c6b7449f-kube-api-access-lw4ss" (OuterVolumeSpecName: "kube-api-access-lw4ss") pod "679c8f95-db75-4a2f-ae1c-e451c6b7449f" (UID: "679c8f95-db75-4a2f-ae1c-e451c6b7449f"). InnerVolumeSpecName "kube-api-access-lw4ss". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:36:23 crc kubenswrapper[4971]: I1127 08:36:23.751701 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lw4ss\" (UniqueName: \"kubernetes.io/projected/679c8f95-db75-4a2f-ae1c-e451c6b7449f-kube-api-access-lw4ss\") on node \"crc\" DevicePath \"\""
Nov 27 08:36:23 crc kubenswrapper[4971]: I1127 08:36:23.929239 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-4-default"]
Nov 27 08:36:23 crc kubenswrapper[4971]: E1127 08:36:23.929789 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="679c8f95-db75-4a2f-ae1c-e451c6b7449f" containerName="mariadb-client-1"
Nov 27 08:36:23 crc kubenswrapper[4971]: I1127 08:36:23.929814 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="679c8f95-db75-4a2f-ae1c-e451c6b7449f" containerName="mariadb-client-1"
Nov 27 08:36:23 crc kubenswrapper[4971]: I1127 08:36:23.930059 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="679c8f95-db75-4a2f-ae1c-e451c6b7449f" containerName="mariadb-client-1"
Nov 27 08:36:23 crc kubenswrapper[4971]: I1127 08:36:23.930822 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default"
Nov 27 08:36:23 crc kubenswrapper[4971]: I1127 08:36:23.935744 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"]
Nov 27 08:36:23 crc kubenswrapper[4971]: I1127 08:36:23.953975 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9d7bn\" (UniqueName: \"kubernetes.io/projected/03aff38a-a1a7-424b-a036-1e234df890b2-kube-api-access-9d7bn\") pod \"mariadb-client-4-default\" (UID: \"03aff38a-a1a7-424b-a036-1e234df890b2\") " pod="openstack/mariadb-client-4-default"
Nov 27 08:36:24 crc kubenswrapper[4971]: I1127 08:36:24.052705 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f665594c3f6abe61bc74af983a3bcb02a9aafc7948a5fc63eb358ae120d3301d"
Nov 27 08:36:24 crc kubenswrapper[4971]: I1127 08:36:24.052759 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1"
Nov 27 08:36:24 crc kubenswrapper[4971]: I1127 08:36:24.055633 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9d7bn\" (UniqueName: \"kubernetes.io/projected/03aff38a-a1a7-424b-a036-1e234df890b2-kube-api-access-9d7bn\") pod \"mariadb-client-4-default\" (UID: \"03aff38a-a1a7-424b-a036-1e234df890b2\") " pod="openstack/mariadb-client-4-default"
Nov 27 08:36:24 crc kubenswrapper[4971]: I1127 08:36:24.073428 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9d7bn\" (UniqueName: \"kubernetes.io/projected/03aff38a-a1a7-424b-a036-1e234df890b2-kube-api-access-9d7bn\") pod \"mariadb-client-4-default\" (UID: \"03aff38a-a1a7-424b-a036-1e234df890b2\") " pod="openstack/mariadb-client-4-default"
Nov 27 08:36:24 crc kubenswrapper[4971]: I1127 08:36:24.291471 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default"
Nov 27 08:36:24 crc kubenswrapper[4971]: I1127 08:36:24.559362 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="679c8f95-db75-4a2f-ae1c-e451c6b7449f" path="/var/lib/kubelet/pods/679c8f95-db75-4a2f-ae1c-e451c6b7449f/volumes"
Nov 27 08:36:24 crc kubenswrapper[4971]: I1127 08:36:24.875032 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"]
Nov 27 08:36:24 crc kubenswrapper[4971]: W1127 08:36:24.877347 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod03aff38a_a1a7_424b_a036_1e234df890b2.slice/crio-1baac7fdec74a911bc4c12910045bffe11023b3dcbc3c048a61352e759113306 WatchSource:0}: Error finding container 1baac7fdec74a911bc4c12910045bffe11023b3dcbc3c048a61352e759113306: Status 404 returned error can't find the container with id 1baac7fdec74a911bc4c12910045bffe11023b3dcbc3c048a61352e759113306
Nov 27 08:36:25 crc kubenswrapper[4971]: I1127 08:36:25.062507 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"03aff38a-a1a7-424b-a036-1e234df890b2","Type":"ContainerStarted","Data":"105853af7f95005457dacd3abc12e0196c2dd817c139d620eb809d7fde09edfd"}
Nov 27 08:36:25 crc kubenswrapper[4971]: I1127 08:36:25.063032 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"03aff38a-a1a7-424b-a036-1e234df890b2","Type":"ContainerStarted","Data":"1baac7fdec74a911bc4c12910045bffe11023b3dcbc3c048a61352e759113306"}
Nov 27 08:36:25 crc kubenswrapper[4971]: I1127 08:36:25.085902 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-4-default" podStartSLOduration=2.085875543 podStartE2EDuration="2.085875543s" podCreationTimestamp="2025-11-27 08:36:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:36:25.081171221 +0000 UTC m=+6223.273215139" watchObservedRunningTime="2025-11-27 08:36:25.085875543 +0000 UTC m=+6223.277919451"
Nov 27 08:36:25 crc kubenswrapper[4971]: I1127 08:36:25.128681 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-4-default_03aff38a-a1a7-424b-a036-1e234df890b2/mariadb-client-4-default/0.log"
Nov 27 08:36:26 crc kubenswrapper[4971]: I1127 08:36:26.074389 4971 generic.go:334] "Generic (PLEG): container finished" podID="03aff38a-a1a7-424b-a036-1e234df890b2" containerID="105853af7f95005457dacd3abc12e0196c2dd817c139d620eb809d7fde09edfd" exitCode=0
Nov 27 08:36:26 crc kubenswrapper[4971]: I1127 08:36:26.074452 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"03aff38a-a1a7-424b-a036-1e234df890b2","Type":"ContainerDied","Data":"105853af7f95005457dacd3abc12e0196c2dd817c139d620eb809d7fde09edfd"}
Nov 27 08:36:27 crc kubenswrapper[4971]: I1127 08:36:27.475009 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default"
Nov 27 08:36:27 crc kubenswrapper[4971]: I1127 08:36:27.519324 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-4-default"]
Nov 27 08:36:27 crc kubenswrapper[4971]: I1127 08:36:27.527205 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-4-default"]
Nov 27 08:36:27 crc kubenswrapper[4971]: I1127 08:36:27.622199 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9d7bn\" (UniqueName: \"kubernetes.io/projected/03aff38a-a1a7-424b-a036-1e234df890b2-kube-api-access-9d7bn\") pod \"03aff38a-a1a7-424b-a036-1e234df890b2\" (UID: \"03aff38a-a1a7-424b-a036-1e234df890b2\") "
Nov 27 08:36:27 crc kubenswrapper[4971]: I1127 08:36:27.630771 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03aff38a-a1a7-424b-a036-1e234df890b2-kube-api-access-9d7bn" (OuterVolumeSpecName: "kube-api-access-9d7bn") pod "03aff38a-a1a7-424b-a036-1e234df890b2" (UID: "03aff38a-a1a7-424b-a036-1e234df890b2"). InnerVolumeSpecName "kube-api-access-9d7bn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:36:27 crc kubenswrapper[4971]: I1127 08:36:27.725806 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9d7bn\" (UniqueName: \"kubernetes.io/projected/03aff38a-a1a7-424b-a036-1e234df890b2-kube-api-access-9d7bn\") on node \"crc\" DevicePath \"\""
Nov 27 08:36:28 crc kubenswrapper[4971]: I1127 08:36:28.094697 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1baac7fdec74a911bc4c12910045bffe11023b3dcbc3c048a61352e759113306"
Nov 27 08:36:28 crc kubenswrapper[4971]: I1127 08:36:28.094815 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default"
Nov 27 08:36:28 crc kubenswrapper[4971]: I1127 08:36:28.561295 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03aff38a-a1a7-424b-a036-1e234df890b2" path="/var/lib/kubelet/pods/03aff38a-a1a7-424b-a036-1e234df890b2/volumes"
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.209287 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-5-default"]
Nov 27 08:36:31 crc kubenswrapper[4971]: E1127 08:36:31.210069 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03aff38a-a1a7-424b-a036-1e234df890b2" containerName="mariadb-client-4-default"
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.210086 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="03aff38a-a1a7-424b-a036-1e234df890b2" containerName="mariadb-client-4-default"
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.210294 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="03aff38a-a1a7-424b-a036-1e234df890b2" containerName="mariadb-client-4-default"
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.211101 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default"
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.222163 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"]
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.253803 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-8nz9c"
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.298904 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6z6r\" (UniqueName: \"kubernetes.io/projected/b57de533-e1b6-4cec-aa1e-510abad03ff5-kube-api-access-h6z6r\") pod \"mariadb-client-5-default\" (UID: \"b57de533-e1b6-4cec-aa1e-510abad03ff5\") " pod="openstack/mariadb-client-5-default"
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.400703 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6z6r\" (UniqueName: \"kubernetes.io/projected/b57de533-e1b6-4cec-aa1e-510abad03ff5-kube-api-access-h6z6r\") pod \"mariadb-client-5-default\" (UID: \"b57de533-e1b6-4cec-aa1e-510abad03ff5\") " pod="openstack/mariadb-client-5-default"
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.431303 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6z6r\" (UniqueName: \"kubernetes.io/projected/b57de533-e1b6-4cec-aa1e-510abad03ff5-kube-api-access-h6z6r\") pod \"mariadb-client-5-default\" (UID: \"b57de533-e1b6-4cec-aa1e-510abad03ff5\") " pod="openstack/mariadb-client-5-default"
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.573788 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default"
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.670583 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-st8kl"]
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.677109 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-st8kl"
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.687384 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-st8kl"]
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.705838 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e1f60f0-f534-46e7-a463-648084a52735-utilities\") pod \"redhat-marketplace-st8kl\" (UID: \"2e1f60f0-f534-46e7-a463-648084a52735\") " pod="openshift-marketplace/redhat-marketplace-st8kl"
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.705915 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kzpn\" (UniqueName: \"kubernetes.io/projected/2e1f60f0-f534-46e7-a463-648084a52735-kube-api-access-7kzpn\") pod \"redhat-marketplace-st8kl\" (UID: \"2e1f60f0-f534-46e7-a463-648084a52735\") " pod="openshift-marketplace/redhat-marketplace-st8kl"
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.705973 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e1f60f0-f534-46e7-a463-648084a52735-catalog-content\") pod \"redhat-marketplace-st8kl\" (UID: \"2e1f60f0-f534-46e7-a463-648084a52735\") " pod="openshift-marketplace/redhat-marketplace-st8kl"
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.808260 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e1f60f0-f534-46e7-a463-648084a52735-catalog-content\") pod \"redhat-marketplace-st8kl\" (UID: \"2e1f60f0-f534-46e7-a463-648084a52735\") " pod="openshift-marketplace/redhat-marketplace-st8kl"
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.808466 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e1f60f0-f534-46e7-a463-648084a52735-utilities\") pod \"redhat-marketplace-st8kl\" (UID: \"2e1f60f0-f534-46e7-a463-648084a52735\") " pod="openshift-marketplace/redhat-marketplace-st8kl"
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.808515 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kzpn\" (UniqueName: \"kubernetes.io/projected/2e1f60f0-f534-46e7-a463-648084a52735-kube-api-access-7kzpn\") pod \"redhat-marketplace-st8kl\" (UID: \"2e1f60f0-f534-46e7-a463-648084a52735\") " pod="openshift-marketplace/redhat-marketplace-st8kl"
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.808920 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e1f60f0-f534-46e7-a463-648084a52735-catalog-content\") pod \"redhat-marketplace-st8kl\" (UID: \"2e1f60f0-f534-46e7-a463-648084a52735\") " pod="openshift-marketplace/redhat-marketplace-st8kl"
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.809273 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e1f60f0-f534-46e7-a463-648084a52735-utilities\") pod \"redhat-marketplace-st8kl\" (UID: \"2e1f60f0-f534-46e7-a463-648084a52735\") " pod="openshift-marketplace/redhat-marketplace-st8kl"
Nov 27 08:36:31 crc kubenswrapper[4971]: I1127 08:36:31.832198 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kzpn\" (UniqueName: \"kubernetes.io/projected/2e1f60f0-f534-46e7-a463-648084a52735-kube-api-access-7kzpn\") pod \"redhat-marketplace-st8kl\" (UID: \"2e1f60f0-f534-46e7-a463-648084a52735\") " pod="openshift-marketplace/redhat-marketplace-st8kl"
Nov 27 08:36:32 crc kubenswrapper[4971]: I1127 08:36:32.034077 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-st8kl"
Nov 27 08:36:32 crc kubenswrapper[4971]: I1127 08:36:32.186378 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"]
Nov 27 08:36:32 crc kubenswrapper[4971]: I1127 08:36:32.546068 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-st8kl"]
Nov 27 08:36:32 crc kubenswrapper[4971]: W1127 08:36:32.555614 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2e1f60f0_f534_46e7_a463_648084a52735.slice/crio-c4f1a333b77b2b3a74d3cf72d7a866433e00cacaa2527b4e49390f70bfcde7fa WatchSource:0}: Error finding container c4f1a333b77b2b3a74d3cf72d7a866433e00cacaa2527b4e49390f70bfcde7fa: Status 404 returned error can't find the container with id c4f1a333b77b2b3a74d3cf72d7a866433e00cacaa2527b4e49390f70bfcde7fa
Nov 27 08:36:33 crc kubenswrapper[4971]: I1127 08:36:33.138141 4971 generic.go:334] "Generic (PLEG): container finished" podID="2e1f60f0-f534-46e7-a463-648084a52735" containerID="3e5cfae167062f384499315c7a0e2096ff6bad9bc46af29437b142b102138aff" exitCode=0
Nov 27 08:36:33 crc kubenswrapper[4971]: I1127 08:36:33.138271 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-st8kl" event={"ID":"2e1f60f0-f534-46e7-a463-648084a52735","Type":"ContainerDied","Data":"3e5cfae167062f384499315c7a0e2096ff6bad9bc46af29437b142b102138aff"}
Nov 27 08:36:33 crc kubenswrapper[4971]: I1127 08:36:33.138373 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-st8kl" event={"ID":"2e1f60f0-f534-46e7-a463-648084a52735","Type":"ContainerStarted","Data":"c4f1a333b77b2b3a74d3cf72d7a866433e00cacaa2527b4e49390f70bfcde7fa"}
Nov 27 08:36:33 crc kubenswrapper[4971]: I1127 08:36:33.142159 4971 generic.go:334] "Generic (PLEG): container finished" podID="b57de533-e1b6-4cec-aa1e-510abad03ff5" containerID="5ef0927681e9ea7f290248ba6100b7954aca82b758961aad1c3cb961bf30a48c" exitCode=0
Nov 27 08:36:33 crc kubenswrapper[4971]: I1127 08:36:33.142196 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"b57de533-e1b6-4cec-aa1e-510abad03ff5","Type":"ContainerDied","Data":"5ef0927681e9ea7f290248ba6100b7954aca82b758961aad1c3cb961bf30a48c"}
Nov 27 08:36:33 crc kubenswrapper[4971]: I1127 08:36:33.142211 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"b57de533-e1b6-4cec-aa1e-510abad03ff5","Type":"ContainerStarted","Data":"469cc7ceb8b7411d0174117cfa7d34026cf9da7ef388f8485fd474cb39e1540c"}
Nov 27 08:36:34 crc kubenswrapper[4971]: I1127 08:36:34.154994 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-st8kl" event={"ID":"2e1f60f0-f534-46e7-a463-648084a52735","Type":"ContainerStarted","Data":"757e4f6c5d31f9fdb4b1fdae4d606fffff3f6c3d8bfa656fb96e03bb2cd4fbac"}
Nov 27 08:36:34 crc kubenswrapper[4971]: I1127 08:36:34.535104 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default"
Nov 27 08:36:34 crc kubenswrapper[4971]: I1127 08:36:34.553737 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6z6r\" (UniqueName: \"kubernetes.io/projected/b57de533-e1b6-4cec-aa1e-510abad03ff5-kube-api-access-h6z6r\") pod \"b57de533-e1b6-4cec-aa1e-510abad03ff5\" (UID: \"b57de533-e1b6-4cec-aa1e-510abad03ff5\") "
Nov 27 08:36:34 crc kubenswrapper[4971]: I1127 08:36:34.571958 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b57de533-e1b6-4cec-aa1e-510abad03ff5-kube-api-access-h6z6r" (OuterVolumeSpecName: "kube-api-access-h6z6r") pod "b57de533-e1b6-4cec-aa1e-510abad03ff5" (UID: "b57de533-e1b6-4cec-aa1e-510abad03ff5"). InnerVolumeSpecName "kube-api-access-h6z6r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:36:34 crc kubenswrapper[4971]: I1127 08:36:34.600663 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-5-default_b57de533-e1b6-4cec-aa1e-510abad03ff5/mariadb-client-5-default/0.log"
Nov 27 08:36:34 crc kubenswrapper[4971]: I1127 08:36:34.631337 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-5-default"]
Nov 27 08:36:34 crc kubenswrapper[4971]: I1127 08:36:34.637524 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-5-default"]
Nov 27 08:36:34 crc kubenswrapper[4971]: I1127 08:36:34.656620 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6z6r\" (UniqueName: \"kubernetes.io/projected/b57de533-e1b6-4cec-aa1e-510abad03ff5-kube-api-access-h6z6r\") on node \"crc\" DevicePath \"\""
Nov 27 08:36:34 crc kubenswrapper[4971]: I1127 08:36:34.803766 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-6-default"]
Nov 27 08:36:34 crc kubenswrapper[4971]: E1127 08:36:34.804286 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b57de533-e1b6-4cec-aa1e-510abad03ff5" containerName="mariadb-client-5-default"
Nov 27 08:36:34 crc kubenswrapper[4971]: I1127 08:36:34.804301 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b57de533-e1b6-4cec-aa1e-510abad03ff5" containerName="mariadb-client-5-default"
Nov 27 08:36:34 crc kubenswrapper[4971]: I1127 08:36:34.804499 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b57de533-e1b6-4cec-aa1e-510abad03ff5" containerName="mariadb-client-5-default"
Nov 27 08:36:34 crc kubenswrapper[4971]: I1127 08:36:34.805196 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default"
Nov 27 08:36:34 crc kubenswrapper[4971]: I1127 08:36:34.810466 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"]
Nov 27 08:36:34 crc kubenswrapper[4971]: I1127 08:36:34.860376 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgdtg\" (UniqueName: \"kubernetes.io/projected/41f4cbc9-1f79-4cf8-928f-782afdc845ae-kube-api-access-hgdtg\") pod \"mariadb-client-6-default\" (UID: \"41f4cbc9-1f79-4cf8-928f-782afdc845ae\") " pod="openstack/mariadb-client-6-default"
Nov 27 08:36:34 crc kubenswrapper[4971]: I1127 08:36:34.963058 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgdtg\" (UniqueName: \"kubernetes.io/projected/41f4cbc9-1f79-4cf8-928f-782afdc845ae-kube-api-access-hgdtg\") pod \"mariadb-client-6-default\" (UID: \"41f4cbc9-1f79-4cf8-928f-782afdc845ae\") " pod="openstack/mariadb-client-6-default"
Nov 27 08:36:34 crc kubenswrapper[4971]: I1127 08:36:34.983248 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgdtg\" (UniqueName: \"kubernetes.io/projected/41f4cbc9-1f79-4cf8-928f-782afdc845ae-kube-api-access-hgdtg\") pod \"mariadb-client-6-default\" (UID: \"41f4cbc9-1f79-4cf8-928f-782afdc845ae\") " pod="openstack/mariadb-client-6-default"
Nov 27 08:36:35 crc kubenswrapper[4971]: I1127 08:36:35.154127 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default"
Nov 27 08:36:35 crc kubenswrapper[4971]: I1127 08:36:35.178737 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="469cc7ceb8b7411d0174117cfa7d34026cf9da7ef388f8485fd474cb39e1540c"
Nov 27 08:36:35 crc kubenswrapper[4971]: I1127 08:36:35.178819 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default"
Nov 27 08:36:35 crc kubenswrapper[4971]: I1127 08:36:35.182067 4971 generic.go:334] "Generic (PLEG): container finished" podID="2e1f60f0-f534-46e7-a463-648084a52735" containerID="757e4f6c5d31f9fdb4b1fdae4d606fffff3f6c3d8bfa656fb96e03bb2cd4fbac" exitCode=0
Nov 27 08:36:35 crc kubenswrapper[4971]: I1127 08:36:35.182124 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-st8kl" event={"ID":"2e1f60f0-f534-46e7-a463-648084a52735","Type":"ContainerDied","Data":"757e4f6c5d31f9fdb4b1fdae4d606fffff3f6c3d8bfa656fb96e03bb2cd4fbac"}
Nov 27 08:36:35 crc kubenswrapper[4971]: I1127 08:36:35.511034 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"]
Nov 27 08:36:36 crc kubenswrapper[4971]: I1127 08:36:36.191231 4971 generic.go:334] "Generic (PLEG): container finished" podID="41f4cbc9-1f79-4cf8-928f-782afdc845ae" containerID="f048df5a920636734cbfed5dbb0163e2b83723648fb08cfd7447cb2a5ac45989" exitCode=1
Nov 27 08:36:36 crc kubenswrapper[4971]: I1127 08:36:36.191442 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"41f4cbc9-1f79-4cf8-928f-782afdc845ae","Type":"ContainerDied","Data":"f048df5a920636734cbfed5dbb0163e2b83723648fb08cfd7447cb2a5ac45989"}
Nov 27 08:36:36 crc kubenswrapper[4971]: I1127 08:36:36.191789 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"41f4cbc9-1f79-4cf8-928f-782afdc845ae","Type":"ContainerStarted","Data":"6ec6f764170cf4f5d860d70e25c7c06dc87209ec246c4ed736c4262bb4f7a19e"}
Nov 27 08:36:36 crc kubenswrapper[4971]: I1127 08:36:36.194248 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-st8kl" event={"ID":"2e1f60f0-f534-46e7-a463-648084a52735","Type":"ContainerStarted","Data":"c2cc8ffcb103870387ddf873918672007129d6922b69c9b5e247f6dd6ad38974"}
Nov 27 08:36:36 crc kubenswrapper[4971]: I1127 08:36:36.244603 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-st8kl" podStartSLOduration=2.753864725 podStartE2EDuration="5.24457397s" podCreationTimestamp="2025-11-27 08:36:31 +0000 UTC" firstStartedPulling="2025-11-27 08:36:33.140369391 +0000 UTC m=+6231.332413309" lastFinishedPulling="2025-11-27 08:36:35.631078636 +0000 UTC m=+6233.823122554" observedRunningTime="2025-11-27 08:36:36.24172682 +0000 UTC m=+6234.433770738" watchObservedRunningTime="2025-11-27 08:36:36.24457397 +0000 UTC m=+6234.436617898"
Nov 27 08:36:36 crc kubenswrapper[4971]: I1127 08:36:36.570195 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b57de533-e1b6-4cec-aa1e-510abad03ff5" path="/var/lib/kubelet/pods/b57de533-e1b6-4cec-aa1e-510abad03ff5/volumes"
Nov 27 08:36:37 crc kubenswrapper[4971]: I1127 08:36:37.597250 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default"
Nov 27 08:36:37 crc kubenswrapper[4971]: I1127 08:36:37.603784 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgdtg\" (UniqueName: \"kubernetes.io/projected/41f4cbc9-1f79-4cf8-928f-782afdc845ae-kube-api-access-hgdtg\") pod \"41f4cbc9-1f79-4cf8-928f-782afdc845ae\" (UID: \"41f4cbc9-1f79-4cf8-928f-782afdc845ae\") "
Nov 27 08:36:37 crc kubenswrapper[4971]: I1127 08:36:37.616783 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41f4cbc9-1f79-4cf8-928f-782afdc845ae-kube-api-access-hgdtg" (OuterVolumeSpecName: "kube-api-access-hgdtg") pod "41f4cbc9-1f79-4cf8-928f-782afdc845ae" (UID: "41f4cbc9-1f79-4cf8-928f-782afdc845ae"). InnerVolumeSpecName "kube-api-access-hgdtg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:36:37 crc kubenswrapper[4971]: I1127 08:36:37.654326 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-6-default_41f4cbc9-1f79-4cf8-928f-782afdc845ae/mariadb-client-6-default/0.log"
Nov 27 08:36:37 crc kubenswrapper[4971]: I1127 08:36:37.687338 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-6-default"]
Nov 27 08:36:37 crc kubenswrapper[4971]: I1127 08:36:37.694018 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-6-default"]
Nov 27 08:36:37 crc kubenswrapper[4971]: I1127 08:36:37.706810 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgdtg\" (UniqueName: \"kubernetes.io/projected/41f4cbc9-1f79-4cf8-928f-782afdc845ae-kube-api-access-hgdtg\") on node \"crc\" DevicePath \"\""
Nov 27 08:36:37 crc kubenswrapper[4971]: I1127 08:36:37.855075 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-7-default"]
Nov 27 08:36:37 crc kubenswrapper[4971]: E1127 08:36:37.856512 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41f4cbc9-1f79-4cf8-928f-782afdc845ae" containerName="mariadb-client-6-default"
Nov 27 08:36:37 crc kubenswrapper[4971]: I1127 08:36:37.856643 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="41f4cbc9-1f79-4cf8-928f-782afdc845ae" containerName="mariadb-client-6-default"
Nov 27 08:36:37 crc kubenswrapper[4971]: I1127 08:36:37.857077 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="41f4cbc9-1f79-4cf8-928f-782afdc845ae" containerName="mariadb-client-6-default"
Nov 27 08:36:37 crc kubenswrapper[4971]: I1127 08:36:37.858489 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default"
Nov 27 08:36:37 crc kubenswrapper[4971]: I1127 08:36:37.862865 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"]
Nov 27 08:36:37 crc kubenswrapper[4971]: I1127 08:36:37.911261 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqsk9\" (UniqueName: \"kubernetes.io/projected/beb4834a-4e34-4390-b423-e66008166966-kube-api-access-pqsk9\") pod \"mariadb-client-7-default\" (UID: \"beb4834a-4e34-4390-b423-e66008166966\") " pod="openstack/mariadb-client-7-default"
Nov 27 08:36:38 crc kubenswrapper[4971]: I1127 08:36:38.012202 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqsk9\" (UniqueName: \"kubernetes.io/projected/beb4834a-4e34-4390-b423-e66008166966-kube-api-access-pqsk9\") pod \"mariadb-client-7-default\" (UID: \"beb4834a-4e34-4390-b423-e66008166966\") " pod="openstack/mariadb-client-7-default"
Nov 27 08:36:38 crc kubenswrapper[4971]: I1127 08:36:38.030567 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqsk9\" (UniqueName: \"kubernetes.io/projected/beb4834a-4e34-4390-b423-e66008166966-kube-api-access-pqsk9\") pod \"mariadb-client-7-default\" (UID: \"beb4834a-4e34-4390-b423-e66008166966\") " pod="openstack/mariadb-client-7-default"
Nov 27 08:36:38 crc kubenswrapper[4971]: I1127 08:36:38.186922 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default"
Nov 27 08:36:38 crc kubenswrapper[4971]: I1127 08:36:38.227444 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ec6f764170cf4f5d860d70e25c7c06dc87209ec246c4ed736c4262bb4f7a19e"
Nov 27 08:36:38 crc kubenswrapper[4971]: I1127 08:36:38.227500 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default"
Nov 27 08:36:38 crc kubenswrapper[4971]: I1127 08:36:38.565432 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41f4cbc9-1f79-4cf8-928f-782afdc845ae" path="/var/lib/kubelet/pods/41f4cbc9-1f79-4cf8-928f-782afdc845ae/volumes"
Nov 27 08:36:38 crc kubenswrapper[4971]: I1127 08:36:38.727918 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"]
Nov 27 08:36:39 crc kubenswrapper[4971]: I1127 08:36:39.098308 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cg8rl"]
Nov 27 08:36:39 crc kubenswrapper[4971]: I1127 08:36:39.100599 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cg8rl"
Nov 27 08:36:39 crc kubenswrapper[4971]: I1127 08:36:39.110461 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cg8rl"]
Nov 27 08:36:39 crc kubenswrapper[4971]: I1127 08:36:39.135270 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qctb\" (UniqueName: \"kubernetes.io/projected/6c989465-d4ef-4c07-8ba1-848eacc54325-kube-api-access-2qctb\") pod \"community-operators-cg8rl\" (UID: \"6c989465-d4ef-4c07-8ba1-848eacc54325\") " pod="openshift-marketplace/community-operators-cg8rl"
Nov 27 08:36:39 crc kubenswrapper[4971]: I1127 08:36:39.135524 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c989465-d4ef-4c07-8ba1-848eacc54325-utilities\") pod \"community-operators-cg8rl\" (UID: \"6c989465-d4ef-4c07-8ba1-848eacc54325\") " pod="openshift-marketplace/community-operators-cg8rl"
Nov 27 08:36:39 crc kubenswrapper[4971]: I1127 08:36:39.135922 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c989465-d4ef-4c07-8ba1-848eacc54325-catalog-content\") pod \"community-operators-cg8rl\" (UID: \"6c989465-d4ef-4c07-8ba1-848eacc54325\") " pod="openshift-marketplace/community-operators-cg8rl"
Nov 27 08:36:39 crc kubenswrapper[4971]: I1127 08:36:39.238247 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c989465-d4ef-4c07-8ba1-848eacc54325-catalog-content\") pod \"community-operators-cg8rl\" (UID: \"6c989465-d4ef-4c07-8ba1-848eacc54325\") " pod="openshift-marketplace/community-operators-cg8rl"
Nov 27 08:36:39 crc kubenswrapper[4971]: I1127 08:36:39.238364 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qctb\" (UniqueName: \"kubernetes.io/projected/6c989465-d4ef-4c07-8ba1-848eacc54325-kube-api-access-2qctb\") pod \"community-operators-cg8rl\" (UID: \"6c989465-d4ef-4c07-8ba1-848eacc54325\") " pod="openshift-marketplace/community-operators-cg8rl"
Nov 27 08:36:39 crc kubenswrapper[4971]: I1127 08:36:39.238446 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c989465-d4ef-4c07-8ba1-848eacc54325-utilities\") pod \"community-operators-cg8rl\" (UID: \"6c989465-d4ef-4c07-8ba1-848eacc54325\") " pod="openshift-marketplace/community-operators-cg8rl"
Nov 27 08:36:39 crc kubenswrapper[4971]: I1127 08:36:39.239013 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c989465-d4ef-4c07-8ba1-848eacc54325-catalog-content\") pod \"community-operators-cg8rl\" (UID: \"6c989465-d4ef-4c07-8ba1-848eacc54325\") " pod="openshift-marketplace/community-operators-cg8rl"
Nov 27 08:36:39 crc kubenswrapper[4971]: I1127 08:36:39.239316 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c989465-d4ef-4c07-8ba1-848eacc54325-utilities\") pod \"community-operators-cg8rl\" (UID: \"6c989465-d4ef-4c07-8ba1-848eacc54325\") " pod="openshift-marketplace/community-operators-cg8rl"
Nov 27 08:36:39 crc kubenswrapper[4971]: I1127 08:36:39.240994 4971 generic.go:334] "Generic (PLEG): container finished" podID="beb4834a-4e34-4390-b423-e66008166966" containerID="f5a759743f27049bf9fd9203918bc41182ad147913669b019e24902a82332018" exitCode=0
Nov 27 08:36:39 crc kubenswrapper[4971]: I1127 08:36:39.241052 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"beb4834a-4e34-4390-b423-e66008166966","Type":"ContainerDied","Data":"f5a759743f27049bf9fd9203918bc41182ad147913669b019e24902a82332018"}
Nov 27 08:36:39 crc kubenswrapper[4971]: I1127 08:36:39.241108 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"beb4834a-4e34-4390-b423-e66008166966","Type":"ContainerStarted","Data":"4e2f2995b6dec98cd4e793a68320fb8fbcfc8b22052d388488bfa9789d5fda91"}
Nov 27 08:36:39 crc kubenswrapper[4971]: I1127 08:36:39.267868 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qctb\" (UniqueName: \"kubernetes.io/projected/6c989465-d4ef-4c07-8ba1-848eacc54325-kube-api-access-2qctb\") pod \"community-operators-cg8rl\" (UID: \"6c989465-d4ef-4c07-8ba1-848eacc54325\") " pod="openshift-marketplace/community-operators-cg8rl"
Nov 27 08:36:39 crc kubenswrapper[4971]: I1127 08:36:39.434563 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cg8rl"
Nov 27 08:36:39 crc kubenswrapper[4971]: I1127 08:36:39.963065 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cg8rl"]
Nov 27 08:36:39 crc kubenswrapper[4971]: W1127 08:36:39.967073 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c989465_d4ef_4c07_8ba1_848eacc54325.slice/crio-d84cd90aa8d46047cf9d617fb360dba47511a5c952530dd2619b1298f0527aeb WatchSource:0}: Error finding container d84cd90aa8d46047cf9d617fb360dba47511a5c952530dd2619b1298f0527aeb: Status 404 returned error can't find the container with id d84cd90aa8d46047cf9d617fb360dba47511a5c952530dd2619b1298f0527aeb
Nov 27 08:36:40 crc kubenswrapper[4971]: I1127 08:36:40.251156 4971 generic.go:334] "Generic (PLEG): container finished" podID="6c989465-d4ef-4c07-8ba1-848eacc54325" containerID="7f39a207494b3ebc150336843c7731d44b5689fa97116677ce595ce4ff4fea7a" exitCode=0
Nov 27 08:36:40 crc kubenswrapper[4971]: I1127 08:36:40.251259 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg8rl" event={"ID":"6c989465-d4ef-4c07-8ba1-848eacc54325","Type":"ContainerDied","Data":"7f39a207494b3ebc150336843c7731d44b5689fa97116677ce595ce4ff4fea7a"}
Nov 27 08:36:40 crc kubenswrapper[4971]: I1127 08:36:40.251319 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg8rl" event={"ID":"6c989465-d4ef-4c07-8ba1-848eacc54325","Type":"ContainerStarted","Data":"d84cd90aa8d46047cf9d617fb360dba47511a5c952530dd2619b1298f0527aeb"}
Nov 27 08:36:40 crc kubenswrapper[4971]: I1127 08:36:40.253106 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 27 08:36:40 crc kubenswrapper[4971]: I1127 08:36:40.607362 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default"
Nov 27 08:36:40 crc kubenswrapper[4971]: I1127 08:36:40.631603 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-7-default_beb4834a-4e34-4390-b423-e66008166966/mariadb-client-7-default/0.log"
Nov 27 08:36:40 crc kubenswrapper[4971]: I1127 08:36:40.658427 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pqsk9\" (UniqueName: \"kubernetes.io/projected/beb4834a-4e34-4390-b423-e66008166966-kube-api-access-pqsk9\") pod \"beb4834a-4e34-4390-b423-e66008166966\" (UID: \"beb4834a-4e34-4390-b423-e66008166966\") "
Nov 27 08:36:40 crc kubenswrapper[4971]: I1127 08:36:40.662725 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-7-default"]
Nov 27 08:36:40 crc kubenswrapper[4971]: I1127 08:36:40.667624 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/beb4834a-4e34-4390-b423-e66008166966-kube-api-access-pqsk9" (OuterVolumeSpecName: "kube-api-access-pqsk9") pod "beb4834a-4e34-4390-b423-e66008166966" (UID: "beb4834a-4e34-4390-b423-e66008166966"). InnerVolumeSpecName "kube-api-access-pqsk9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:36:40 crc kubenswrapper[4971]: I1127 08:36:40.684086 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-7-default"]
Nov 27 08:36:40 crc kubenswrapper[4971]: I1127 08:36:40.760582 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pqsk9\" (UniqueName: \"kubernetes.io/projected/beb4834a-4e34-4390-b423-e66008166966-kube-api-access-pqsk9\") on node \"crc\" DevicePath \"\""
Nov 27 08:36:40 crc kubenswrapper[4971]: I1127 08:36:40.797002 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2"]
Nov 27 08:36:40 crc kubenswrapper[4971]: E1127 08:36:40.797702 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="beb4834a-4e34-4390-b423-e66008166966" containerName="mariadb-client-7-default"
Nov 27 08:36:40 crc kubenswrapper[4971]: I1127 08:36:40.797731 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="beb4834a-4e34-4390-b423-e66008166966" containerName="mariadb-client-7-default"
Nov 27 08:36:40 crc kubenswrapper[4971]: I1127 08:36:40.797943 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="beb4834a-4e34-4390-b423-e66008166966" containerName="mariadb-client-7-default"
Nov 27 08:36:40 crc kubenswrapper[4971]: I1127 08:36:40.798652 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2"
Nov 27 08:36:40 crc kubenswrapper[4971]: I1127 08:36:40.802601 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"]
Nov 27 08:36:40 crc kubenswrapper[4971]: I1127 08:36:40.862408 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcrzv\" (UniqueName: \"kubernetes.io/projected/71db05d1-9a93-48ac-94c9-c24171106cf0-kube-api-access-bcrzv\") pod \"mariadb-client-2\" (UID: \"71db05d1-9a93-48ac-94c9-c24171106cf0\") " pod="openstack/mariadb-client-2"
Nov 27 08:36:40 crc kubenswrapper[4971]: I1127 08:36:40.964151 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcrzv\" (UniqueName: \"kubernetes.io/projected/71db05d1-9a93-48ac-94c9-c24171106cf0-kube-api-access-bcrzv\") pod \"mariadb-client-2\" (UID: \"71db05d1-9a93-48ac-94c9-c24171106cf0\") " pod="openstack/mariadb-client-2"
Nov 27 08:36:40 crc kubenswrapper[4971]: I1127 08:36:40.986988 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcrzv\" (UniqueName: \"kubernetes.io/projected/71db05d1-9a93-48ac-94c9-c24171106cf0-kube-api-access-bcrzv\") pod \"mariadb-client-2\" (UID: \"71db05d1-9a93-48ac-94c9-c24171106cf0\") " pod="openstack/mariadb-client-2"
Nov 27 08:36:41 crc kubenswrapper[4971]: I1127 08:36:41.127107 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2"
Nov 27 08:36:41 crc kubenswrapper[4971]: I1127 08:36:41.293706 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e2f2995b6dec98cd4e793a68320fb8fbcfc8b22052d388488bfa9789d5fda91"
Nov 27 08:36:41 crc kubenswrapper[4971]: I1127 08:36:41.294143 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Nov 27 08:36:41 crc kubenswrapper[4971]: I1127 08:36:41.691183 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Nov 27 08:36:41 crc kubenswrapper[4971]: W1127 08:36:41.700244 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71db05d1_9a93_48ac_94c9_c24171106cf0.slice/crio-7331bc37d84cedec16989c2d58fc3dde393a01d014996a09b7b9b5bf87502974 WatchSource:0}: Error finding container 7331bc37d84cedec16989c2d58fc3dde393a01d014996a09b7b9b5bf87502974: Status 404 returned error can't find the container with id 7331bc37d84cedec16989c2d58fc3dde393a01d014996a09b7b9b5bf87502974 Nov 27 08:36:42 crc kubenswrapper[4971]: I1127 08:36:42.034275 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-st8kl" Nov 27 08:36:42 crc kubenswrapper[4971]: I1127 08:36:42.034371 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-st8kl" Nov 27 08:36:42 crc kubenswrapper[4971]: I1127 08:36:42.113772 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-st8kl" Nov 27 08:36:42 crc kubenswrapper[4971]: I1127 08:36:42.304356 4971 generic.go:334] "Generic (PLEG): container finished" podID="6c989465-d4ef-4c07-8ba1-848eacc54325" containerID="fc9928381fe779f31edfef0c44c517d48f10ec00b29e3191e72c825724eb9849" exitCode=0 Nov 27 08:36:42 crc kubenswrapper[4971]: I1127 08:36:42.304464 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg8rl" event={"ID":"6c989465-d4ef-4c07-8ba1-848eacc54325","Type":"ContainerDied","Data":"fc9928381fe779f31edfef0c44c517d48f10ec00b29e3191e72c825724eb9849"} Nov 27 08:36:42 crc kubenswrapper[4971]: I1127 08:36:42.308097 4971 generic.go:334] "Generic (PLEG): container finished" podID="71db05d1-9a93-48ac-94c9-c24171106cf0" containerID="02dc887e019a5735b4e12e89e83671965d6c7d82d5dc9f8a1c1f67c0917127da" exitCode=0 Nov 27 08:36:42 crc kubenswrapper[4971]: I1127 08:36:42.308164 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"71db05d1-9a93-48ac-94c9-c24171106cf0","Type":"ContainerDied","Data":"02dc887e019a5735b4e12e89e83671965d6c7d82d5dc9f8a1c1f67c0917127da"} Nov 27 08:36:42 crc kubenswrapper[4971]: I1127 08:36:42.308209 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"71db05d1-9a93-48ac-94c9-c24171106cf0","Type":"ContainerStarted","Data":"7331bc37d84cedec16989c2d58fc3dde393a01d014996a09b7b9b5bf87502974"} Nov 27 08:36:42 crc kubenswrapper[4971]: I1127 08:36:42.366949 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-st8kl" Nov 27 08:36:42 crc kubenswrapper[4971]: I1127 08:36:42.563187 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="beb4834a-4e34-4390-b423-e66008166966" path="/var/lib/kubelet/pods/beb4834a-4e34-4390-b423-e66008166966/volumes" Nov 27 08:36:43 crc kubenswrapper[4971]: I1127 08:36:43.320117 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg8rl" event={"ID":"6c989465-d4ef-4c07-8ba1-848eacc54325","Type":"ContainerStarted","Data":"d6c31e727ffd3293ed434a4de9107ce00fe2c55ce91731d774606965cf93dfab"} Nov 27 08:36:43 crc 
kubenswrapper[4971]: I1127 08:36:43.350562 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cg8rl" podStartSLOduration=1.687693694 podStartE2EDuration="4.350509644s" podCreationTimestamp="2025-11-27 08:36:39 +0000 UTC" firstStartedPulling="2025-11-27 08:36:40.252830848 +0000 UTC m=+6238.444874766" lastFinishedPulling="2025-11-27 08:36:42.915646798 +0000 UTC m=+6241.107690716" observedRunningTime="2025-11-27 08:36:43.343938089 +0000 UTC m=+6241.535982027" watchObservedRunningTime="2025-11-27 08:36:43.350509644 +0000 UTC m=+6241.542553562" Nov 27 08:36:43 crc kubenswrapper[4971]: I1127 08:36:43.710165 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Nov 27 08:36:43 crc kubenswrapper[4971]: I1127 08:36:43.732935 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2_71db05d1-9a93-48ac-94c9-c24171106cf0/mariadb-client-2/0.log" Nov 27 08:36:43 crc kubenswrapper[4971]: I1127 08:36:43.763076 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2"] Nov 27 08:36:43 crc kubenswrapper[4971]: I1127 08:36:43.769744 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2"] Nov 27 08:36:43 crc kubenswrapper[4971]: I1127 08:36:43.829393 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcrzv\" (UniqueName: \"kubernetes.io/projected/71db05d1-9a93-48ac-94c9-c24171106cf0-kube-api-access-bcrzv\") pod \"71db05d1-9a93-48ac-94c9-c24171106cf0\" (UID: \"71db05d1-9a93-48ac-94c9-c24171106cf0\") " Nov 27 08:36:43 crc kubenswrapper[4971]: I1127 08:36:43.837203 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71db05d1-9a93-48ac-94c9-c24171106cf0-kube-api-access-bcrzv" (OuterVolumeSpecName: "kube-api-access-bcrzv") pod "71db05d1-9a93-48ac-94c9-c24171106cf0" (UID: "71db05d1-9a93-48ac-94c9-c24171106cf0"). InnerVolumeSpecName "kube-api-access-bcrzv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:36:43 crc kubenswrapper[4971]: I1127 08:36:43.931669 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcrzv\" (UniqueName: \"kubernetes.io/projected/71db05d1-9a93-48ac-94c9-c24171106cf0-kube-api-access-bcrzv\") on node \"crc\" DevicePath \"\"" Nov 27 08:36:44 crc kubenswrapper[4971]: I1127 08:36:44.035252 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-st8kl"] Nov 27 08:36:44 crc kubenswrapper[4971]: I1127 08:36:44.331723 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7331bc37d84cedec16989c2d58fc3dde393a01d014996a09b7b9b5bf87502974" Nov 27 08:36:44 crc kubenswrapper[4971]: I1127 08:36:44.331874 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2" Nov 27 08:36:44 crc kubenswrapper[4971]: I1127 08:36:44.331951 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-st8kl" podUID="2e1f60f0-f534-46e7-a463-648084a52735" containerName="registry-server" containerID="cri-o://c2cc8ffcb103870387ddf873918672007129d6922b69c9b5e247f6dd6ad38974" gracePeriod=2 Nov 27 08:36:44 crc kubenswrapper[4971]: I1127 08:36:44.581072 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71db05d1-9a93-48ac-94c9-c24171106cf0" path="/var/lib/kubelet/pods/71db05d1-9a93-48ac-94c9-c24171106cf0/volumes" Nov 27 08:36:44 crc kubenswrapper[4971]: I1127 08:36:44.723699 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-st8kl" Nov 27 08:36:44 crc kubenswrapper[4971]: I1127 08:36:44.749634 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e1f60f0-f534-46e7-a463-648084a52735-catalog-content\") pod \"2e1f60f0-f534-46e7-a463-648084a52735\" (UID: \"2e1f60f0-f534-46e7-a463-648084a52735\") " Nov 27 08:36:44 crc kubenswrapper[4971]: I1127 08:36:44.749738 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7kzpn\" (UniqueName: \"kubernetes.io/projected/2e1f60f0-f534-46e7-a463-648084a52735-kube-api-access-7kzpn\") pod \"2e1f60f0-f534-46e7-a463-648084a52735\" (UID: \"2e1f60f0-f534-46e7-a463-648084a52735\") " Nov 27 08:36:44 crc kubenswrapper[4971]: I1127 08:36:44.749871 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e1f60f0-f534-46e7-a463-648084a52735-utilities\") pod \"2e1f60f0-f534-46e7-a463-648084a52735\" (UID: \"2e1f60f0-f534-46e7-a463-648084a52735\") " Nov 27 08:36:44 crc kubenswrapper[4971]: I1127 08:36:44.751593 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e1f60f0-f534-46e7-a463-648084a52735-utilities" (OuterVolumeSpecName: "utilities") pod "2e1f60f0-f534-46e7-a463-648084a52735" (UID: "2e1f60f0-f534-46e7-a463-648084a52735"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:36:44 crc kubenswrapper[4971]: I1127 08:36:44.754351 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e1f60f0-f534-46e7-a463-648084a52735-kube-api-access-7kzpn" (OuterVolumeSpecName: "kube-api-access-7kzpn") pod "2e1f60f0-f534-46e7-a463-648084a52735" (UID: "2e1f60f0-f534-46e7-a463-648084a52735"). InnerVolumeSpecName "kube-api-access-7kzpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:36:44 crc kubenswrapper[4971]: I1127 08:36:44.779150 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e1f60f0-f534-46e7-a463-648084a52735-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2e1f60f0-f534-46e7-a463-648084a52735" (UID: "2e1f60f0-f534-46e7-a463-648084a52735"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:36:44 crc kubenswrapper[4971]: I1127 08:36:44.851838 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e1f60f0-f534-46e7-a463-648084a52735-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 08:36:44 crc kubenswrapper[4971]: I1127 08:36:44.851891 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7kzpn\" (UniqueName: \"kubernetes.io/projected/2e1f60f0-f534-46e7-a463-648084a52735-kube-api-access-7kzpn\") on node \"crc\" DevicePath \"\"" Nov 27 08:36:44 crc kubenswrapper[4971]: I1127 08:36:44.851906 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e1f60f0-f534-46e7-a463-648084a52735-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 08:36:45 crc kubenswrapper[4971]: I1127 08:36:45.346631 4971 generic.go:334] "Generic (PLEG): container finished" podID="2e1f60f0-f534-46e7-a463-648084a52735" containerID="c2cc8ffcb103870387ddf873918672007129d6922b69c9b5e247f6dd6ad38974" exitCode=0 Nov 27 08:36:45 crc kubenswrapper[4971]: I1127 08:36:45.346695 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-st8kl" event={"ID":"2e1f60f0-f534-46e7-a463-648084a52735","Type":"ContainerDied","Data":"c2cc8ffcb103870387ddf873918672007129d6922b69c9b5e247f6dd6ad38974"} Nov 27 08:36:45 crc kubenswrapper[4971]: I1127 08:36:45.346735 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-st8kl" event={"ID":"2e1f60f0-f534-46e7-a463-648084a52735","Type":"ContainerDied","Data":"c4f1a333b77b2b3a74d3cf72d7a866433e00cacaa2527b4e49390f70bfcde7fa"} Nov 27 08:36:45 crc kubenswrapper[4971]: I1127 08:36:45.346760 4971 scope.go:117] "RemoveContainer" containerID="c2cc8ffcb103870387ddf873918672007129d6922b69c9b5e247f6dd6ad38974" Nov 27 08:36:45 crc kubenswrapper[4971]: I1127 08:36:45.346773 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-st8kl" Nov 27 08:36:45 crc kubenswrapper[4971]: I1127 08:36:45.371705 4971 scope.go:117] "RemoveContainer" containerID="757e4f6c5d31f9fdb4b1fdae4d606fffff3f6c3d8bfa656fb96e03bb2cd4fbac" Nov 27 08:36:45 crc kubenswrapper[4971]: I1127 08:36:45.389648 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-st8kl"] Nov 27 08:36:45 crc kubenswrapper[4971]: I1127 08:36:45.400044 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-st8kl"] Nov 27 08:36:45 crc kubenswrapper[4971]: I1127 08:36:45.428923 4971 scope.go:117] "RemoveContainer" containerID="3e5cfae167062f384499315c7a0e2096ff6bad9bc46af29437b142b102138aff" Nov 27 08:36:45 crc kubenswrapper[4971]: I1127 08:36:45.453087 4971 scope.go:117] "RemoveContainer" containerID="c2cc8ffcb103870387ddf873918672007129d6922b69c9b5e247f6dd6ad38974" Nov 27 08:36:45 crc kubenswrapper[4971]: E1127 08:36:45.453736 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2cc8ffcb103870387ddf873918672007129d6922b69c9b5e247f6dd6ad38974\": container with ID starting with c2cc8ffcb103870387ddf873918672007129d6922b69c9b5e247f6dd6ad38974 not found: ID does not exist" containerID="c2cc8ffcb103870387ddf873918672007129d6922b69c9b5e247f6dd6ad38974" Nov 27 08:36:45 crc kubenswrapper[4971]: I1127 08:36:45.453815 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2cc8ffcb103870387ddf873918672007129d6922b69c9b5e247f6dd6ad38974"} err="failed to get container status \"c2cc8ffcb103870387ddf873918672007129d6922b69c9b5e247f6dd6ad38974\": rpc error: code = NotFound desc = could not find container \"c2cc8ffcb103870387ddf873918672007129d6922b69c9b5e247f6dd6ad38974\": container with ID starting with c2cc8ffcb103870387ddf873918672007129d6922b69c9b5e247f6dd6ad38974 not found: ID does not exist" Nov 27 08:36:45 crc kubenswrapper[4971]: I1127 08:36:45.453851 4971 scope.go:117] "RemoveContainer" containerID="757e4f6c5d31f9fdb4b1fdae4d606fffff3f6c3d8bfa656fb96e03bb2cd4fbac" Nov 27 08:36:45 crc kubenswrapper[4971]: E1127 08:36:45.454262 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"757e4f6c5d31f9fdb4b1fdae4d606fffff3f6c3d8bfa656fb96e03bb2cd4fbac\": container with ID starting with 757e4f6c5d31f9fdb4b1fdae4d606fffff3f6c3d8bfa656fb96e03bb2cd4fbac not found: ID does not exist" containerID="757e4f6c5d31f9fdb4b1fdae4d606fffff3f6c3d8bfa656fb96e03bb2cd4fbac" Nov 27 08:36:45 crc kubenswrapper[4971]: I1127 08:36:45.454289 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"757e4f6c5d31f9fdb4b1fdae4d606fffff3f6c3d8bfa656fb96e03bb2cd4fbac"} err="failed to get container status \"757e4f6c5d31f9fdb4b1fdae4d606fffff3f6c3d8bfa656fb96e03bb2cd4fbac\": rpc error: code = NotFound desc = could not find container \"757e4f6c5d31f9fdb4b1fdae4d606fffff3f6c3d8bfa656fb96e03bb2cd4fbac\": container with ID starting with 757e4f6c5d31f9fdb4b1fdae4d606fffff3f6c3d8bfa656fb96e03bb2cd4fbac not found: ID does not exist" Nov 27 08:36:45 crc kubenswrapper[4971]: I1127 08:36:45.454305 4971 scope.go:117] "RemoveContainer" containerID="3e5cfae167062f384499315c7a0e2096ff6bad9bc46af29437b142b102138aff" Nov 27 08:36:45 crc kubenswrapper[4971]: E1127 08:36:45.454565 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"3e5cfae167062f384499315c7a0e2096ff6bad9bc46af29437b142b102138aff\": container with ID starting with 3e5cfae167062f384499315c7a0e2096ff6bad9bc46af29437b142b102138aff not found: ID does not exist" containerID="3e5cfae167062f384499315c7a0e2096ff6bad9bc46af29437b142b102138aff" Nov 27 08:36:45 crc kubenswrapper[4971]: I1127 08:36:45.454600 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e5cfae167062f384499315c7a0e2096ff6bad9bc46af29437b142b102138aff"} err="failed to get container status \"3e5cfae167062f384499315c7a0e2096ff6bad9bc46af29437b142b102138aff\": rpc error: code = NotFound desc = could not find container \"3e5cfae167062f384499315c7a0e2096ff6bad9bc46af29437b142b102138aff\": container with ID starting with 3e5cfae167062f384499315c7a0e2096ff6bad9bc46af29437b142b102138aff not found: ID does not exist" Nov 27 08:36:46 crc kubenswrapper[4971]: I1127 08:36:46.566306 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e1f60f0-f534-46e7-a463-648084a52735" path="/var/lib/kubelet/pods/2e1f60f0-f534-46e7-a463-648084a52735/volumes" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.310199 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6xh4r"] Nov 27 08:36:49 crc kubenswrapper[4971]: E1127 08:36:49.311057 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e1f60f0-f534-46e7-a463-648084a52735" containerName="registry-server" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.311078 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e1f60f0-f534-46e7-a463-648084a52735" containerName="registry-server" Nov 27 08:36:49 crc kubenswrapper[4971]: E1127 08:36:49.311109 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e1f60f0-f534-46e7-a463-648084a52735" containerName="extract-content" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.311117 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e1f60f0-f534-46e7-a463-648084a52735" containerName="extract-content" Nov 27 08:36:49 crc kubenswrapper[4971]: E1127 08:36:49.311132 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e1f60f0-f534-46e7-a463-648084a52735" containerName="extract-utilities" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.311141 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e1f60f0-f534-46e7-a463-648084a52735" containerName="extract-utilities" Nov 27 08:36:49 crc kubenswrapper[4971]: E1127 08:36:49.311166 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71db05d1-9a93-48ac-94c9-c24171106cf0" containerName="mariadb-client-2" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.311174 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="71db05d1-9a93-48ac-94c9-c24171106cf0" containerName="mariadb-client-2" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.311376 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e1f60f0-f534-46e7-a463-648084a52735" containerName="registry-server" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.311393 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="71db05d1-9a93-48ac-94c9-c24171106cf0" containerName="mariadb-client-2" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.313144 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6xh4r" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.324455 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6xh4r"] Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.435439 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cg8rl" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.435906 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cg8rl" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.437979 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00-catalog-content\") pod \"certified-operators-6xh4r\" (UID: \"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00\") " pod="openshift-marketplace/certified-operators-6xh4r" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.438112 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjccj\" (UniqueName: \"kubernetes.io/projected/c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00-kube-api-access-cjccj\") pod \"certified-operators-6xh4r\" (UID: \"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00\") " pod="openshift-marketplace/certified-operators-6xh4r" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.438675 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00-utilities\") pod \"certified-operators-6xh4r\" (UID: \"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00\") " pod="openshift-marketplace/certified-operators-6xh4r" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.505522 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cg8rl" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.540216 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00-utilities\") pod \"certified-operators-6xh4r\" (UID: \"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00\") " pod="openshift-marketplace/certified-operators-6xh4r" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.540281 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00-catalog-content\") pod \"certified-operators-6xh4r\" (UID: \"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00\") " pod="openshift-marketplace/certified-operators-6xh4r" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.540352 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjccj\" (UniqueName: \"kubernetes.io/projected/c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00-kube-api-access-cjccj\") pod \"certified-operators-6xh4r\" (UID: \"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00\") " pod="openshift-marketplace/certified-operators-6xh4r" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.541252 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00-catalog-content\") pod \"certified-operators-6xh4r\" (UID: 
\"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00\") " pod="openshift-marketplace/certified-operators-6xh4r" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.541285 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00-utilities\") pod \"certified-operators-6xh4r\" (UID: \"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00\") " pod="openshift-marketplace/certified-operators-6xh4r" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.585766 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjccj\" (UniqueName: \"kubernetes.io/projected/c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00-kube-api-access-cjccj\") pod \"certified-operators-6xh4r\" (UID: \"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00\") " pod="openshift-marketplace/certified-operators-6xh4r" Nov 27 08:36:49 crc kubenswrapper[4971]: I1127 08:36:49.650737 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6xh4r" Nov 27 08:36:50 crc kubenswrapper[4971]: I1127 08:36:50.192616 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6xh4r"] Nov 27 08:36:50 crc kubenswrapper[4971]: I1127 08:36:50.401999 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6xh4r" event={"ID":"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00","Type":"ContainerStarted","Data":"c1e0920bb83972975c70f92520aac2ebedab65d8d78d8ef1bd9c25909cfd47ba"} Nov 27 08:36:50 crc kubenswrapper[4971]: I1127 08:36:50.457724 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cg8rl" Nov 27 08:36:51 crc kubenswrapper[4971]: I1127 08:36:51.417003 4971 generic.go:334] "Generic (PLEG): container finished" podID="c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00" containerID="efb2163926490635592cb9cd344c7fa4fe4bc259d911fd29164e1cc4976a40ed" exitCode=0 Nov 27 08:36:51 crc kubenswrapper[4971]: I1127 08:36:51.417056 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6xh4r" event={"ID":"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00","Type":"ContainerDied","Data":"efb2163926490635592cb9cd344c7fa4fe4bc259d911fd29164e1cc4976a40ed"} Nov 27 08:36:51 crc kubenswrapper[4971]: I1127 08:36:51.880810 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cg8rl"] Nov 27 08:36:52 crc kubenswrapper[4971]: I1127 08:36:52.429298 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6xh4r" event={"ID":"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00","Type":"ContainerStarted","Data":"f7781e9907ed3c9409be0fe55f1a677846b8bfd0941b323276bfd8e0b121ddd3"} Nov 27 08:36:53 crc kubenswrapper[4971]: I1127 08:36:53.442546 4971 generic.go:334] "Generic (PLEG): container finished" podID="c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00" containerID="f7781e9907ed3c9409be0fe55f1a677846b8bfd0941b323276bfd8e0b121ddd3" exitCode=0 Nov 27 08:36:53 crc kubenswrapper[4971]: I1127 08:36:53.443385 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cg8rl" podUID="6c989465-d4ef-4c07-8ba1-848eacc54325" containerName="registry-server" containerID="cri-o://d6c31e727ffd3293ed434a4de9107ce00fe2c55ce91731d774606965cf93dfab" gracePeriod=2 Nov 27 08:36:53 crc kubenswrapper[4971]: I1127 08:36:53.443670 4971 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/certified-operators-6xh4r" event={"ID":"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00","Type":"ContainerDied","Data":"f7781e9907ed3c9409be0fe55f1a677846b8bfd0941b323276bfd8e0b121ddd3"} Nov 27 08:36:53 crc kubenswrapper[4971]: I1127 08:36:53.926397 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cg8rl" Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.040307 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qctb\" (UniqueName: \"kubernetes.io/projected/6c989465-d4ef-4c07-8ba1-848eacc54325-kube-api-access-2qctb\") pod \"6c989465-d4ef-4c07-8ba1-848eacc54325\" (UID: \"6c989465-d4ef-4c07-8ba1-848eacc54325\") " Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.040926 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c989465-d4ef-4c07-8ba1-848eacc54325-utilities\") pod \"6c989465-d4ef-4c07-8ba1-848eacc54325\" (UID: \"6c989465-d4ef-4c07-8ba1-848eacc54325\") " Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.041076 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c989465-d4ef-4c07-8ba1-848eacc54325-catalog-content\") pod \"6c989465-d4ef-4c07-8ba1-848eacc54325\" (UID: \"6c989465-d4ef-4c07-8ba1-848eacc54325\") " Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.042318 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c989465-d4ef-4c07-8ba1-848eacc54325-utilities" (OuterVolumeSpecName: "utilities") pod "6c989465-d4ef-4c07-8ba1-848eacc54325" (UID: "6c989465-d4ef-4c07-8ba1-848eacc54325"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.049767 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c989465-d4ef-4c07-8ba1-848eacc54325-kube-api-access-2qctb" (OuterVolumeSpecName: "kube-api-access-2qctb") pod "6c989465-d4ef-4c07-8ba1-848eacc54325" (UID: "6c989465-d4ef-4c07-8ba1-848eacc54325"). InnerVolumeSpecName "kube-api-access-2qctb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.105312 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c989465-d4ef-4c07-8ba1-848eacc54325-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6c989465-d4ef-4c07-8ba1-848eacc54325" (UID: "6c989465-d4ef-4c07-8ba1-848eacc54325"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.143701 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qctb\" (UniqueName: \"kubernetes.io/projected/6c989465-d4ef-4c07-8ba1-848eacc54325-kube-api-access-2qctb\") on node \"crc\" DevicePath \"\"" Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.143758 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c989465-d4ef-4c07-8ba1-848eacc54325-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.143774 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c989465-d4ef-4c07-8ba1-848eacc54325-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.457337 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6xh4r" event={"ID":"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00","Type":"ContainerStarted","Data":"dc0f762c5f68b081681bcfc0ed83b3daf983f1b9e1a17adb932c07bdd0b459c9"} Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.459695 4971 generic.go:334] "Generic (PLEG): container finished" podID="6c989465-d4ef-4c07-8ba1-848eacc54325" containerID="d6c31e727ffd3293ed434a4de9107ce00fe2c55ce91731d774606965cf93dfab" exitCode=0 Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.459735 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg8rl" event={"ID":"6c989465-d4ef-4c07-8ba1-848eacc54325","Type":"ContainerDied","Data":"d6c31e727ffd3293ed434a4de9107ce00fe2c55ce91731d774606965cf93dfab"} Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.459788 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cg8rl" Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.459806 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg8rl" event={"ID":"6c989465-d4ef-4c07-8ba1-848eacc54325","Type":"ContainerDied","Data":"d84cd90aa8d46047cf9d617fb360dba47511a5c952530dd2619b1298f0527aeb"} Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.459835 4971 scope.go:117] "RemoveContainer" containerID="d6c31e727ffd3293ed434a4de9107ce00fe2c55ce91731d774606965cf93dfab" Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.477789 4971 scope.go:117] "RemoveContainer" containerID="fc9928381fe779f31edfef0c44c517d48f10ec00b29e3191e72c825724eb9849" Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.494263 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6xh4r" podStartSLOduration=3.00597835 podStartE2EDuration="5.494243474s" podCreationTimestamp="2025-11-27 08:36:49 +0000 UTC" firstStartedPulling="2025-11-27 08:36:51.419210525 +0000 UTC m=+6249.611254443" lastFinishedPulling="2025-11-27 08:36:53.907475649 +0000 UTC m=+6252.099519567" observedRunningTime="2025-11-27 08:36:54.486227419 +0000 UTC m=+6252.678271337" watchObservedRunningTime="2025-11-27 08:36:54.494243474 +0000 UTC m=+6252.686287392" Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.510226 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cg8rl"] Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.514946 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cg8rl"] Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.516347 4971 scope.go:117] "RemoveContainer" containerID="7f39a207494b3ebc150336843c7731d44b5689fa97116677ce595ce4ff4fea7a" Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.537806 4971 scope.go:117] "RemoveContainer" containerID="d6c31e727ffd3293ed434a4de9107ce00fe2c55ce91731d774606965cf93dfab" Nov 27 08:36:54 crc kubenswrapper[4971]: E1127 08:36:54.538412 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6c31e727ffd3293ed434a4de9107ce00fe2c55ce91731d774606965cf93dfab\": container with ID starting with d6c31e727ffd3293ed434a4de9107ce00fe2c55ce91731d774606965cf93dfab not found: ID does not exist" containerID="d6c31e727ffd3293ed434a4de9107ce00fe2c55ce91731d774606965cf93dfab" Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.538457 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6c31e727ffd3293ed434a4de9107ce00fe2c55ce91731d774606965cf93dfab"} err="failed to get container status \"d6c31e727ffd3293ed434a4de9107ce00fe2c55ce91731d774606965cf93dfab\": rpc error: code = NotFound desc = could not find container \"d6c31e727ffd3293ed434a4de9107ce00fe2c55ce91731d774606965cf93dfab\": container with ID starting with d6c31e727ffd3293ed434a4de9107ce00fe2c55ce91731d774606965cf93dfab not found: ID does not exist" Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.538486 4971 scope.go:117] "RemoveContainer" containerID="fc9928381fe779f31edfef0c44c517d48f10ec00b29e3191e72c825724eb9849" Nov 27 08:36:54 crc kubenswrapper[4971]: E1127 08:36:54.538945 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"fc9928381fe779f31edfef0c44c517d48f10ec00b29e3191e72c825724eb9849\": container with ID starting with fc9928381fe779f31edfef0c44c517d48f10ec00b29e3191e72c825724eb9849 not found: ID does not exist" containerID="fc9928381fe779f31edfef0c44c517d48f10ec00b29e3191e72c825724eb9849" Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.538971 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc9928381fe779f31edfef0c44c517d48f10ec00b29e3191e72c825724eb9849"} err="failed to get container status \"fc9928381fe779f31edfef0c44c517d48f10ec00b29e3191e72c825724eb9849\": rpc error: code = NotFound desc = could not find container \"fc9928381fe779f31edfef0c44c517d48f10ec00b29e3191e72c825724eb9849\": container with ID starting with fc9928381fe779f31edfef0c44c517d48f10ec00b29e3191e72c825724eb9849 not found: ID does not exist" Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.538984 4971 scope.go:117] "RemoveContainer" containerID="7f39a207494b3ebc150336843c7731d44b5689fa97116677ce595ce4ff4fea7a" Nov 27 08:36:54 crc kubenswrapper[4971]: E1127 08:36:54.539219 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f39a207494b3ebc150336843c7731d44b5689fa97116677ce595ce4ff4fea7a\": container with ID starting with 7f39a207494b3ebc150336843c7731d44b5689fa97116677ce595ce4ff4fea7a not found: ID does not exist" containerID="7f39a207494b3ebc150336843c7731d44b5689fa97116677ce595ce4ff4fea7a" Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.539241 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f39a207494b3ebc150336843c7731d44b5689fa97116677ce595ce4ff4fea7a"} err="failed to get container status \"7f39a207494b3ebc150336843c7731d44b5689fa97116677ce595ce4ff4fea7a\": rpc error: code = NotFound desc = could not find container \"7f39a207494b3ebc150336843c7731d44b5689fa97116677ce595ce4ff4fea7a\": container with ID starting with 7f39a207494b3ebc150336843c7731d44b5689fa97116677ce595ce4ff4fea7a not found: ID does not exist" Nov 27 08:36:54 crc kubenswrapper[4971]: I1127 08:36:54.572346 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c989465-d4ef-4c07-8ba1-848eacc54325" path="/var/lib/kubelet/pods/6c989465-d4ef-4c07-8ba1-848eacc54325/volumes" Nov 27 08:36:59 crc kubenswrapper[4971]: I1127 08:36:59.651928 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6xh4r" Nov 27 08:36:59 crc kubenswrapper[4971]: I1127 08:36:59.652873 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6xh4r" Nov 27 08:36:59 crc kubenswrapper[4971]: I1127 08:36:59.712580 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6xh4r" Nov 27 08:37:00 crc kubenswrapper[4971]: I1127 08:37:00.563593 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6xh4r" Nov 27 08:37:00 crc kubenswrapper[4971]: I1127 08:37:00.620187 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6xh4r"] Nov 27 08:37:02 crc kubenswrapper[4971]: I1127 08:37:02.536821 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6xh4r" podUID="c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00" 
containerName="registry-server" containerID="cri-o://dc0f762c5f68b081681bcfc0ed83b3daf983f1b9e1a17adb932c07bdd0b459c9" gracePeriod=2 Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.028099 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6xh4r" Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.108633 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cjccj\" (UniqueName: \"kubernetes.io/projected/c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00-kube-api-access-cjccj\") pod \"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00\" (UID: \"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00\") " Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.108777 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00-catalog-content\") pod \"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00\" (UID: \"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00\") " Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.108931 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00-utilities\") pod \"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00\" (UID: \"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00\") " Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.110220 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00-utilities" (OuterVolumeSpecName: "utilities") pod "c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00" (UID: "c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.117028 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00-kube-api-access-cjccj" (OuterVolumeSpecName: "kube-api-access-cjccj") pod "c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00" (UID: "c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00"). InnerVolumeSpecName "kube-api-access-cjccj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.210747 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.210787 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cjccj\" (UniqueName: \"kubernetes.io/projected/c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00-kube-api-access-cjccj\") on node \"crc\" DevicePath \"\"" Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.229748 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00" (UID: "c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.312857 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.557184 4971 generic.go:334] "Generic (PLEG): container finished" podID="c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00" containerID="dc0f762c5f68b081681bcfc0ed83b3daf983f1b9e1a17adb932c07bdd0b459c9" exitCode=0 Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.557350 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6xh4r" event={"ID":"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00","Type":"ContainerDied","Data":"dc0f762c5f68b081681bcfc0ed83b3daf983f1b9e1a17adb932c07bdd0b459c9"} Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.557402 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6xh4r" Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.557775 4971 scope.go:117] "RemoveContainer" containerID="dc0f762c5f68b081681bcfc0ed83b3daf983f1b9e1a17adb932c07bdd0b459c9" Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.557754 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6xh4r" event={"ID":"c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00","Type":"ContainerDied","Data":"c1e0920bb83972975c70f92520aac2ebedab65d8d78d8ef1bd9c25909cfd47ba"} Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.580756 4971 scope.go:117] "RemoveContainer" containerID="f7781e9907ed3c9409be0fe55f1a677846b8bfd0941b323276bfd8e0b121ddd3" Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.607856 4971 scope.go:117] "RemoveContainer" containerID="efb2163926490635592cb9cd344c7fa4fe4bc259d911fd29164e1cc4976a40ed" Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.643072 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6xh4r"] Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.648832 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6xh4r"] Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.651861 4971 scope.go:117] "RemoveContainer" containerID="dc0f762c5f68b081681bcfc0ed83b3daf983f1b9e1a17adb932c07bdd0b459c9" Nov 27 08:37:03 crc kubenswrapper[4971]: E1127 08:37:03.652511 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc0f762c5f68b081681bcfc0ed83b3daf983f1b9e1a17adb932c07bdd0b459c9\": container with ID starting with dc0f762c5f68b081681bcfc0ed83b3daf983f1b9e1a17adb932c07bdd0b459c9 not found: ID does not exist" containerID="dc0f762c5f68b081681bcfc0ed83b3daf983f1b9e1a17adb932c07bdd0b459c9" Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.652567 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc0f762c5f68b081681bcfc0ed83b3daf983f1b9e1a17adb932c07bdd0b459c9"} err="failed to get container status \"dc0f762c5f68b081681bcfc0ed83b3daf983f1b9e1a17adb932c07bdd0b459c9\": rpc error: code = NotFound desc = could not find container \"dc0f762c5f68b081681bcfc0ed83b3daf983f1b9e1a17adb932c07bdd0b459c9\": container with ID starting with dc0f762c5f68b081681bcfc0ed83b3daf983f1b9e1a17adb932c07bdd0b459c9 not found: ID does not exist" Nov 27 
08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.652591 4971 scope.go:117] "RemoveContainer" containerID="f7781e9907ed3c9409be0fe55f1a677846b8bfd0941b323276bfd8e0b121ddd3" Nov 27 08:37:03 crc kubenswrapper[4971]: E1127 08:37:03.653001 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7781e9907ed3c9409be0fe55f1a677846b8bfd0941b323276bfd8e0b121ddd3\": container with ID starting with f7781e9907ed3c9409be0fe55f1a677846b8bfd0941b323276bfd8e0b121ddd3 not found: ID does not exist" containerID="f7781e9907ed3c9409be0fe55f1a677846b8bfd0941b323276bfd8e0b121ddd3" Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.653021 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7781e9907ed3c9409be0fe55f1a677846b8bfd0941b323276bfd8e0b121ddd3"} err="failed to get container status \"f7781e9907ed3c9409be0fe55f1a677846b8bfd0941b323276bfd8e0b121ddd3\": rpc error: code = NotFound desc = could not find container \"f7781e9907ed3c9409be0fe55f1a677846b8bfd0941b323276bfd8e0b121ddd3\": container with ID starting with f7781e9907ed3c9409be0fe55f1a677846b8bfd0941b323276bfd8e0b121ddd3 not found: ID does not exist" Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.653033 4971 scope.go:117] "RemoveContainer" containerID="efb2163926490635592cb9cd344c7fa4fe4bc259d911fd29164e1cc4976a40ed" Nov 27 08:37:03 crc kubenswrapper[4971]: E1127 08:37:03.653333 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efb2163926490635592cb9cd344c7fa4fe4bc259d911fd29164e1cc4976a40ed\": container with ID starting with efb2163926490635592cb9cd344c7fa4fe4bc259d911fd29164e1cc4976a40ed not found: ID does not exist" containerID="efb2163926490635592cb9cd344c7fa4fe4bc259d911fd29164e1cc4976a40ed" Nov 27 08:37:03 crc kubenswrapper[4971]: I1127 08:37:03.653352 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efb2163926490635592cb9cd344c7fa4fe4bc259d911fd29164e1cc4976a40ed"} err="failed to get container status \"efb2163926490635592cb9cd344c7fa4fe4bc259d911fd29164e1cc4976a40ed\": rpc error: code = NotFound desc = could not find container \"efb2163926490635592cb9cd344c7fa4fe4bc259d911fd29164e1cc4976a40ed\": container with ID starting with efb2163926490635592cb9cd344c7fa4fe4bc259d911fd29164e1cc4976a40ed not found: ID does not exist" Nov 27 08:37:04 crc kubenswrapper[4971]: I1127 08:37:04.563841 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00" path="/var/lib/kubelet/pods/c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00/volumes" Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.702652 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mj822"] Nov 27 08:37:48 crc kubenswrapper[4971]: E1127 08:37:48.704392 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c989465-d4ef-4c07-8ba1-848eacc54325" containerName="extract-content" Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.704417 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c989465-d4ef-4c07-8ba1-848eacc54325" containerName="extract-content" Nov 27 08:37:48 crc kubenswrapper[4971]: E1127 08:37:48.704442 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c989465-d4ef-4c07-8ba1-848eacc54325" containerName="extract-utilities" Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.704455 
4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c989465-d4ef-4c07-8ba1-848eacc54325" containerName="extract-utilities" Nov 27 08:37:48 crc kubenswrapper[4971]: E1127 08:37:48.704488 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00" containerName="extract-content" Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.704501 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00" containerName="extract-content" Nov 27 08:37:48 crc kubenswrapper[4971]: E1127 08:37:48.704516 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00" containerName="registry-server" Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.704526 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00" containerName="registry-server" Nov 27 08:37:48 crc kubenswrapper[4971]: E1127 08:37:48.704573 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00" containerName="extract-utilities" Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.704583 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00" containerName="extract-utilities" Nov 27 08:37:48 crc kubenswrapper[4971]: E1127 08:37:48.704671 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c989465-d4ef-4c07-8ba1-848eacc54325" containerName="registry-server" Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.704683 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c989465-d4ef-4c07-8ba1-848eacc54325" containerName="registry-server" Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.704994 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2b1d6fc-a5dd-4037-b0c1-7a49550fdb00" containerName="registry-server" Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.705036 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c989465-d4ef-4c07-8ba1-848eacc54325" containerName="registry-server" Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.706975 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mj822" Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.719380 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mj822"] Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.835421 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bgvt\" (UniqueName: \"kubernetes.io/projected/c76eab08-5bb0-423c-8d51-14d933544f00-kube-api-access-7bgvt\") pod \"redhat-operators-mj822\" (UID: \"c76eab08-5bb0-423c-8d51-14d933544f00\") " pod="openshift-marketplace/redhat-operators-mj822" Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.835983 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c76eab08-5bb0-423c-8d51-14d933544f00-catalog-content\") pod \"redhat-operators-mj822\" (UID: \"c76eab08-5bb0-423c-8d51-14d933544f00\") " pod="openshift-marketplace/redhat-operators-mj822" Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.836114 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c76eab08-5bb0-423c-8d51-14d933544f00-utilities\") pod \"redhat-operators-mj822\" (UID: \"c76eab08-5bb0-423c-8d51-14d933544f00\") " pod="openshift-marketplace/redhat-operators-mj822" Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.938404 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c76eab08-5bb0-423c-8d51-14d933544f00-catalog-content\") pod \"redhat-operators-mj822\" (UID: \"c76eab08-5bb0-423c-8d51-14d933544f00\") " pod="openshift-marketplace/redhat-operators-mj822" Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.938468 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c76eab08-5bb0-423c-8d51-14d933544f00-utilities\") pod \"redhat-operators-mj822\" (UID: \"c76eab08-5bb0-423c-8d51-14d933544f00\") " pod="openshift-marketplace/redhat-operators-mj822" Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.938571 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bgvt\" (UniqueName: \"kubernetes.io/projected/c76eab08-5bb0-423c-8d51-14d933544f00-kube-api-access-7bgvt\") pod \"redhat-operators-mj822\" (UID: \"c76eab08-5bb0-423c-8d51-14d933544f00\") " pod="openshift-marketplace/redhat-operators-mj822" Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.939169 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c76eab08-5bb0-423c-8d51-14d933544f00-catalog-content\") pod \"redhat-operators-mj822\" (UID: \"c76eab08-5bb0-423c-8d51-14d933544f00\") " pod="openshift-marketplace/redhat-operators-mj822" Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.939226 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c76eab08-5bb0-423c-8d51-14d933544f00-utilities\") pod \"redhat-operators-mj822\" (UID: \"c76eab08-5bb0-423c-8d51-14d933544f00\") " pod="openshift-marketplace/redhat-operators-mj822" Nov 27 08:37:48 crc kubenswrapper[4971]: I1127 08:37:48.961102 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-7bgvt\" (UniqueName: \"kubernetes.io/projected/c76eab08-5bb0-423c-8d51-14d933544f00-kube-api-access-7bgvt\") pod \"redhat-operators-mj822\" (UID: \"c76eab08-5bb0-423c-8d51-14d933544f00\") " pod="openshift-marketplace/redhat-operators-mj822" Nov 27 08:37:49 crc kubenswrapper[4971]: I1127 08:37:49.034419 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mj822" Nov 27 08:37:49 crc kubenswrapper[4971]: I1127 08:37:49.507154 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mj822"] Nov 27 08:37:49 crc kubenswrapper[4971]: I1127 08:37:49.974827 4971 generic.go:334] "Generic (PLEG): container finished" podID="c76eab08-5bb0-423c-8d51-14d933544f00" containerID="bddbad22ed0f6bc59550a9c7bdf13622d8b76b45f1ee9eb7739d1a6743d0e196" exitCode=0 Nov 27 08:37:49 crc kubenswrapper[4971]: I1127 08:37:49.975167 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mj822" event={"ID":"c76eab08-5bb0-423c-8d51-14d933544f00","Type":"ContainerDied","Data":"bddbad22ed0f6bc59550a9c7bdf13622d8b76b45f1ee9eb7739d1a6743d0e196"} Nov 27 08:37:49 crc kubenswrapper[4971]: I1127 08:37:49.975199 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mj822" event={"ID":"c76eab08-5bb0-423c-8d51-14d933544f00","Type":"ContainerStarted","Data":"48a7a7cf51be3f18a160fba9c16173f8737688dc84fbbf299f240947b73e072e"} Nov 27 08:37:51 crc kubenswrapper[4971]: I1127 08:37:51.996738 4971 generic.go:334] "Generic (PLEG): container finished" podID="c76eab08-5bb0-423c-8d51-14d933544f00" containerID="568e3bfa6e80c70fdefe2aa5640fe01df5f0f9806166639bcb93daf51f1845aa" exitCode=0 Nov 27 08:37:51 crc kubenswrapper[4971]: I1127 08:37:51.996814 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mj822" event={"ID":"c76eab08-5bb0-423c-8d51-14d933544f00","Type":"ContainerDied","Data":"568e3bfa6e80c70fdefe2aa5640fe01df5f0f9806166639bcb93daf51f1845aa"} Nov 27 08:37:52 crc kubenswrapper[4971]: I1127 08:37:52.524971 4971 scope.go:117] "RemoveContainer" containerID="8ac2d014b9a82b9f59b6be52733fb364ce456cf2299c4380fa51a61f23a8b7c2" Nov 27 08:37:53 crc kubenswrapper[4971]: I1127 08:37:53.012196 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mj822" event={"ID":"c76eab08-5bb0-423c-8d51-14d933544f00","Type":"ContainerStarted","Data":"0ec6b7ca2b1c364459f97a389fcdf05d23378928c3933cd116dff931efb95251"} Nov 27 08:37:53 crc kubenswrapper[4971]: I1127 08:37:53.041753 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mj822" podStartSLOduration=2.5822969799999997 podStartE2EDuration="5.041730065s" podCreationTimestamp="2025-11-27 08:37:48 +0000 UTC" firstStartedPulling="2025-11-27 08:37:49.976757508 +0000 UTC m=+6308.168801426" lastFinishedPulling="2025-11-27 08:37:52.436190563 +0000 UTC m=+6310.628234511" observedRunningTime="2025-11-27 08:37:53.039704058 +0000 UTC m=+6311.231748066" watchObservedRunningTime="2025-11-27 08:37:53.041730065 +0000 UTC m=+6311.233773993" Nov 27 08:37:56 crc kubenswrapper[4971]: I1127 08:37:56.412944 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" start-of-body= Nov 27 08:37:56 crc kubenswrapper[4971]: I1127 08:37:56.413418 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:37:59 crc kubenswrapper[4971]: I1127 08:37:59.035407 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mj822" Nov 27 08:37:59 crc kubenswrapper[4971]: I1127 08:37:59.036072 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mj822" Nov 27 08:37:59 crc kubenswrapper[4971]: I1127 08:37:59.100246 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mj822" Nov 27 08:37:59 crc kubenswrapper[4971]: I1127 08:37:59.168528 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mj822" Nov 27 08:37:59 crc kubenswrapper[4971]: I1127 08:37:59.353406 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mj822"] Nov 27 08:38:01 crc kubenswrapper[4971]: I1127 08:38:01.083866 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mj822" podUID="c76eab08-5bb0-423c-8d51-14d933544f00" containerName="registry-server" containerID="cri-o://0ec6b7ca2b1c364459f97a389fcdf05d23378928c3933cd116dff931efb95251" gracePeriod=2 Nov 27 08:38:01 crc kubenswrapper[4971]: I1127 08:38:01.559322 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mj822" Nov 27 08:38:01 crc kubenswrapper[4971]: I1127 08:38:01.674424 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c76eab08-5bb0-423c-8d51-14d933544f00-utilities\") pod \"c76eab08-5bb0-423c-8d51-14d933544f00\" (UID: \"c76eab08-5bb0-423c-8d51-14d933544f00\") " Nov 27 08:38:01 crc kubenswrapper[4971]: I1127 08:38:01.674692 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c76eab08-5bb0-423c-8d51-14d933544f00-catalog-content\") pod \"c76eab08-5bb0-423c-8d51-14d933544f00\" (UID: \"c76eab08-5bb0-423c-8d51-14d933544f00\") " Nov 27 08:38:01 crc kubenswrapper[4971]: I1127 08:38:01.674786 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bgvt\" (UniqueName: \"kubernetes.io/projected/c76eab08-5bb0-423c-8d51-14d933544f00-kube-api-access-7bgvt\") pod \"c76eab08-5bb0-423c-8d51-14d933544f00\" (UID: \"c76eab08-5bb0-423c-8d51-14d933544f00\") " Nov 27 08:38:01 crc kubenswrapper[4971]: I1127 08:38:01.676260 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c76eab08-5bb0-423c-8d51-14d933544f00-utilities" (OuterVolumeSpecName: "utilities") pod "c76eab08-5bb0-423c-8d51-14d933544f00" (UID: "c76eab08-5bb0-423c-8d51-14d933544f00"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:38:01 crc kubenswrapper[4971]: I1127 08:38:01.682283 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c76eab08-5bb0-423c-8d51-14d933544f00-kube-api-access-7bgvt" (OuterVolumeSpecName: "kube-api-access-7bgvt") pod "c76eab08-5bb0-423c-8d51-14d933544f00" (UID: "c76eab08-5bb0-423c-8d51-14d933544f00"). InnerVolumeSpecName "kube-api-access-7bgvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:38:01 crc kubenswrapper[4971]: I1127 08:38:01.776447 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bgvt\" (UniqueName: \"kubernetes.io/projected/c76eab08-5bb0-423c-8d51-14d933544f00-kube-api-access-7bgvt\") on node \"crc\" DevicePath \"\"" Nov 27 08:38:01 crc kubenswrapper[4971]: I1127 08:38:01.776497 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c76eab08-5bb0-423c-8d51-14d933544f00-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 08:38:01 crc kubenswrapper[4971]: I1127 08:38:01.803147 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c76eab08-5bb0-423c-8d51-14d933544f00-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c76eab08-5bb0-423c-8d51-14d933544f00" (UID: "c76eab08-5bb0-423c-8d51-14d933544f00"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:38:01 crc kubenswrapper[4971]: I1127 08:38:01.878126 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c76eab08-5bb0-423c-8d51-14d933544f00-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 08:38:02 crc kubenswrapper[4971]: I1127 08:38:02.096496 4971 generic.go:334] "Generic (PLEG): container finished" podID="c76eab08-5bb0-423c-8d51-14d933544f00" containerID="0ec6b7ca2b1c364459f97a389fcdf05d23378928c3933cd116dff931efb95251" exitCode=0 Nov 27 08:38:02 crc kubenswrapper[4971]: I1127 08:38:02.096605 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mj822" event={"ID":"c76eab08-5bb0-423c-8d51-14d933544f00","Type":"ContainerDied","Data":"0ec6b7ca2b1c364459f97a389fcdf05d23378928c3933cd116dff931efb95251"} Nov 27 08:38:02 crc kubenswrapper[4971]: I1127 08:38:02.096648 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mj822" Nov 27 08:38:02 crc kubenswrapper[4971]: I1127 08:38:02.096688 4971 scope.go:117] "RemoveContainer" containerID="0ec6b7ca2b1c364459f97a389fcdf05d23378928c3933cd116dff931efb95251" Nov 27 08:38:02 crc kubenswrapper[4971]: I1127 08:38:02.096668 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mj822" event={"ID":"c76eab08-5bb0-423c-8d51-14d933544f00","Type":"ContainerDied","Data":"48a7a7cf51be3f18a160fba9c16173f8737688dc84fbbf299f240947b73e072e"} Nov 27 08:38:02 crc kubenswrapper[4971]: I1127 08:38:02.117441 4971 scope.go:117] "RemoveContainer" containerID="568e3bfa6e80c70fdefe2aa5640fe01df5f0f9806166639bcb93daf51f1845aa" Nov 27 08:38:02 crc kubenswrapper[4971]: I1127 08:38:02.148706 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mj822"] Nov 27 08:38:02 crc kubenswrapper[4971]: I1127 08:38:02.149274 4971 scope.go:117] "RemoveContainer" containerID="bddbad22ed0f6bc59550a9c7bdf13622d8b76b45f1ee9eb7739d1a6743d0e196" Nov 27 08:38:02 crc kubenswrapper[4971]: I1127 08:38:02.154585 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mj822"] Nov 27 08:38:02 crc kubenswrapper[4971]: I1127 08:38:02.174891 4971 scope.go:117] "RemoveContainer" containerID="0ec6b7ca2b1c364459f97a389fcdf05d23378928c3933cd116dff931efb95251" Nov 27 08:38:02 crc kubenswrapper[4971]: E1127 08:38:02.175415 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ec6b7ca2b1c364459f97a389fcdf05d23378928c3933cd116dff931efb95251\": container with ID starting with 0ec6b7ca2b1c364459f97a389fcdf05d23378928c3933cd116dff931efb95251 not found: ID does not exist" containerID="0ec6b7ca2b1c364459f97a389fcdf05d23378928c3933cd116dff931efb95251" Nov 27 08:38:02 crc kubenswrapper[4971]: I1127 08:38:02.175470 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ec6b7ca2b1c364459f97a389fcdf05d23378928c3933cd116dff931efb95251"} err="failed to get container status \"0ec6b7ca2b1c364459f97a389fcdf05d23378928c3933cd116dff931efb95251\": rpc error: code = NotFound desc = could not find container \"0ec6b7ca2b1c364459f97a389fcdf05d23378928c3933cd116dff931efb95251\": container with ID starting with 0ec6b7ca2b1c364459f97a389fcdf05d23378928c3933cd116dff931efb95251 not found: ID does not exist" Nov 27 08:38:02 crc kubenswrapper[4971]: I1127 08:38:02.175503 4971 scope.go:117] "RemoveContainer" containerID="568e3bfa6e80c70fdefe2aa5640fe01df5f0f9806166639bcb93daf51f1845aa" Nov 27 08:38:02 crc kubenswrapper[4971]: E1127 08:38:02.175814 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"568e3bfa6e80c70fdefe2aa5640fe01df5f0f9806166639bcb93daf51f1845aa\": container with ID starting with 568e3bfa6e80c70fdefe2aa5640fe01df5f0f9806166639bcb93daf51f1845aa not found: ID does not exist" containerID="568e3bfa6e80c70fdefe2aa5640fe01df5f0f9806166639bcb93daf51f1845aa" Nov 27 08:38:02 crc kubenswrapper[4971]: I1127 08:38:02.175846 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"568e3bfa6e80c70fdefe2aa5640fe01df5f0f9806166639bcb93daf51f1845aa"} err="failed to get container status \"568e3bfa6e80c70fdefe2aa5640fe01df5f0f9806166639bcb93daf51f1845aa\": rpc error: code = NotFound desc = could not find container 
\"568e3bfa6e80c70fdefe2aa5640fe01df5f0f9806166639bcb93daf51f1845aa\": container with ID starting with 568e3bfa6e80c70fdefe2aa5640fe01df5f0f9806166639bcb93daf51f1845aa not found: ID does not exist" Nov 27 08:38:02 crc kubenswrapper[4971]: I1127 08:38:02.175863 4971 scope.go:117] "RemoveContainer" containerID="bddbad22ed0f6bc59550a9c7bdf13622d8b76b45f1ee9eb7739d1a6743d0e196" Nov 27 08:38:02 crc kubenswrapper[4971]: E1127 08:38:02.176096 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bddbad22ed0f6bc59550a9c7bdf13622d8b76b45f1ee9eb7739d1a6743d0e196\": container with ID starting with bddbad22ed0f6bc59550a9c7bdf13622d8b76b45f1ee9eb7739d1a6743d0e196 not found: ID does not exist" containerID="bddbad22ed0f6bc59550a9c7bdf13622d8b76b45f1ee9eb7739d1a6743d0e196" Nov 27 08:38:02 crc kubenswrapper[4971]: I1127 08:38:02.176131 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bddbad22ed0f6bc59550a9c7bdf13622d8b76b45f1ee9eb7739d1a6743d0e196"} err="failed to get container status \"bddbad22ed0f6bc59550a9c7bdf13622d8b76b45f1ee9eb7739d1a6743d0e196\": rpc error: code = NotFound desc = could not find container \"bddbad22ed0f6bc59550a9c7bdf13622d8b76b45f1ee9eb7739d1a6743d0e196\": container with ID starting with bddbad22ed0f6bc59550a9c7bdf13622d8b76b45f1ee9eb7739d1a6743d0e196 not found: ID does not exist" Nov 27 08:38:02 crc kubenswrapper[4971]: I1127 08:38:02.561456 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c76eab08-5bb0-423c-8d51-14d933544f00" path="/var/lib/kubelet/pods/c76eab08-5bb0-423c-8d51-14d933544f00/volumes" Nov 27 08:38:26 crc kubenswrapper[4971]: I1127 08:38:26.413092 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:38:26 crc kubenswrapper[4971]: I1127 08:38:26.414069 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:38:56 crc kubenswrapper[4971]: I1127 08:38:56.413971 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:38:56 crc kubenswrapper[4971]: I1127 08:38:56.414969 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:38:56 crc kubenswrapper[4971]: I1127 08:38:56.415070 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 08:38:56 crc kubenswrapper[4971]: I1127 08:38:56.416942 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 08:38:56 crc kubenswrapper[4971]: I1127 08:38:56.417059 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" gracePeriod=600 Nov 27 08:38:56 crc kubenswrapper[4971]: E1127 08:38:56.543950 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:38:56 crc kubenswrapper[4971]: I1127 08:38:56.661877 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" exitCode=0 Nov 27 08:38:56 crc kubenswrapper[4971]: I1127 08:38:56.661949 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d"} Nov 27 08:38:56 crc kubenswrapper[4971]: I1127 08:38:56.662070 4971 scope.go:117] "RemoveContainer" containerID="c5fdcd7a26114da98d92fa593d9bd5735d2520f363dead48d0a133e41557b50b" Nov 27 08:38:56 crc kubenswrapper[4971]: I1127 08:38:56.664442 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:38:56 crc kubenswrapper[4971]: E1127 08:38:56.664831 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:39:11 crc kubenswrapper[4971]: I1127 08:39:11.550447 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:39:11 crc kubenswrapper[4971]: E1127 08:39:11.551422 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:39:22 crc kubenswrapper[4971]: I1127 08:39:22.554456 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:39:22 crc kubenswrapper[4971]: E1127 08:39:22.555693 4971 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:39:35 crc kubenswrapper[4971]: I1127 08:39:35.551482 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:39:35 crc kubenswrapper[4971]: E1127 08:39:35.552329 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:39:50 crc kubenswrapper[4971]: I1127 08:39:50.551208 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:39:50 crc kubenswrapper[4971]: E1127 08:39:50.552600 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:40:03 crc kubenswrapper[4971]: I1127 08:40:03.550913 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:40:03 crc kubenswrapper[4971]: E1127 08:40:03.552254 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:40:14 crc kubenswrapper[4971]: I1127 08:40:14.551014 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:40:14 crc kubenswrapper[4971]: E1127 08:40:14.552120 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:40:28 crc kubenswrapper[4971]: I1127 08:40:28.550432 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:40:28 crc kubenswrapper[4971]: E1127 08:40:28.551736 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:40:39 crc kubenswrapper[4971]: I1127 08:40:39.551172 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:40:39 crc kubenswrapper[4971]: E1127 08:40:39.552432 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:40:51 crc kubenswrapper[4971]: I1127 08:40:51.550852 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:40:51 crc kubenswrapper[4971]: E1127 08:40:51.553745 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:41:03 crc kubenswrapper[4971]: I1127 08:41:03.551856 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:41:03 crc kubenswrapper[4971]: E1127 08:41:03.552799 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:41:18 crc kubenswrapper[4971]: I1127 08:41:18.550711 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:41:18 crc kubenswrapper[4971]: E1127 08:41:18.551840 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:41:31 crc kubenswrapper[4971]: I1127 08:41:31.551594 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:41:31 crc kubenswrapper[4971]: E1127 08:41:31.554952 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" 
podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:41:46 crc kubenswrapper[4971]: I1127 08:41:46.550524 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:41:46 crc kubenswrapper[4971]: E1127 08:41:46.551588 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:41:57 crc kubenswrapper[4971]: I1127 08:41:57.550778 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:41:57 crc kubenswrapper[4971]: E1127 08:41:57.551676 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:42:08 crc kubenswrapper[4971]: I1127 08:42:08.551118 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:42:08 crc kubenswrapper[4971]: E1127 08:42:08.552214 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:42:20 crc kubenswrapper[4971]: I1127 08:42:20.550807 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:42:20 crc kubenswrapper[4971]: E1127 08:42:20.551589 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:42:33 crc kubenswrapper[4971]: I1127 08:42:33.550178 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:42:33 crc kubenswrapper[4971]: E1127 08:42:33.551153 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:42:46 crc kubenswrapper[4971]: I1127 08:42:46.551087 4971 scope.go:117] "RemoveContainer" 
containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:42:46 crc kubenswrapper[4971]: E1127 08:42:46.552136 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:42:52 crc kubenswrapper[4971]: I1127 08:42:52.746231 4971 scope.go:117] "RemoveContainer" containerID="02dc887e019a5735b4e12e89e83671965d6c7d82d5dc9f8a1c1f67c0917127da" Nov 27 08:42:52 crc kubenswrapper[4971]: I1127 08:42:52.777729 4971 scope.go:117] "RemoveContainer" containerID="105853af7f95005457dacd3abc12e0196c2dd817c139d620eb809d7fde09edfd" Nov 27 08:42:52 crc kubenswrapper[4971]: I1127 08:42:52.820908 4971 scope.go:117] "RemoveContainer" containerID="3b907a1368309dda8e7331ab54c94892e08c5bcf69b28266b2f72d0fb973f099" Nov 27 08:42:52 crc kubenswrapper[4971]: I1127 08:42:52.856357 4971 scope.go:117] "RemoveContainer" containerID="f5a759743f27049bf9fd9203918bc41182ad147913669b019e24902a82332018" Nov 27 08:42:52 crc kubenswrapper[4971]: I1127 08:42:52.908389 4971 scope.go:117] "RemoveContainer" containerID="6a2a95472c871c99c30f2a6bdcff16430decb8294899f5a45a9abf90fb5e5e1c" Nov 27 08:42:52 crc kubenswrapper[4971]: I1127 08:42:52.929951 4971 scope.go:117] "RemoveContainer" containerID="664a17b3bc26c68302e7f441f65dee5328f8e5fd8bec7b6ec07f8801977387dc" Nov 27 08:42:52 crc kubenswrapper[4971]: I1127 08:42:52.966595 4971 scope.go:117] "RemoveContainer" containerID="f048df5a920636734cbfed5dbb0163e2b83723648fb08cfd7447cb2a5ac45989" Nov 27 08:42:52 crc kubenswrapper[4971]: I1127 08:42:52.986843 4971 scope.go:117] "RemoveContainer" containerID="5ef0927681e9ea7f290248ba6100b7954aca82b758961aad1c3cb961bf30a48c" Nov 27 08:43:01 crc kubenswrapper[4971]: I1127 08:43:01.551603 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:43:01 crc kubenswrapper[4971]: E1127 08:43:01.553101 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:43:15 crc kubenswrapper[4971]: I1127 08:43:15.551119 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:43:15 crc kubenswrapper[4971]: E1127 08:43:15.552478 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:43:28 crc kubenswrapper[4971]: I1127 08:43:28.550555 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:43:28 crc 
kubenswrapper[4971]: E1127 08:43:28.552632 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:43:41 crc kubenswrapper[4971]: I1127 08:43:41.551106 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:43:41 crc kubenswrapper[4971]: E1127 08:43:41.552268 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:43:54 crc kubenswrapper[4971]: I1127 08:43:54.550942 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:43:54 crc kubenswrapper[4971]: E1127 08:43:54.552042 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:44:09 crc kubenswrapper[4971]: I1127 08:44:09.550256 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:44:10 crc kubenswrapper[4971]: I1127 08:44:10.641136 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"972a96022e7888e797ef56d94e28ce8ef6b71b2858abcc7c40439a752ca23142"} Nov 27 08:45:00 crc kubenswrapper[4971]: I1127 08:45:00.155358 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh"] Nov 27 08:45:00 crc kubenswrapper[4971]: E1127 08:45:00.156496 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c76eab08-5bb0-423c-8d51-14d933544f00" containerName="registry-server" Nov 27 08:45:00 crc kubenswrapper[4971]: I1127 08:45:00.156516 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c76eab08-5bb0-423c-8d51-14d933544f00" containerName="registry-server" Nov 27 08:45:00 crc kubenswrapper[4971]: E1127 08:45:00.156569 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c76eab08-5bb0-423c-8d51-14d933544f00" containerName="extract-utilities" Nov 27 08:45:00 crc kubenswrapper[4971]: I1127 08:45:00.156579 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c76eab08-5bb0-423c-8d51-14d933544f00" containerName="extract-utilities" Nov 27 08:45:00 crc kubenswrapper[4971]: E1127 08:45:00.156593 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c76eab08-5bb0-423c-8d51-14d933544f00" containerName="extract-content" Nov 27 08:45:00 crc 
kubenswrapper[4971]: I1127 08:45:00.156601 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c76eab08-5bb0-423c-8d51-14d933544f00" containerName="extract-content" Nov 27 08:45:00 crc kubenswrapper[4971]: I1127 08:45:00.156800 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c76eab08-5bb0-423c-8d51-14d933544f00" containerName="registry-server" Nov 27 08:45:00 crc kubenswrapper[4971]: I1127 08:45:00.157563 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh" Nov 27 08:45:00 crc kubenswrapper[4971]: I1127 08:45:00.160285 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 27 08:45:00 crc kubenswrapper[4971]: I1127 08:45:00.160983 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 27 08:45:00 crc kubenswrapper[4971]: I1127 08:45:00.168805 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh"] Nov 27 08:45:00 crc kubenswrapper[4971]: I1127 08:45:00.270965 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sj2qp\" (UniqueName: \"kubernetes.io/projected/e9f5d1b5-794a-4b95-893a-f7036a07cf5a-kube-api-access-sj2qp\") pod \"collect-profiles-29403885-zdqlh\" (UID: \"e9f5d1b5-794a-4b95-893a-f7036a07cf5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh" Nov 27 08:45:00 crc kubenswrapper[4971]: I1127 08:45:00.271041 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e9f5d1b5-794a-4b95-893a-f7036a07cf5a-config-volume\") pod \"collect-profiles-29403885-zdqlh\" (UID: \"e9f5d1b5-794a-4b95-893a-f7036a07cf5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh" Nov 27 08:45:00 crc kubenswrapper[4971]: I1127 08:45:00.271072 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e9f5d1b5-794a-4b95-893a-f7036a07cf5a-secret-volume\") pod \"collect-profiles-29403885-zdqlh\" (UID: \"e9f5d1b5-794a-4b95-893a-f7036a07cf5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh" Nov 27 08:45:00 crc kubenswrapper[4971]: I1127 08:45:00.373230 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sj2qp\" (UniqueName: \"kubernetes.io/projected/e9f5d1b5-794a-4b95-893a-f7036a07cf5a-kube-api-access-sj2qp\") pod \"collect-profiles-29403885-zdqlh\" (UID: \"e9f5d1b5-794a-4b95-893a-f7036a07cf5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh" Nov 27 08:45:00 crc kubenswrapper[4971]: I1127 08:45:00.374456 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e9f5d1b5-794a-4b95-893a-f7036a07cf5a-config-volume\") pod \"collect-profiles-29403885-zdqlh\" (UID: \"e9f5d1b5-794a-4b95-893a-f7036a07cf5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh" Nov 27 08:45:00 crc kubenswrapper[4971]: I1127 08:45:00.374705 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: 
\"kubernetes.io/secret/e9f5d1b5-794a-4b95-893a-f7036a07cf5a-secret-volume\") pod \"collect-profiles-29403885-zdqlh\" (UID: \"e9f5d1b5-794a-4b95-893a-f7036a07cf5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh" Nov 27 08:45:00 crc kubenswrapper[4971]: I1127 08:45:00.375439 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e9f5d1b5-794a-4b95-893a-f7036a07cf5a-config-volume\") pod \"collect-profiles-29403885-zdqlh\" (UID: \"e9f5d1b5-794a-4b95-893a-f7036a07cf5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh" Nov 27 08:45:00 crc kubenswrapper[4971]: I1127 08:45:00.382125 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e9f5d1b5-794a-4b95-893a-f7036a07cf5a-secret-volume\") pod \"collect-profiles-29403885-zdqlh\" (UID: \"e9f5d1b5-794a-4b95-893a-f7036a07cf5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh" Nov 27 08:45:00 crc kubenswrapper[4971]: I1127 08:45:00.399974 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sj2qp\" (UniqueName: \"kubernetes.io/projected/e9f5d1b5-794a-4b95-893a-f7036a07cf5a-kube-api-access-sj2qp\") pod \"collect-profiles-29403885-zdqlh\" (UID: \"e9f5d1b5-794a-4b95-893a-f7036a07cf5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh" Nov 27 08:45:00 crc kubenswrapper[4971]: I1127 08:45:00.487043 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh" Nov 27 08:45:00 crc kubenswrapper[4971]: I1127 08:45:00.965980 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh"] Nov 27 08:45:00 crc kubenswrapper[4971]: W1127 08:45:00.977898 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode9f5d1b5_794a_4b95_893a_f7036a07cf5a.slice/crio-406705cbae870986844128b56bc149f6daa3d3f28861761c4f70b09e6953bc93 WatchSource:0}: Error finding container 406705cbae870986844128b56bc149f6daa3d3f28861761c4f70b09e6953bc93: Status 404 returned error can't find the container with id 406705cbae870986844128b56bc149f6daa3d3f28861761c4f70b09e6953bc93 Nov 27 08:45:01 crc kubenswrapper[4971]: I1127 08:45:01.084297 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh" event={"ID":"e9f5d1b5-794a-4b95-893a-f7036a07cf5a","Type":"ContainerStarted","Data":"406705cbae870986844128b56bc149f6daa3d3f28861761c4f70b09e6953bc93"} Nov 27 08:45:02 crc kubenswrapper[4971]: I1127 08:45:02.094349 4971 generic.go:334] "Generic (PLEG): container finished" podID="e9f5d1b5-794a-4b95-893a-f7036a07cf5a" containerID="3668981b28bec3efabb0d7b967d4cdfb5e13ee8e721c4248037eb7cbffa352d0" exitCode=0 Nov 27 08:45:02 crc kubenswrapper[4971]: I1127 08:45:02.094426 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh" event={"ID":"e9f5d1b5-794a-4b95-893a-f7036a07cf5a","Type":"ContainerDied","Data":"3668981b28bec3efabb0d7b967d4cdfb5e13ee8e721c4248037eb7cbffa352d0"} Nov 27 08:45:03 crc kubenswrapper[4971]: I1127 08:45:03.429848 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh" Nov 27 08:45:03 crc kubenswrapper[4971]: I1127 08:45:03.529118 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e9f5d1b5-794a-4b95-893a-f7036a07cf5a-secret-volume\") pod \"e9f5d1b5-794a-4b95-893a-f7036a07cf5a\" (UID: \"e9f5d1b5-794a-4b95-893a-f7036a07cf5a\") " Nov 27 08:45:03 crc kubenswrapper[4971]: I1127 08:45:03.529195 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e9f5d1b5-794a-4b95-893a-f7036a07cf5a-config-volume\") pod \"e9f5d1b5-794a-4b95-893a-f7036a07cf5a\" (UID: \"e9f5d1b5-794a-4b95-893a-f7036a07cf5a\") " Nov 27 08:45:03 crc kubenswrapper[4971]: I1127 08:45:03.529300 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sj2qp\" (UniqueName: \"kubernetes.io/projected/e9f5d1b5-794a-4b95-893a-f7036a07cf5a-kube-api-access-sj2qp\") pod \"e9f5d1b5-794a-4b95-893a-f7036a07cf5a\" (UID: \"e9f5d1b5-794a-4b95-893a-f7036a07cf5a\") " Nov 27 08:45:03 crc kubenswrapper[4971]: I1127 08:45:03.530196 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9f5d1b5-794a-4b95-893a-f7036a07cf5a-config-volume" (OuterVolumeSpecName: "config-volume") pod "e9f5d1b5-794a-4b95-893a-f7036a07cf5a" (UID: "e9f5d1b5-794a-4b95-893a-f7036a07cf5a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:45:03 crc kubenswrapper[4971]: I1127 08:45:03.534776 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9f5d1b5-794a-4b95-893a-f7036a07cf5a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e9f5d1b5-794a-4b95-893a-f7036a07cf5a" (UID: "e9f5d1b5-794a-4b95-893a-f7036a07cf5a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:45:03 crc kubenswrapper[4971]: I1127 08:45:03.535036 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9f5d1b5-794a-4b95-893a-f7036a07cf5a-kube-api-access-sj2qp" (OuterVolumeSpecName: "kube-api-access-sj2qp") pod "e9f5d1b5-794a-4b95-893a-f7036a07cf5a" (UID: "e9f5d1b5-794a-4b95-893a-f7036a07cf5a"). InnerVolumeSpecName "kube-api-access-sj2qp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:45:03 crc kubenswrapper[4971]: I1127 08:45:03.630752 4971 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e9f5d1b5-794a-4b95-893a-f7036a07cf5a-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 27 08:45:03 crc kubenswrapper[4971]: I1127 08:45:03.630796 4971 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e9f5d1b5-794a-4b95-893a-f7036a07cf5a-config-volume\") on node \"crc\" DevicePath \"\"" Nov 27 08:45:03 crc kubenswrapper[4971]: I1127 08:45:03.630807 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sj2qp\" (UniqueName: \"kubernetes.io/projected/e9f5d1b5-794a-4b95-893a-f7036a07cf5a-kube-api-access-sj2qp\") on node \"crc\" DevicePath \"\"" Nov 27 08:45:04 crc kubenswrapper[4971]: I1127 08:45:04.109391 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh" event={"ID":"e9f5d1b5-794a-4b95-893a-f7036a07cf5a","Type":"ContainerDied","Data":"406705cbae870986844128b56bc149f6daa3d3f28861761c4f70b09e6953bc93"} Nov 27 08:45:04 crc kubenswrapper[4971]: I1127 08:45:04.109466 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="406705cbae870986844128b56bc149f6daa3d3f28861761c4f70b09e6953bc93" Nov 27 08:45:04 crc kubenswrapper[4971]: I1127 08:45:04.109603 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh" Nov 27 08:45:04 crc kubenswrapper[4971]: I1127 08:45:04.513670 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc"] Nov 27 08:45:04 crc kubenswrapper[4971]: I1127 08:45:04.522872 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403840-rbjnc"] Nov 27 08:45:04 crc kubenswrapper[4971]: I1127 08:45:04.561471 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19e3b6ec-43b6-4fa4-81f6-51c5385ae246" path="/var/lib/kubelet/pods/19e3b6ec-43b6-4fa4-81f6-51c5385ae246/volumes" Nov 27 08:45:53 crc kubenswrapper[4971]: I1127 08:45:53.166923 4971 scope.go:117] "RemoveContainer" containerID="2b53877f309230a7e50155c125aa08b61674704902686ec28b0f41db04dbc387" Nov 27 08:46:26 crc kubenswrapper[4971]: I1127 08:46:26.413310 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:46:26 crc kubenswrapper[4971]: I1127 08:46:26.414313 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:46:38 crc kubenswrapper[4971]: I1127 08:46:38.386679 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hztzn"] Nov 27 08:46:38 crc kubenswrapper[4971]: E1127 08:46:38.387943 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9f5d1b5-794a-4b95-893a-f7036a07cf5a" 
containerName="collect-profiles" Nov 27 08:46:38 crc kubenswrapper[4971]: I1127 08:46:38.387963 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9f5d1b5-794a-4b95-893a-f7036a07cf5a" containerName="collect-profiles" Nov 27 08:46:38 crc kubenswrapper[4971]: I1127 08:46:38.388210 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9f5d1b5-794a-4b95-893a-f7036a07cf5a" containerName="collect-profiles" Nov 27 08:46:38 crc kubenswrapper[4971]: I1127 08:46:38.389500 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hztzn" Nov 27 08:46:38 crc kubenswrapper[4971]: I1127 08:46:38.393508 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hztzn"] Nov 27 08:46:38 crc kubenswrapper[4971]: I1127 08:46:38.534523 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecdd35e6-2962-453a-8b8d-9d29012d518d-utilities\") pod \"redhat-marketplace-hztzn\" (UID: \"ecdd35e6-2962-453a-8b8d-9d29012d518d\") " pod="openshift-marketplace/redhat-marketplace-hztzn" Nov 27 08:46:38 crc kubenswrapper[4971]: I1127 08:46:38.535085 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecdd35e6-2962-453a-8b8d-9d29012d518d-catalog-content\") pod \"redhat-marketplace-hztzn\" (UID: \"ecdd35e6-2962-453a-8b8d-9d29012d518d\") " pod="openshift-marketplace/redhat-marketplace-hztzn" Nov 27 08:46:38 crc kubenswrapper[4971]: I1127 08:46:38.535134 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-264qb\" (UniqueName: \"kubernetes.io/projected/ecdd35e6-2962-453a-8b8d-9d29012d518d-kube-api-access-264qb\") pod \"redhat-marketplace-hztzn\" (UID: \"ecdd35e6-2962-453a-8b8d-9d29012d518d\") " pod="openshift-marketplace/redhat-marketplace-hztzn" Nov 27 08:46:38 crc kubenswrapper[4971]: I1127 08:46:38.637352 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecdd35e6-2962-453a-8b8d-9d29012d518d-utilities\") pod \"redhat-marketplace-hztzn\" (UID: \"ecdd35e6-2962-453a-8b8d-9d29012d518d\") " pod="openshift-marketplace/redhat-marketplace-hztzn" Nov 27 08:46:38 crc kubenswrapper[4971]: I1127 08:46:38.637433 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecdd35e6-2962-453a-8b8d-9d29012d518d-catalog-content\") pod \"redhat-marketplace-hztzn\" (UID: \"ecdd35e6-2962-453a-8b8d-9d29012d518d\") " pod="openshift-marketplace/redhat-marketplace-hztzn" Nov 27 08:46:38 crc kubenswrapper[4971]: I1127 08:46:38.637480 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-264qb\" (UniqueName: \"kubernetes.io/projected/ecdd35e6-2962-453a-8b8d-9d29012d518d-kube-api-access-264qb\") pod \"redhat-marketplace-hztzn\" (UID: \"ecdd35e6-2962-453a-8b8d-9d29012d518d\") " pod="openshift-marketplace/redhat-marketplace-hztzn" Nov 27 08:46:38 crc kubenswrapper[4971]: I1127 08:46:38.638201 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecdd35e6-2962-453a-8b8d-9d29012d518d-utilities\") pod \"redhat-marketplace-hztzn\" (UID: \"ecdd35e6-2962-453a-8b8d-9d29012d518d\") " 
pod="openshift-marketplace/redhat-marketplace-hztzn" Nov 27 08:46:38 crc kubenswrapper[4971]: I1127 08:46:38.638322 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecdd35e6-2962-453a-8b8d-9d29012d518d-catalog-content\") pod \"redhat-marketplace-hztzn\" (UID: \"ecdd35e6-2962-453a-8b8d-9d29012d518d\") " pod="openshift-marketplace/redhat-marketplace-hztzn" Nov 27 08:46:38 crc kubenswrapper[4971]: I1127 08:46:38.665382 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-264qb\" (UniqueName: \"kubernetes.io/projected/ecdd35e6-2962-453a-8b8d-9d29012d518d-kube-api-access-264qb\") pod \"redhat-marketplace-hztzn\" (UID: \"ecdd35e6-2962-453a-8b8d-9d29012d518d\") " pod="openshift-marketplace/redhat-marketplace-hztzn" Nov 27 08:46:38 crc kubenswrapper[4971]: I1127 08:46:38.715592 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hztzn" Nov 27 08:46:39 crc kubenswrapper[4971]: I1127 08:46:39.229265 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hztzn"] Nov 27 08:46:39 crc kubenswrapper[4971]: I1127 08:46:39.903960 4971 generic.go:334] "Generic (PLEG): container finished" podID="ecdd35e6-2962-453a-8b8d-9d29012d518d" containerID="4a5e3f565699f1745dd209b76fba93491299766ddc39806d96dc3e7889abcfa4" exitCode=0 Nov 27 08:46:39 crc kubenswrapper[4971]: I1127 08:46:39.904011 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hztzn" event={"ID":"ecdd35e6-2962-453a-8b8d-9d29012d518d","Type":"ContainerDied","Data":"4a5e3f565699f1745dd209b76fba93491299766ddc39806d96dc3e7889abcfa4"} Nov 27 08:46:39 crc kubenswrapper[4971]: I1127 08:46:39.904309 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hztzn" event={"ID":"ecdd35e6-2962-453a-8b8d-9d29012d518d","Type":"ContainerStarted","Data":"dfca98842a621059ba9feec71c0dd942a53965578850dc198fdaccc57e69f87a"} Nov 27 08:46:39 crc kubenswrapper[4971]: I1127 08:46:39.907253 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 27 08:46:41 crc kubenswrapper[4971]: I1127 08:46:41.011014 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"] Nov 27 08:46:41 crc kubenswrapper[4971]: I1127 08:46:41.015126 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-copy-data" Nov 27 08:46:41 crc kubenswrapper[4971]: I1127 08:46:41.018203 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-8nz9c" Nov 27 08:46:41 crc kubenswrapper[4971]: I1127 08:46:41.022734 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Nov 27 08:46:41 crc kubenswrapper[4971]: I1127 08:46:41.078067 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-6afd7684-5df4-45a0-931c-e3913e0a3c7b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6afd7684-5df4-45a0-931c-e3913e0a3c7b\") pod \"mariadb-copy-data\" (UID: \"6a46d8be-7005-4ab5-8a1f-0b158e60d38b\") " pod="openstack/mariadb-copy-data" Nov 27 08:46:41 crc kubenswrapper[4971]: I1127 08:46:41.078524 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfsdr\" (UniqueName: \"kubernetes.io/projected/6a46d8be-7005-4ab5-8a1f-0b158e60d38b-kube-api-access-qfsdr\") pod \"mariadb-copy-data\" (UID: \"6a46d8be-7005-4ab5-8a1f-0b158e60d38b\") " pod="openstack/mariadb-copy-data" Nov 27 08:46:41 crc kubenswrapper[4971]: I1127 08:46:41.179510 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfsdr\" (UniqueName: \"kubernetes.io/projected/6a46d8be-7005-4ab5-8a1f-0b158e60d38b-kube-api-access-qfsdr\") pod \"mariadb-copy-data\" (UID: \"6a46d8be-7005-4ab5-8a1f-0b158e60d38b\") " pod="openstack/mariadb-copy-data" Nov 27 08:46:41 crc kubenswrapper[4971]: I1127 08:46:41.179610 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-6afd7684-5df4-45a0-931c-e3913e0a3c7b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6afd7684-5df4-45a0-931c-e3913e0a3c7b\") pod \"mariadb-copy-data\" (UID: \"6a46d8be-7005-4ab5-8a1f-0b158e60d38b\") " pod="openstack/mariadb-copy-data" Nov 27 08:46:41 crc kubenswrapper[4971]: I1127 08:46:41.184048 4971 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 27 08:46:41 crc kubenswrapper[4971]: I1127 08:46:41.184119 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-6afd7684-5df4-45a0-931c-e3913e0a3c7b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6afd7684-5df4-45a0-931c-e3913e0a3c7b\") pod \"mariadb-copy-data\" (UID: \"6a46d8be-7005-4ab5-8a1f-0b158e60d38b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d10839a97571ed43eab8f20114f8627668d46161850b24cefce0f485290adffb/globalmount\"" pod="openstack/mariadb-copy-data"
Nov 27 08:46:41 crc kubenswrapper[4971]: I1127 08:46:41.205398 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfsdr\" (UniqueName: \"kubernetes.io/projected/6a46d8be-7005-4ab5-8a1f-0b158e60d38b-kube-api-access-qfsdr\") pod \"mariadb-copy-data\" (UID: \"6a46d8be-7005-4ab5-8a1f-0b158e60d38b\") " pod="openstack/mariadb-copy-data"
Nov 27 08:46:41 crc kubenswrapper[4971]: I1127 08:46:41.219791 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-6afd7684-5df4-45a0-931c-e3913e0a3c7b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6afd7684-5df4-45a0-931c-e3913e0a3c7b\") pod \"mariadb-copy-data\" (UID: \"6a46d8be-7005-4ab5-8a1f-0b158e60d38b\") " pod="openstack/mariadb-copy-data"
Nov 27 08:46:41 crc kubenswrapper[4971]: I1127 08:46:41.338016 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data"
Nov 27 08:46:41 crc kubenswrapper[4971]: I1127 08:46:41.889758 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"]
Nov 27 08:46:41 crc kubenswrapper[4971]: W1127 08:46:41.891925 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a46d8be_7005_4ab5_8a1f_0b158e60d38b.slice/crio-582a33e11053042cfd0f9e670d5b39aa775831c98df28cdcabd1362996f84306 WatchSource:0}: Error finding container 582a33e11053042cfd0f9e670d5b39aa775831c98df28cdcabd1362996f84306: Status 404 returned error can't find the container with id 582a33e11053042cfd0f9e670d5b39aa775831c98df28cdcabd1362996f84306
Nov 27 08:46:41 crc kubenswrapper[4971]: I1127 08:46:41.930389 4971 generic.go:334] "Generic (PLEG): container finished" podID="ecdd35e6-2962-453a-8b8d-9d29012d518d" containerID="441e51613be92e1048a04752cd456ee8d1ca791de3141ec36ee5ec15209b8660" exitCode=0
Nov 27 08:46:41 crc kubenswrapper[4971]: I1127 08:46:41.930487 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hztzn" event={"ID":"ecdd35e6-2962-453a-8b8d-9d29012d518d","Type":"ContainerDied","Data":"441e51613be92e1048a04752cd456ee8d1ca791de3141ec36ee5ec15209b8660"}
Nov 27 08:46:41 crc kubenswrapper[4971]: I1127 08:46:41.931911 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"6a46d8be-7005-4ab5-8a1f-0b158e60d38b","Type":"ContainerStarted","Data":"582a33e11053042cfd0f9e670d5b39aa775831c98df28cdcabd1362996f84306"}
Nov 27 08:46:42 crc kubenswrapper[4971]: I1127 08:46:42.941123 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hztzn" event={"ID":"ecdd35e6-2962-453a-8b8d-9d29012d518d","Type":"ContainerStarted","Data":"95a429082be33cfe7febb163c605be47cb7adba1c8db6ce29cb500a20b9e3af5"}
Nov 27 08:46:42 crc kubenswrapper[4971]: I1127 08:46:42.942870 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"6a46d8be-7005-4ab5-8a1f-0b158e60d38b","Type":"ContainerStarted","Data":"cd9e88392578f007fa3f57e174e953c084f6e5cb42af83d8f3ec1fd23f1ff888"}
Nov 27 08:46:42 crc kubenswrapper[4971]: I1127 08:46:42.965139 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hztzn" podStartSLOduration=2.524749546 podStartE2EDuration="4.965119154s" podCreationTimestamp="2025-11-27 08:46:38 +0000 UTC" firstStartedPulling="2025-11-27 08:46:39.907041268 +0000 UTC m=+6838.099085186" lastFinishedPulling="2025-11-27 08:46:42.347410876 +0000 UTC m=+6840.539454794" observedRunningTime="2025-11-27 08:46:42.958810904 +0000 UTC m=+6841.150854852" watchObservedRunningTime="2025-11-27 08:46:42.965119154 +0000 UTC m=+6841.157163072"
Nov 27 08:46:42 crc kubenswrapper[4971]: I1127 08:46:42.973978 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=3.973958317 podStartE2EDuration="3.973958317s" podCreationTimestamp="2025-11-27 08:46:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:46:42.971710093 +0000 UTC m=+6841.163754001" watchObservedRunningTime="2025-11-27 08:46:42.973958317 +0000 UTC m=+6841.166002245"
Nov 27 08:46:46 crc kubenswrapper[4971]: I1127 08:46:46.233207 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"]
Nov 27 08:46:46 crc kubenswrapper[4971]: I1127 08:46:46.235344 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 27 08:46:46 crc kubenswrapper[4971]: I1127 08:46:46.249048 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Nov 27 08:46:46 crc kubenswrapper[4971]: I1127 08:46:46.369903 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6xwr\" (UniqueName: \"kubernetes.io/projected/62e5060c-e43e-477d-8e1f-5dfc40dd4393-kube-api-access-n6xwr\") pod \"mariadb-client\" (UID: \"62e5060c-e43e-477d-8e1f-5dfc40dd4393\") " pod="openstack/mariadb-client"
Nov 27 08:46:46 crc kubenswrapper[4971]: I1127 08:46:46.471953 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6xwr\" (UniqueName: \"kubernetes.io/projected/62e5060c-e43e-477d-8e1f-5dfc40dd4393-kube-api-access-n6xwr\") pod \"mariadb-client\" (UID: \"62e5060c-e43e-477d-8e1f-5dfc40dd4393\") " pod="openstack/mariadb-client"
Nov 27 08:46:46 crc kubenswrapper[4971]: I1127 08:46:46.507322 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6xwr\" (UniqueName: \"kubernetes.io/projected/62e5060c-e43e-477d-8e1f-5dfc40dd4393-kube-api-access-n6xwr\") pod \"mariadb-client\" (UID: \"62e5060c-e43e-477d-8e1f-5dfc40dd4393\") " pod="openstack/mariadb-client"
Nov 27 08:46:46 crc kubenswrapper[4971]: I1127 08:46:46.594178 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 27 08:46:47 crc kubenswrapper[4971]: W1127 08:46:47.065570 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62e5060c_e43e_477d_8e1f_5dfc40dd4393.slice/crio-8ea8627b10f57b90914f4ce7a3180cb7ef238bc6f6e77ab2a6ac86abe19d92d8 WatchSource:0}: Error finding container 8ea8627b10f57b90914f4ce7a3180cb7ef238bc6f6e77ab2a6ac86abe19d92d8: Status 404 returned error can't find the container with id 8ea8627b10f57b90914f4ce7a3180cb7ef238bc6f6e77ab2a6ac86abe19d92d8
Nov 27 08:46:47 crc kubenswrapper[4971]: I1127 08:46:47.065730 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Nov 27 08:46:47 crc kubenswrapper[4971]: I1127 08:46:47.990649 4971 generic.go:334] "Generic (PLEG): container finished" podID="62e5060c-e43e-477d-8e1f-5dfc40dd4393" containerID="13a6d74dc8bdec9813e38ff6986663b014177a32bd3209549f994c2bea534c9d" exitCode=0
Nov 27 08:46:47 crc kubenswrapper[4971]: I1127 08:46:47.990770 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"62e5060c-e43e-477d-8e1f-5dfc40dd4393","Type":"ContainerDied","Data":"13a6d74dc8bdec9813e38ff6986663b014177a32bd3209549f994c2bea534c9d"}
Nov 27 08:46:47 crc kubenswrapper[4971]: I1127 08:46:47.991102 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"62e5060c-e43e-477d-8e1f-5dfc40dd4393","Type":"ContainerStarted","Data":"8ea8627b10f57b90914f4ce7a3180cb7ef238bc6f6e77ab2a6ac86abe19d92d8"}
Nov 27 08:46:48 crc kubenswrapper[4971]: I1127 08:46:48.230047 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9jwb9"]
Nov 27 08:46:48 crc kubenswrapper[4971]: I1127 08:46:48.232734 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9jwb9"
Nov 27 08:46:48 crc kubenswrapper[4971]: I1127 08:46:48.240665 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9jwb9"]
Nov 27 08:46:48 crc kubenswrapper[4971]: I1127 08:46:48.306901 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78ac5b1e-b8d1-45ac-aec7-1253421d3c45-catalog-content\") pod \"community-operators-9jwb9\" (UID: \"78ac5b1e-b8d1-45ac-aec7-1253421d3c45\") " pod="openshift-marketplace/community-operators-9jwb9"
Nov 27 08:46:48 crc kubenswrapper[4971]: I1127 08:46:48.306978 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78ac5b1e-b8d1-45ac-aec7-1253421d3c45-utilities\") pod \"community-operators-9jwb9\" (UID: \"78ac5b1e-b8d1-45ac-aec7-1253421d3c45\") " pod="openshift-marketplace/community-operators-9jwb9"
Nov 27 08:46:48 crc kubenswrapper[4971]: I1127 08:46:48.307199 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hv5nl\" (UniqueName: \"kubernetes.io/projected/78ac5b1e-b8d1-45ac-aec7-1253421d3c45-kube-api-access-hv5nl\") pod \"community-operators-9jwb9\" (UID: \"78ac5b1e-b8d1-45ac-aec7-1253421d3c45\") " pod="openshift-marketplace/community-operators-9jwb9"
Nov 27 08:46:48 crc kubenswrapper[4971]: I1127 08:46:48.408763 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hv5nl\" (UniqueName: \"kubernetes.io/projected/78ac5b1e-b8d1-45ac-aec7-1253421d3c45-kube-api-access-hv5nl\") pod \"community-operators-9jwb9\" (UID: \"78ac5b1e-b8d1-45ac-aec7-1253421d3c45\") " pod="openshift-marketplace/community-operators-9jwb9"
Nov 27 08:46:48 crc kubenswrapper[4971]: I1127 08:46:48.408834 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78ac5b1e-b8d1-45ac-aec7-1253421d3c45-catalog-content\") pod \"community-operators-9jwb9\" (UID: \"78ac5b1e-b8d1-45ac-aec7-1253421d3c45\") " pod="openshift-marketplace/community-operators-9jwb9"
Nov 27 08:46:48 crc kubenswrapper[4971]: I1127 08:46:48.408872 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78ac5b1e-b8d1-45ac-aec7-1253421d3c45-utilities\") pod \"community-operators-9jwb9\" (UID: \"78ac5b1e-b8d1-45ac-aec7-1253421d3c45\") " pod="openshift-marketplace/community-operators-9jwb9"
Nov 27 08:46:48 crc kubenswrapper[4971]: I1127 08:46:48.409407 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78ac5b1e-b8d1-45ac-aec7-1253421d3c45-utilities\") pod \"community-operators-9jwb9\" (UID: \"78ac5b1e-b8d1-45ac-aec7-1253421d3c45\") " pod="openshift-marketplace/community-operators-9jwb9"
Nov 27 08:46:48 crc kubenswrapper[4971]: I1127 08:46:48.409625 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78ac5b1e-b8d1-45ac-aec7-1253421d3c45-catalog-content\") pod \"community-operators-9jwb9\" (UID: \"78ac5b1e-b8d1-45ac-aec7-1253421d3c45\") " pod="openshift-marketplace/community-operators-9jwb9"
Nov 27 08:46:48 crc kubenswrapper[4971]: I1127 08:46:48.437649 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hv5nl\" (UniqueName: \"kubernetes.io/projected/78ac5b1e-b8d1-45ac-aec7-1253421d3c45-kube-api-access-hv5nl\") pod \"community-operators-9jwb9\" (UID: \"78ac5b1e-b8d1-45ac-aec7-1253421d3c45\") " pod="openshift-marketplace/community-operators-9jwb9"
Nov 27 08:46:48 crc kubenswrapper[4971]: I1127 08:46:48.565639 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9jwb9"
Nov 27 08:46:48 crc kubenswrapper[4971]: I1127 08:46:48.716926 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hztzn"
Nov 27 08:46:48 crc kubenswrapper[4971]: I1127 08:46:48.717820 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hztzn"
Nov 27 08:46:48 crc kubenswrapper[4971]: I1127 08:46:48.837926 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hztzn"
Nov 27 08:46:49 crc kubenswrapper[4971]: I1127 08:46:49.056341 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hztzn"
Nov 27 08:46:49 crc kubenswrapper[4971]: I1127 08:46:49.155985 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9jwb9"]
Nov 27 08:46:49 crc kubenswrapper[4971]: I1127 08:46:49.309576 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 27 08:46:49 crc kubenswrapper[4971]: I1127 08:46:49.334117 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_62e5060c-e43e-477d-8e1f-5dfc40dd4393/mariadb-client/0.log"
Nov 27 08:46:49 crc kubenswrapper[4971]: I1127 08:46:49.365975 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"]
Nov 27 08:46:49 crc kubenswrapper[4971]: I1127 08:46:49.374428 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"]
Nov 27 08:46:49 crc kubenswrapper[4971]: I1127 08:46:49.432451 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6xwr\" (UniqueName: \"kubernetes.io/projected/62e5060c-e43e-477d-8e1f-5dfc40dd4393-kube-api-access-n6xwr\") pod \"62e5060c-e43e-477d-8e1f-5dfc40dd4393\" (UID: \"62e5060c-e43e-477d-8e1f-5dfc40dd4393\") "
Nov 27 08:46:49 crc kubenswrapper[4971]: I1127 08:46:49.438102 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62e5060c-e43e-477d-8e1f-5dfc40dd4393-kube-api-access-n6xwr" (OuterVolumeSpecName: "kube-api-access-n6xwr") pod "62e5060c-e43e-477d-8e1f-5dfc40dd4393" (UID: "62e5060c-e43e-477d-8e1f-5dfc40dd4393"). InnerVolumeSpecName "kube-api-access-n6xwr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:46:49 crc kubenswrapper[4971]: I1127 08:46:49.503659 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"]
Nov 27 08:46:49 crc kubenswrapper[4971]: E1127 08:46:49.504413 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62e5060c-e43e-477d-8e1f-5dfc40dd4393" containerName="mariadb-client"
Nov 27 08:46:49 crc kubenswrapper[4971]: I1127 08:46:49.504432 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="62e5060c-e43e-477d-8e1f-5dfc40dd4393" containerName="mariadb-client"
Nov 27 08:46:49 crc kubenswrapper[4971]: I1127 08:46:49.511148 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="62e5060c-e43e-477d-8e1f-5dfc40dd4393" containerName="mariadb-client"
Nov 27 08:46:49 crc kubenswrapper[4971]: I1127 08:46:49.512154 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 27 08:46:49 crc kubenswrapper[4971]: I1127 08:46:49.515775 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Nov 27 08:46:49 crc kubenswrapper[4971]: I1127 08:46:49.538555 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6xwr\" (UniqueName: \"kubernetes.io/projected/62e5060c-e43e-477d-8e1f-5dfc40dd4393-kube-api-access-n6xwr\") on node \"crc\" DevicePath \"\""
Nov 27 08:46:49 crc kubenswrapper[4971]: I1127 08:46:49.640742 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdgsq\" (UniqueName: \"kubernetes.io/projected/88e952b7-e009-4c7c-b65a-116a3840b1f4-kube-api-access-mdgsq\") pod \"mariadb-client\" (UID: \"88e952b7-e009-4c7c-b65a-116a3840b1f4\") " pod="openstack/mariadb-client"
Nov 27 08:46:49 crc kubenswrapper[4971]: I1127 08:46:49.742122 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdgsq\" (UniqueName: \"kubernetes.io/projected/88e952b7-e009-4c7c-b65a-116a3840b1f4-kube-api-access-mdgsq\") pod \"mariadb-client\" (UID: \"88e952b7-e009-4c7c-b65a-116a3840b1f4\") " pod="openstack/mariadb-client"
Nov 27 08:46:49 crc kubenswrapper[4971]: I1127 08:46:49.770626 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdgsq\" (UniqueName: \"kubernetes.io/projected/88e952b7-e009-4c7c-b65a-116a3840b1f4-kube-api-access-mdgsq\") pod \"mariadb-client\" (UID: \"88e952b7-e009-4c7c-b65a-116a3840b1f4\") " pod="openstack/mariadb-client"
Nov 27 08:46:49 crc kubenswrapper[4971]: I1127 08:46:49.841947 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 27 08:46:50 crc kubenswrapper[4971]: I1127 08:46:50.041289 4971 generic.go:334] "Generic (PLEG): container finished" podID="78ac5b1e-b8d1-45ac-aec7-1253421d3c45" containerID="15f9cbba6e5d04ce17cdee6ec85b5304131868a0d2d56f773f0de1c81a970fa3" exitCode=0
Nov 27 08:46:50 crc kubenswrapper[4971]: I1127 08:46:50.041377 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9jwb9" event={"ID":"78ac5b1e-b8d1-45ac-aec7-1253421d3c45","Type":"ContainerDied","Data":"15f9cbba6e5d04ce17cdee6ec85b5304131868a0d2d56f773f0de1c81a970fa3"}
Nov 27 08:46:50 crc kubenswrapper[4971]: I1127 08:46:50.041460 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9jwb9" event={"ID":"78ac5b1e-b8d1-45ac-aec7-1253421d3c45","Type":"ContainerStarted","Data":"9510eaa958662b8a1b5faf824f303201a830d3d6e32a01bc0df138dfa34fca7e"}
Nov 27 08:46:50 crc kubenswrapper[4971]: I1127 08:46:50.046012 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 27 08:46:50 crc kubenswrapper[4971]: I1127 08:46:50.046684 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ea8627b10f57b90914f4ce7a3180cb7ef238bc6f6e77ab2a6ac86abe19d92d8"
Nov 27 08:46:50 crc kubenswrapper[4971]: I1127 08:46:50.073512 4971 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/mariadb-client" oldPodUID="62e5060c-e43e-477d-8e1f-5dfc40dd4393" podUID="88e952b7-e009-4c7c-b65a-116a3840b1f4"
Nov 27 08:46:50 crc kubenswrapper[4971]: I1127 08:46:50.377567 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Nov 27 08:46:50 crc kubenswrapper[4971]: I1127 08:46:50.564882 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62e5060c-e43e-477d-8e1f-5dfc40dd4393" path="/var/lib/kubelet/pods/62e5060c-e43e-477d-8e1f-5dfc40dd4393/volumes"
Nov 27 08:46:51 crc kubenswrapper[4971]: I1127 08:46:51.060669 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9jwb9" event={"ID":"78ac5b1e-b8d1-45ac-aec7-1253421d3c45","Type":"ContainerStarted","Data":"d7ac8857700227a87600d0d345348c61929447cf1feed0bd894ea51baf618e18"}
Nov 27 08:46:51 crc kubenswrapper[4971]: I1127 08:46:51.062733 4971 generic.go:334] "Generic (PLEG): container finished" podID="88e952b7-e009-4c7c-b65a-116a3840b1f4" containerID="776f786014c1b77faefbac7e56e1c4815358958425520d9958305759870b48b0" exitCode=0
Nov 27 08:46:51 crc kubenswrapper[4971]: I1127 08:46:51.063289 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"88e952b7-e009-4c7c-b65a-116a3840b1f4","Type":"ContainerDied","Data":"776f786014c1b77faefbac7e56e1c4815358958425520d9958305759870b48b0"}
Nov 27 08:46:51 crc kubenswrapper[4971]: I1127 08:46:51.063315 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"88e952b7-e009-4c7c-b65a-116a3840b1f4","Type":"ContainerStarted","Data":"eae14fc227d22c43ccdd47989e9a288ab391976905b6e990d156fcd3217a6d73"}
Nov 27 08:46:51 crc kubenswrapper[4971]: I1127 08:46:51.196693 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hztzn"]
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.073226 4971 generic.go:334] "Generic (PLEG): container finished" podID="78ac5b1e-b8d1-45ac-aec7-1253421d3c45" containerID="d7ac8857700227a87600d0d345348c61929447cf1feed0bd894ea51baf618e18" exitCode=0
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.073437 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hztzn" podUID="ecdd35e6-2962-453a-8b8d-9d29012d518d" containerName="registry-server" containerID="cri-o://95a429082be33cfe7febb163c605be47cb7adba1c8db6ce29cb500a20b9e3af5" gracePeriod=2
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.073527 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9jwb9" event={"ID":"78ac5b1e-b8d1-45ac-aec7-1253421d3c45","Type":"ContainerDied","Data":"d7ac8857700227a87600d0d345348c61929447cf1feed0bd894ea51baf618e18"}
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.390347 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.415660 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_88e952b7-e009-4c7c-b65a-116a3840b1f4/mariadb-client/0.log"
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.446804 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"]
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.455332 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"]
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.511607 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdgsq\" (UniqueName: \"kubernetes.io/projected/88e952b7-e009-4c7c-b65a-116a3840b1f4-kube-api-access-mdgsq\") pod \"88e952b7-e009-4c7c-b65a-116a3840b1f4\" (UID: \"88e952b7-e009-4c7c-b65a-116a3840b1f4\") "
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.518498 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88e952b7-e009-4c7c-b65a-116a3840b1f4-kube-api-access-mdgsq" (OuterVolumeSpecName: "kube-api-access-mdgsq") pod "88e952b7-e009-4c7c-b65a-116a3840b1f4" (UID: "88e952b7-e009-4c7c-b65a-116a3840b1f4"). InnerVolumeSpecName "kube-api-access-mdgsq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.518995 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hztzn"
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.561054 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88e952b7-e009-4c7c-b65a-116a3840b1f4" path="/var/lib/kubelet/pods/88e952b7-e009-4c7c-b65a-116a3840b1f4/volumes"
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.613141 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecdd35e6-2962-453a-8b8d-9d29012d518d-utilities\") pod \"ecdd35e6-2962-453a-8b8d-9d29012d518d\" (UID: \"ecdd35e6-2962-453a-8b8d-9d29012d518d\") "
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.613375 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecdd35e6-2962-453a-8b8d-9d29012d518d-catalog-content\") pod \"ecdd35e6-2962-453a-8b8d-9d29012d518d\" (UID: \"ecdd35e6-2962-453a-8b8d-9d29012d518d\") "
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.614147 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecdd35e6-2962-453a-8b8d-9d29012d518d-utilities" (OuterVolumeSpecName: "utilities") pod "ecdd35e6-2962-453a-8b8d-9d29012d518d" (UID: "ecdd35e6-2962-453a-8b8d-9d29012d518d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.617936 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-264qb\" (UniqueName: \"kubernetes.io/projected/ecdd35e6-2962-453a-8b8d-9d29012d518d-kube-api-access-264qb\") pod \"ecdd35e6-2962-453a-8b8d-9d29012d518d\" (UID: \"ecdd35e6-2962-453a-8b8d-9d29012d518d\") "
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.618870 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecdd35e6-2962-453a-8b8d-9d29012d518d-utilities\") on node \"crc\" DevicePath \"\""
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.618898 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdgsq\" (UniqueName: \"kubernetes.io/projected/88e952b7-e009-4c7c-b65a-116a3840b1f4-kube-api-access-mdgsq\") on node \"crc\" DevicePath \"\""
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.621435 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecdd35e6-2962-453a-8b8d-9d29012d518d-kube-api-access-264qb" (OuterVolumeSpecName: "kube-api-access-264qb") pod "ecdd35e6-2962-453a-8b8d-9d29012d518d" (UID: "ecdd35e6-2962-453a-8b8d-9d29012d518d"). InnerVolumeSpecName "kube-api-access-264qb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.631284 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecdd35e6-2962-453a-8b8d-9d29012d518d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ecdd35e6-2962-453a-8b8d-9d29012d518d" (UID: "ecdd35e6-2962-453a-8b8d-9d29012d518d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.720879 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecdd35e6-2962-453a-8b8d-9d29012d518d-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 27 08:46:52 crc kubenswrapper[4971]: I1127 08:46:52.720994 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-264qb\" (UniqueName: \"kubernetes.io/projected/ecdd35e6-2962-453a-8b8d-9d29012d518d-kube-api-access-264qb\") on node \"crc\" DevicePath \"\""
Nov 27 08:46:53 crc kubenswrapper[4971]: I1127 08:46:53.085134 4971 generic.go:334] "Generic (PLEG): container finished" podID="ecdd35e6-2962-453a-8b8d-9d29012d518d" containerID="95a429082be33cfe7febb163c605be47cb7adba1c8db6ce29cb500a20b9e3af5" exitCode=0
Nov 27 08:46:53 crc kubenswrapper[4971]: I1127 08:46:53.085212 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hztzn"
Nov 27 08:46:53 crc kubenswrapper[4971]: I1127 08:46:53.085220 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hztzn" event={"ID":"ecdd35e6-2962-453a-8b8d-9d29012d518d","Type":"ContainerDied","Data":"95a429082be33cfe7febb163c605be47cb7adba1c8db6ce29cb500a20b9e3af5"}
Nov 27 08:46:53 crc kubenswrapper[4971]: I1127 08:46:53.085756 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hztzn" event={"ID":"ecdd35e6-2962-453a-8b8d-9d29012d518d","Type":"ContainerDied","Data":"dfca98842a621059ba9feec71c0dd942a53965578850dc198fdaccc57e69f87a"}
Nov 27 08:46:53 crc kubenswrapper[4971]: I1127 08:46:53.085810 4971 scope.go:117] "RemoveContainer" containerID="95a429082be33cfe7febb163c605be47cb7adba1c8db6ce29cb500a20b9e3af5"
Nov 27 08:46:53 crc kubenswrapper[4971]: I1127 08:46:53.091268 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9jwb9" event={"ID":"78ac5b1e-b8d1-45ac-aec7-1253421d3c45","Type":"ContainerStarted","Data":"ee8fd0887728adacadcdd207629bfe7a3745880c529a137116fa6e9c57dc71d3"}
Nov 27 08:46:53 crc kubenswrapper[4971]: I1127 08:46:53.099480 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 27 08:46:53 crc kubenswrapper[4971]: I1127 08:46:53.112116 4971 scope.go:117] "RemoveContainer" containerID="441e51613be92e1048a04752cd456ee8d1ca791de3141ec36ee5ec15209b8660"
Nov 27 08:46:53 crc kubenswrapper[4971]: I1127 08:46:53.115634 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9jwb9" podStartSLOduration=2.600529519 podStartE2EDuration="5.115611547s" podCreationTimestamp="2025-11-27 08:46:48 +0000 UTC" firstStartedPulling="2025-11-27 08:46:50.044798526 +0000 UTC m=+6848.236842444" lastFinishedPulling="2025-11-27 08:46:52.559880554 +0000 UTC m=+6850.751924472" observedRunningTime="2025-11-27 08:46:53.114739142 +0000 UTC m=+6851.306783080" watchObservedRunningTime="2025-11-27 08:46:53.115611547 +0000 UTC m=+6851.307655465"
Nov 27 08:46:53 crc kubenswrapper[4971]: I1127 08:46:53.138546 4971 scope.go:117] "RemoveContainer" containerID="4a5e3f565699f1745dd209b76fba93491299766ddc39806d96dc3e7889abcfa4"
Nov 27 08:46:53 crc kubenswrapper[4971]: I1127 08:46:53.149332 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hztzn"]
Nov 27 08:46:53 crc kubenswrapper[4971]: I1127 08:46:53.156695 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hztzn"]
Nov 27 08:46:53 crc kubenswrapper[4971]: I1127 08:46:53.167069 4971 scope.go:117] "RemoveContainer" containerID="95a429082be33cfe7febb163c605be47cb7adba1c8db6ce29cb500a20b9e3af5"
Nov 27 08:46:53 crc kubenswrapper[4971]: E1127 08:46:53.167710 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95a429082be33cfe7febb163c605be47cb7adba1c8db6ce29cb500a20b9e3af5\": container with ID starting with 95a429082be33cfe7febb163c605be47cb7adba1c8db6ce29cb500a20b9e3af5 not found: ID does not exist" containerID="95a429082be33cfe7febb163c605be47cb7adba1c8db6ce29cb500a20b9e3af5"
Nov 27 08:46:53 crc kubenswrapper[4971]: I1127 08:46:53.167755 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95a429082be33cfe7febb163c605be47cb7adba1c8db6ce29cb500a20b9e3af5"} err="failed to get container status \"95a429082be33cfe7febb163c605be47cb7adba1c8db6ce29cb500a20b9e3af5\": rpc error: code = NotFound desc = could not find container \"95a429082be33cfe7febb163c605be47cb7adba1c8db6ce29cb500a20b9e3af5\": container with ID starting with 95a429082be33cfe7febb163c605be47cb7adba1c8db6ce29cb500a20b9e3af5 not found: ID does not exist"
Nov 27 08:46:53 crc kubenswrapper[4971]: I1127 08:46:53.167784 4971 scope.go:117] "RemoveContainer" containerID="441e51613be92e1048a04752cd456ee8d1ca791de3141ec36ee5ec15209b8660"
Nov 27 08:46:53 crc kubenswrapper[4971]: E1127 08:46:53.168175 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"441e51613be92e1048a04752cd456ee8d1ca791de3141ec36ee5ec15209b8660\": container with ID starting with 441e51613be92e1048a04752cd456ee8d1ca791de3141ec36ee5ec15209b8660 not found: ID does not exist" containerID="441e51613be92e1048a04752cd456ee8d1ca791de3141ec36ee5ec15209b8660"
Nov 27 08:46:53 crc kubenswrapper[4971]: I1127 08:46:53.168204 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"441e51613be92e1048a04752cd456ee8d1ca791de3141ec36ee5ec15209b8660"} err="failed to get container status \"441e51613be92e1048a04752cd456ee8d1ca791de3141ec36ee5ec15209b8660\": rpc error: code = NotFound desc = could not find container \"441e51613be92e1048a04752cd456ee8d1ca791de3141ec36ee5ec15209b8660\": container with ID starting with 441e51613be92e1048a04752cd456ee8d1ca791de3141ec36ee5ec15209b8660 not found: ID does not exist"
Nov 27 08:46:53 crc kubenswrapper[4971]: I1127 08:46:53.168221 4971 scope.go:117] "RemoveContainer" containerID="4a5e3f565699f1745dd209b76fba93491299766ddc39806d96dc3e7889abcfa4"
Nov 27 08:46:53 crc kubenswrapper[4971]: E1127 08:46:53.168581 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a5e3f565699f1745dd209b76fba93491299766ddc39806d96dc3e7889abcfa4\": container with ID starting with 4a5e3f565699f1745dd209b76fba93491299766ddc39806d96dc3e7889abcfa4 not found: ID does not exist" containerID="4a5e3f565699f1745dd209b76fba93491299766ddc39806d96dc3e7889abcfa4"
Nov 27 08:46:53 crc kubenswrapper[4971]: I1127 08:46:53.168636 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a5e3f565699f1745dd209b76fba93491299766ddc39806d96dc3e7889abcfa4"} err="failed to get container status \"4a5e3f565699f1745dd209b76fba93491299766ddc39806d96dc3e7889abcfa4\": rpc error: code = NotFound desc = could not find container \"4a5e3f565699f1745dd209b76fba93491299766ddc39806d96dc3e7889abcfa4\": container with ID starting with 4a5e3f565699f1745dd209b76fba93491299766ddc39806d96dc3e7889abcfa4 not found: ID does not exist"
Nov 27 08:46:53 crc kubenswrapper[4971]: I1127 08:46:53.168671 4971 scope.go:117] "RemoveContainer" containerID="776f786014c1b77faefbac7e56e1c4815358958425520d9958305759870b48b0"
Nov 27 08:46:54 crc kubenswrapper[4971]: I1127 08:46:54.563011 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecdd35e6-2962-453a-8b8d-9d29012d518d" path="/var/lib/kubelet/pods/ecdd35e6-2962-453a-8b8d-9d29012d518d/volumes"
Nov 27 08:46:56 crc kubenswrapper[4971]: I1127 08:46:56.413255 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 08:46:56 crc kubenswrapper[4971]: I1127 08:46:56.413335 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 08:46:58 crc kubenswrapper[4971]: I1127 08:46:58.566397 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9jwb9"
Nov 27 08:46:58 crc kubenswrapper[4971]: I1127 08:46:58.567430 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9jwb9"
Nov 27 08:46:58 crc kubenswrapper[4971]: I1127 08:46:58.611785 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9jwb9"
Nov 27 08:46:59 crc kubenswrapper[4971]: I1127 08:46:59.201688 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9jwb9"
Nov 27 08:46:59 crc kubenswrapper[4971]: I1127 08:46:59.262620 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9jwb9"]
Nov 27 08:47:01 crc kubenswrapper[4971]: I1127 08:47:01.169198 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9jwb9" podUID="78ac5b1e-b8d1-45ac-aec7-1253421d3c45" containerName="registry-server" containerID="cri-o://ee8fd0887728adacadcdd207629bfe7a3745880c529a137116fa6e9c57dc71d3" gracePeriod=2
Nov 27 08:47:01 crc kubenswrapper[4971]: I1127 08:47:01.775985 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9jwb9"
Nov 27 08:47:01 crc kubenswrapper[4971]: I1127 08:47:01.899722 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78ac5b1e-b8d1-45ac-aec7-1253421d3c45-utilities\") pod \"78ac5b1e-b8d1-45ac-aec7-1253421d3c45\" (UID: \"78ac5b1e-b8d1-45ac-aec7-1253421d3c45\") "
Nov 27 08:47:01 crc kubenswrapper[4971]: I1127 08:47:01.899826 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78ac5b1e-b8d1-45ac-aec7-1253421d3c45-catalog-content\") pod \"78ac5b1e-b8d1-45ac-aec7-1253421d3c45\" (UID: \"78ac5b1e-b8d1-45ac-aec7-1253421d3c45\") "
Nov 27 08:47:01 crc kubenswrapper[4971]: I1127 08:47:01.899893 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hv5nl\" (UniqueName: \"kubernetes.io/projected/78ac5b1e-b8d1-45ac-aec7-1253421d3c45-kube-api-access-hv5nl\") pod \"78ac5b1e-b8d1-45ac-aec7-1253421d3c45\" (UID: \"78ac5b1e-b8d1-45ac-aec7-1253421d3c45\") "
Nov 27 08:47:01 crc kubenswrapper[4971]: I1127 08:47:01.900890 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78ac5b1e-b8d1-45ac-aec7-1253421d3c45-utilities" (OuterVolumeSpecName: "utilities") pod "78ac5b1e-b8d1-45ac-aec7-1253421d3c45" (UID: "78ac5b1e-b8d1-45ac-aec7-1253421d3c45"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 08:47:01 crc kubenswrapper[4971]: I1127 08:47:01.915785 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78ac5b1e-b8d1-45ac-aec7-1253421d3c45-kube-api-access-hv5nl" (OuterVolumeSpecName: "kube-api-access-hv5nl") pod "78ac5b1e-b8d1-45ac-aec7-1253421d3c45" (UID: "78ac5b1e-b8d1-45ac-aec7-1253421d3c45"). InnerVolumeSpecName "kube-api-access-hv5nl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:47:01 crc kubenswrapper[4971]: I1127 08:47:01.953108 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78ac5b1e-b8d1-45ac-aec7-1253421d3c45-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "78ac5b1e-b8d1-45ac-aec7-1253421d3c45" (UID: "78ac5b1e-b8d1-45ac-aec7-1253421d3c45"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 08:47:02 crc kubenswrapper[4971]: I1127 08:47:02.001903 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78ac5b1e-b8d1-45ac-aec7-1253421d3c45-utilities\") on node \"crc\" DevicePath \"\""
Nov 27 08:47:02 crc kubenswrapper[4971]: I1127 08:47:02.001942 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78ac5b1e-b8d1-45ac-aec7-1253421d3c45-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 27 08:47:02 crc kubenswrapper[4971]: I1127 08:47:02.001953 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hv5nl\" (UniqueName: \"kubernetes.io/projected/78ac5b1e-b8d1-45ac-aec7-1253421d3c45-kube-api-access-hv5nl\") on node \"crc\" DevicePath \"\""
Nov 27 08:47:02 crc kubenswrapper[4971]: I1127 08:47:02.185425 4971 generic.go:334] "Generic (PLEG): container finished" podID="78ac5b1e-b8d1-45ac-aec7-1253421d3c45" containerID="ee8fd0887728adacadcdd207629bfe7a3745880c529a137116fa6e9c57dc71d3" exitCode=0
Nov 27 08:47:02 crc kubenswrapper[4971]: I1127 08:47:02.185482 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9jwb9" event={"ID":"78ac5b1e-b8d1-45ac-aec7-1253421d3c45","Type":"ContainerDied","Data":"ee8fd0887728adacadcdd207629bfe7a3745880c529a137116fa6e9c57dc71d3"}
Nov 27 08:47:02 crc kubenswrapper[4971]: I1127 08:47:02.185561 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9jwb9" event={"ID":"78ac5b1e-b8d1-45ac-aec7-1253421d3c45","Type":"ContainerDied","Data":"9510eaa958662b8a1b5faf824f303201a830d3d6e32a01bc0df138dfa34fca7e"}
Nov 27 08:47:02 crc kubenswrapper[4971]: I1127 08:47:02.185585 4971 scope.go:117] "RemoveContainer" containerID="ee8fd0887728adacadcdd207629bfe7a3745880c529a137116fa6e9c57dc71d3"
Nov 27 08:47:02 crc kubenswrapper[4971]: I1127 08:47:02.185620 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9jwb9"
Nov 27 08:47:02 crc kubenswrapper[4971]: I1127 08:47:02.210278 4971 scope.go:117] "RemoveContainer" containerID="d7ac8857700227a87600d0d345348c61929447cf1feed0bd894ea51baf618e18"
Nov 27 08:47:02 crc kubenswrapper[4971]: I1127 08:47:02.221771 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9jwb9"]
Nov 27 08:47:02 crc kubenswrapper[4971]: I1127 08:47:02.228005 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9jwb9"]
Nov 27 08:47:02 crc kubenswrapper[4971]: I1127 08:47:02.246488 4971 scope.go:117] "RemoveContainer" containerID="15f9cbba6e5d04ce17cdee6ec85b5304131868a0d2d56f773f0de1c81a970fa3"
Nov 27 08:47:02 crc kubenswrapper[4971]: I1127 08:47:02.265330 4971 scope.go:117] "RemoveContainer" containerID="ee8fd0887728adacadcdd207629bfe7a3745880c529a137116fa6e9c57dc71d3"
Nov 27 08:47:02 crc kubenswrapper[4971]: E1127 08:47:02.265874 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee8fd0887728adacadcdd207629bfe7a3745880c529a137116fa6e9c57dc71d3\": container with ID starting with ee8fd0887728adacadcdd207629bfe7a3745880c529a137116fa6e9c57dc71d3 not found: ID does not exist" containerID="ee8fd0887728adacadcdd207629bfe7a3745880c529a137116fa6e9c57dc71d3"
Nov 27 08:47:02 crc kubenswrapper[4971]: I1127 08:47:02.265929 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee8fd0887728adacadcdd207629bfe7a3745880c529a137116fa6e9c57dc71d3"} err="failed to get container status \"ee8fd0887728adacadcdd207629bfe7a3745880c529a137116fa6e9c57dc71d3\": rpc error: code = NotFound desc = could not find container \"ee8fd0887728adacadcdd207629bfe7a3745880c529a137116fa6e9c57dc71d3\": container with ID starting with ee8fd0887728adacadcdd207629bfe7a3745880c529a137116fa6e9c57dc71d3 not found: ID does not exist"
Nov 27 08:47:02 crc kubenswrapper[4971]: I1127 08:47:02.265969 4971 scope.go:117] "RemoveContainer" containerID="d7ac8857700227a87600d0d345348c61929447cf1feed0bd894ea51baf618e18"
Nov 27 08:47:02 crc kubenswrapper[4971]: E1127 08:47:02.266802 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7ac8857700227a87600d0d345348c61929447cf1feed0bd894ea51baf618e18\": container with ID starting with d7ac8857700227a87600d0d345348c61929447cf1feed0bd894ea51baf618e18 not found: ID does not exist" containerID="d7ac8857700227a87600d0d345348c61929447cf1feed0bd894ea51baf618e18"
Nov 27 08:47:02 crc kubenswrapper[4971]: I1127 08:47:02.266843 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7ac8857700227a87600d0d345348c61929447cf1feed0bd894ea51baf618e18"} err="failed to get container status \"d7ac8857700227a87600d0d345348c61929447cf1feed0bd894ea51baf618e18\": rpc error: code = NotFound desc = could not find container \"d7ac8857700227a87600d0d345348c61929447cf1feed0bd894ea51baf618e18\": container with ID starting with d7ac8857700227a87600d0d345348c61929447cf1feed0bd894ea51baf618e18 not found: ID does not exist"
Nov 27 08:47:02 crc kubenswrapper[4971]: I1127 08:47:02.266859 4971 scope.go:117] "RemoveContainer" containerID="15f9cbba6e5d04ce17cdee6ec85b5304131868a0d2d56f773f0de1c81a970fa3"
Nov 27 08:47:02 crc kubenswrapper[4971]: E1127 08:47:02.267478 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15f9cbba6e5d04ce17cdee6ec85b5304131868a0d2d56f773f0de1c81a970fa3\": container with ID starting with 15f9cbba6e5d04ce17cdee6ec85b5304131868a0d2d56f773f0de1c81a970fa3 not found: ID does not exist" containerID="15f9cbba6e5d04ce17cdee6ec85b5304131868a0d2d56f773f0de1c81a970fa3"
Nov 27 08:47:02 crc kubenswrapper[4971]: I1127 08:47:02.267514 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15f9cbba6e5d04ce17cdee6ec85b5304131868a0d2d56f773f0de1c81a970fa3"} err="failed to get container status \"15f9cbba6e5d04ce17cdee6ec85b5304131868a0d2d56f773f0de1c81a970fa3\": rpc error: code = NotFound desc = could not find container \"15f9cbba6e5d04ce17cdee6ec85b5304131868a0d2d56f773f0de1c81a970fa3\": container with ID starting with 15f9cbba6e5d04ce17cdee6ec85b5304131868a0d2d56f773f0de1c81a970fa3 not found: ID does not exist"
Nov 27 08:47:02 crc kubenswrapper[4971]: I1127 08:47:02.560002 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78ac5b1e-b8d1-45ac-aec7-1253421d3c45" path="/var/lib/kubelet/pods/78ac5b1e-b8d1-45ac-aec7-1253421d3c45/volumes"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.565203 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-45m2w"]
Nov 27 08:47:12 crc kubenswrapper[4971]: E1127 08:47:12.566377 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78ac5b1e-b8d1-45ac-aec7-1253421d3c45" containerName="extract-utilities"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.566397 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="78ac5b1e-b8d1-45ac-aec7-1253421d3c45" containerName="extract-utilities"
Nov 27 08:47:12 crc kubenswrapper[4971]: E1127 08:47:12.566424 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecdd35e6-2962-453a-8b8d-9d29012d518d" containerName="registry-server"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.566435 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecdd35e6-2962-453a-8b8d-9d29012d518d" containerName="registry-server"
Nov 27 08:47:12 crc kubenswrapper[4971]: E1127 08:47:12.566455 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88e952b7-e009-4c7c-b65a-116a3840b1f4" containerName="mariadb-client"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.566465 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="88e952b7-e009-4c7c-b65a-116a3840b1f4" containerName="mariadb-client"
Nov 27 08:47:12 crc kubenswrapper[4971]: E1127 08:47:12.566494 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78ac5b1e-b8d1-45ac-aec7-1253421d3c45" containerName="registry-server"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.566505 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="78ac5b1e-b8d1-45ac-aec7-1253421d3c45" containerName="registry-server"
Nov 27 08:47:12 crc kubenswrapper[4971]: E1127 08:47:12.566523 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78ac5b1e-b8d1-45ac-aec7-1253421d3c45" containerName="extract-content"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.566551 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="78ac5b1e-b8d1-45ac-aec7-1253421d3c45" containerName="extract-content"
Nov 27 08:47:12 crc kubenswrapper[4971]: E1127 08:47:12.566582 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecdd35e6-2962-453a-8b8d-9d29012d518d" containerName="extract-utilities"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.566592 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecdd35e6-2962-453a-8b8d-9d29012d518d" containerName="extract-utilities"
Nov 27 08:47:12 crc kubenswrapper[4971]: E1127 08:47:12.566607 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecdd35e6-2962-453a-8b8d-9d29012d518d" containerName="extract-content"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.566615 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecdd35e6-2962-453a-8b8d-9d29012d518d" containerName="extract-content"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.566811 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecdd35e6-2962-453a-8b8d-9d29012d518d" containerName="registry-server"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.566840 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="88e952b7-e009-4c7c-b65a-116a3840b1f4" containerName="mariadb-client"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.566852 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="78ac5b1e-b8d1-45ac-aec7-1253421d3c45" containerName="registry-server"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.568449 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-45m2w"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.590464 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-45m2w"]
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.720763 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f33d3322-d126-414a-b601-9eedbc225211-catalog-content\") pod \"certified-operators-45m2w\" (UID: \"f33d3322-d126-414a-b601-9eedbc225211\") " pod="openshift-marketplace/certified-operators-45m2w"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.720836 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f33d3322-d126-414a-b601-9eedbc225211-utilities\") pod \"certified-operators-45m2w\" (UID: \"f33d3322-d126-414a-b601-9eedbc225211\") " pod="openshift-marketplace/certified-operators-45m2w"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.721109 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gp4bf\" (UniqueName: \"kubernetes.io/projected/f33d3322-d126-414a-b601-9eedbc225211-kube-api-access-gp4bf\") pod \"certified-operators-45m2w\" (UID: \"f33d3322-d126-414a-b601-9eedbc225211\") " pod="openshift-marketplace/certified-operators-45m2w"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.822451 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f33d3322-d126-414a-b601-9eedbc225211-catalog-content\") pod \"certified-operators-45m2w\" (UID: \"f33d3322-d126-414a-b601-9eedbc225211\") " pod="openshift-marketplace/certified-operators-45m2w"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.822503 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f33d3322-d126-414a-b601-9eedbc225211-utilities\") pod \"certified-operators-45m2w\" (UID: \"f33d3322-d126-414a-b601-9eedbc225211\") " pod="openshift-marketplace/certified-operators-45m2w"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.822626 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gp4bf\" (UniqueName: \"kubernetes.io/projected/f33d3322-d126-414a-b601-9eedbc225211-kube-api-access-gp4bf\") pod \"certified-operators-45m2w\" (UID: \"f33d3322-d126-414a-b601-9eedbc225211\") " pod="openshift-marketplace/certified-operators-45m2w"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.823083 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f33d3322-d126-414a-b601-9eedbc225211-catalog-content\") pod \"certified-operators-45m2w\" (UID: \"f33d3322-d126-414a-b601-9eedbc225211\") " pod="openshift-marketplace/certified-operators-45m2w"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.823127 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f33d3322-d126-414a-b601-9eedbc225211-utilities\") pod \"certified-operators-45m2w\" (UID: \"f33d3322-d126-414a-b601-9eedbc225211\") " pod="openshift-marketplace/certified-operators-45m2w"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.851570 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gp4bf\" (UniqueName: \"kubernetes.io/projected/f33d3322-d126-414a-b601-9eedbc225211-kube-api-access-gp4bf\") pod \"certified-operators-45m2w\" (UID: \"f33d3322-d126-414a-b601-9eedbc225211\") " pod="openshift-marketplace/certified-operators-45m2w"
Nov 27 08:47:12 crc kubenswrapper[4971]: I1127 08:47:12.891081 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-45m2w"
Nov 27 08:47:13 crc kubenswrapper[4971]: I1127 08:47:13.431795 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-45m2w"]
Nov 27 08:47:14 crc kubenswrapper[4971]: I1127 08:47:14.296791 4971 generic.go:334] "Generic (PLEG): container finished" podID="f33d3322-d126-414a-b601-9eedbc225211" containerID="5f9a047e8556cac8e5f23e167512a605300175feb3c6a92f6f088989e359f4a1" exitCode=0
Nov 27 08:47:14 crc kubenswrapper[4971]: I1127 08:47:14.297046 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-45m2w" event={"ID":"f33d3322-d126-414a-b601-9eedbc225211","Type":"ContainerDied","Data":"5f9a047e8556cac8e5f23e167512a605300175feb3c6a92f6f088989e359f4a1"}
Nov 27 08:47:14 crc kubenswrapper[4971]: I1127 08:47:14.297527 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-45m2w" event={"ID":"f33d3322-d126-414a-b601-9eedbc225211","Type":"ContainerStarted","Data":"cfef44444a729051eb6738185e8770537b4fe51ff8b58816ad81137e3d463c59"}
Nov 27 08:47:19 crc kubenswrapper[4971]: E1127 08:47:19.030133 4971 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.50:33796->38.102.83.50:35357: write tcp 38.102.83.50:33796->38.102.83.50:35357: write: broken pipe
Nov 27 08:47:19 crc kubenswrapper[4971]: I1127 08:47:19.343282 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-45m2w" event={"ID":"f33d3322-d126-414a-b601-9eedbc225211","Type":"ContainerStarted","Data":"40b3161a4ae3603b6df43e280a034ca34c9b425c2fcddb30d676d9c26df08d4a"}
Nov 27 08:47:20 crc kubenswrapper[4971]: I1127 08:47:20.351496 4971 generic.go:334] "Generic (PLEG): container finished" podID="f33d3322-d126-414a-b601-9eedbc225211" containerID="40b3161a4ae3603b6df43e280a034ca34c9b425c2fcddb30d676d9c26df08d4a" exitCode=0
Nov 27 08:47:20 crc kubenswrapper[4971]: I1127 08:47:20.351583 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-45m2w" event={"ID":"f33d3322-d126-414a-b601-9eedbc225211","Type":"ContainerDied","Data":"40b3161a4ae3603b6df43e280a034ca34c9b425c2fcddb30d676d9c26df08d4a"}
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.027682 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.029470 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.031288 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-tsjsz"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.031617 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.032768 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.054337 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.075331 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"]
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.078349 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.088236 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"]
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.090162 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.119668 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"]
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.137755 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"]
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.188631 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27511fdf-56f4-42be-8f6f-bb0cfd874e6b-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"27511fdf-56f4-42be-8f6f-bb0cfd874e6b\") " pod="openstack/ovsdbserver-nb-0"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.188682 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-265d9781-840c-4147-b06c-22b57f5bca06\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-265d9781-840c-4147-b06c-22b57f5bca06\") pod \"ovsdbserver-nb-2\" (UID: \"d33d13ea-43c9-4a85-a638-026123faf7e8\") " pod="openstack/ovsdbserver-nb-2"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.188704 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27511fdf-56f4-42be-8f6f-bb0cfd874e6b-config\") pod \"ovsdbserver-nb-0\" (UID: \"27511fdf-56f4-42be-8f6f-bb0cfd874e6b\") " pod="openstack/ovsdbserver-nb-0"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.189104 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d33d13ea-43c9-4a85-a638-026123faf7e8-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"d33d13ea-43c9-4a85-a638-026123faf7e8\") " pod="openstack/ovsdbserver-nb-2"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.189172 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ad86fb8-ef7c-4f30-87d0-e7d49757ec00-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00\") " pod="openstack/ovsdbserver-nb-1"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.189283 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ad86fb8-ef7c-4f30-87d0-e7d49757ec00-config\") pod \"ovsdbserver-nb-1\" (UID: \"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00\") " pod="openstack/ovsdbserver-nb-1"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.189346 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7ad86fb8-ef7c-4f30-87d0-e7d49757ec00-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00\") " pod="openstack/ovsdbserver-nb-1"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.189397 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f2ed3ff3-5b2b-497c-8589-de2909138930\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f2ed3ff3-5b2b-497c-8589-de2909138930\") pod \"ovsdbserver-nb-1\" (UID: \"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00\") " pod="openstack/ovsdbserver-nb-1"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.189437 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d33d13ea-43c9-4a85-a638-026123faf7e8-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"d33d13ea-43c9-4a85-a638-026123faf7e8\") " pod="openstack/ovsdbserver-nb-2"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.189466 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2zxv\" (UniqueName: \"kubernetes.io/projected/7ad86fb8-ef7c-4f30-87d0-e7d49757ec00-kube-api-access-l2zxv\") pod \"ovsdbserver-nb-1\" (UID: \"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00\") " pod="openstack/ovsdbserver-nb-1"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.189509 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxbfx\" (UniqueName: \"kubernetes.io/projected/d33d13ea-43c9-4a85-a638-026123faf7e8-kube-api-access-hxbfx\") pod \"ovsdbserver-nb-2\" (UID: \"d33d13ea-43c9-4a85-a638-026123faf7e8\") " pod="openstack/ovsdbserver-nb-2"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.189558 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27511fdf-56f4-42be-8f6f-bb0cfd874e6b-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"27511fdf-56f4-42be-8f6f-bb0cfd874e6b\") " pod="openstack/ovsdbserver-nb-0"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.189629 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d33d13ea-43c9-4a85-a638-026123faf7e8-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"d33d13ea-43c9-4a85-a638-026123faf7e8\") " pod="openstack/ovsdbserver-nb-2"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.189686 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7ad86fb8-ef7c-4f30-87d0-e7d49757ec00-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00\") " pod="openstack/ovsdbserver-nb-1"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.189720 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9xqc\" (UniqueName: \"kubernetes.io/projected/27511fdf-56f4-42be-8f6f-bb0cfd874e6b-kube-api-access-v9xqc\") pod \"ovsdbserver-nb-0\" (UID: \"27511fdf-56f4-42be-8f6f-bb0cfd874e6b\") " pod="openstack/ovsdbserver-nb-0"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.189766 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/27511fdf-56f4-42be-8f6f-bb0cfd874e6b-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"27511fdf-56f4-42be-8f6f-bb0cfd874e6b\") " pod="openstack/ovsdbserver-nb-0"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.189833 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-7848e55c-efc3-409b-8436-526e195d2dec\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7848e55c-efc3-409b-8436-526e195d2dec\") pod \"ovsdbserver-nb-0\" (UID: \"27511fdf-56f4-42be-8f6f-bb0cfd874e6b\") " pod="openstack/ovsdbserver-nb-0"
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.189969 4971 reconciler_common.go:245]
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d33d13ea-43c9-4a85-a638-026123faf7e8-config\") pod \"ovsdbserver-nb-2\" (UID: \"d33d13ea-43c9-4a85-a638-026123faf7e8\") " pod="openstack/ovsdbserver-nb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.279868 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.282507 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.285570 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.287069 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.287881 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-tnkn6" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.292118 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9xqc\" (UniqueName: \"kubernetes.io/projected/27511fdf-56f4-42be-8f6f-bb0cfd874e6b-kube-api-access-v9xqc\") pod \"ovsdbserver-nb-0\" (UID: \"27511fdf-56f4-42be-8f6f-bb0cfd874e6b\") " pod="openstack/ovsdbserver-nb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.292174 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/27511fdf-56f4-42be-8f6f-bb0cfd874e6b-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"27511fdf-56f4-42be-8f6f-bb0cfd874e6b\") " pod="openstack/ovsdbserver-nb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.292209 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-7848e55c-efc3-409b-8436-526e195d2dec\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7848e55c-efc3-409b-8436-526e195d2dec\") pod \"ovsdbserver-nb-0\" (UID: \"27511fdf-56f4-42be-8f6f-bb0cfd874e6b\") " pod="openstack/ovsdbserver-nb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.292245 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d33d13ea-43c9-4a85-a638-026123faf7e8-config\") pod \"ovsdbserver-nb-2\" (UID: \"d33d13ea-43c9-4a85-a638-026123faf7e8\") " pod="openstack/ovsdbserver-nb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.292289 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27511fdf-56f4-42be-8f6f-bb0cfd874e6b-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"27511fdf-56f4-42be-8f6f-bb0cfd874e6b\") " pod="openstack/ovsdbserver-nb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.292316 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-265d9781-840c-4147-b06c-22b57f5bca06\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-265d9781-840c-4147-b06c-22b57f5bca06\") pod \"ovsdbserver-nb-2\" (UID: \"d33d13ea-43c9-4a85-a638-026123faf7e8\") " pod="openstack/ovsdbserver-nb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.292339 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/27511fdf-56f4-42be-8f6f-bb0cfd874e6b-config\") pod \"ovsdbserver-nb-0\" (UID: \"27511fdf-56f4-42be-8f6f-bb0cfd874e6b\") " pod="openstack/ovsdbserver-nb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.292362 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d33d13ea-43c9-4a85-a638-026123faf7e8-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"d33d13ea-43c9-4a85-a638-026123faf7e8\") " pod="openstack/ovsdbserver-nb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.292385 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ad86fb8-ef7c-4f30-87d0-e7d49757ec00-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00\") " pod="openstack/ovsdbserver-nb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.292423 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ad86fb8-ef7c-4f30-87d0-e7d49757ec00-config\") pod \"ovsdbserver-nb-1\" (UID: \"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00\") " pod="openstack/ovsdbserver-nb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.292451 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7ad86fb8-ef7c-4f30-87d0-e7d49757ec00-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00\") " pod="openstack/ovsdbserver-nb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.292484 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f2ed3ff3-5b2b-497c-8589-de2909138930\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f2ed3ff3-5b2b-497c-8589-de2909138930\") pod \"ovsdbserver-nb-1\" (UID: \"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00\") " pod="openstack/ovsdbserver-nb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.292508 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d33d13ea-43c9-4a85-a638-026123faf7e8-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"d33d13ea-43c9-4a85-a638-026123faf7e8\") " pod="openstack/ovsdbserver-nb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.292561 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2zxv\" (UniqueName: \"kubernetes.io/projected/7ad86fb8-ef7c-4f30-87d0-e7d49757ec00-kube-api-access-l2zxv\") pod \"ovsdbserver-nb-1\" (UID: \"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00\") " pod="openstack/ovsdbserver-nb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.292586 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxbfx\" (UniqueName: \"kubernetes.io/projected/d33d13ea-43c9-4a85-a638-026123faf7e8-kube-api-access-hxbfx\") pod \"ovsdbserver-nb-2\" (UID: \"d33d13ea-43c9-4a85-a638-026123faf7e8\") " pod="openstack/ovsdbserver-nb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.292616 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27511fdf-56f4-42be-8f6f-bb0cfd874e6b-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"27511fdf-56f4-42be-8f6f-bb0cfd874e6b\") " pod="openstack/ovsdbserver-nb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.292662 
4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d33d13ea-43c9-4a85-a638-026123faf7e8-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"d33d13ea-43c9-4a85-a638-026123faf7e8\") " pod="openstack/ovsdbserver-nb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.292696 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7ad86fb8-ef7c-4f30-87d0-e7d49757ec00-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00\") " pod="openstack/ovsdbserver-nb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.293302 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7ad86fb8-ef7c-4f30-87d0-e7d49757ec00-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00\") " pod="openstack/ovsdbserver-nb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.294014 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/27511fdf-56f4-42be-8f6f-bb0cfd874e6b-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"27511fdf-56f4-42be-8f6f-bb0cfd874e6b\") " pod="openstack/ovsdbserver-nb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.295228 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d33d13ea-43c9-4a85-a638-026123faf7e8-config\") pod \"ovsdbserver-nb-2\" (UID: \"d33d13ea-43c9-4a85-a638-026123faf7e8\") " pod="openstack/ovsdbserver-nb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.300139 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27511fdf-56f4-42be-8f6f-bb0cfd874e6b-config\") pod \"ovsdbserver-nb-0\" (UID: \"27511fdf-56f4-42be-8f6f-bb0cfd874e6b\") " pod="openstack/ovsdbserver-nb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.300412 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27511fdf-56f4-42be-8f6f-bb0cfd874e6b-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"27511fdf-56f4-42be-8f6f-bb0cfd874e6b\") " pod="openstack/ovsdbserver-nb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.300970 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.302861 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ad86fb8-ef7c-4f30-87d0-e7d49757ec00-config\") pod \"ovsdbserver-nb-1\" (UID: \"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00\") " pod="openstack/ovsdbserver-nb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.303145 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7ad86fb8-ef7c-4f30-87d0-e7d49757ec00-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00\") " pod="openstack/ovsdbserver-nb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.303612 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27511fdf-56f4-42be-8f6f-bb0cfd874e6b-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: 
\"27511fdf-56f4-42be-8f6f-bb0cfd874e6b\") " pod="openstack/ovsdbserver-nb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.304049 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d33d13ea-43c9-4a85-a638-026123faf7e8-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"d33d13ea-43c9-4a85-a638-026123faf7e8\") " pod="openstack/ovsdbserver-nb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.306588 4971 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.306625 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f2ed3ff3-5b2b-497c-8589-de2909138930\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f2ed3ff3-5b2b-497c-8589-de2909138930\") pod \"ovsdbserver-nb-1\" (UID: \"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2eeb2401b0c7086061a4c0e4af1b4eaf3d1bdb565ba30b92e3b46735a22dba27/globalmount\"" pod="openstack/ovsdbserver-nb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.307599 4971 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.307630 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-7848e55c-efc3-409b-8436-526e195d2dec\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7848e55c-efc3-409b-8436-526e195d2dec\") pod \"ovsdbserver-nb-0\" (UID: \"27511fdf-56f4-42be-8f6f-bb0cfd874e6b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/13254d0f9a55f76b2f2f3301e2c03c01964166b49d1b2cc5c1ff88de52babb66/globalmount\"" pod="openstack/ovsdbserver-nb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.308075 4971 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.308102 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-265d9781-840c-4147-b06c-22b57f5bca06\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-265d9781-840c-4147-b06c-22b57f5bca06\") pod \"ovsdbserver-nb-2\" (UID: \"d33d13ea-43c9-4a85-a638-026123faf7e8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1772406ae332e03c8473f37a54722c0f4da1b835c3e3016713e2dc0bf323e192/globalmount\"" pod="openstack/ovsdbserver-nb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.308636 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d33d13ea-43c9-4a85-a638-026123faf7e8-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"d33d13ea-43c9-4a85-a638-026123faf7e8\") " pod="openstack/ovsdbserver-nb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.314362 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d33d13ea-43c9-4a85-a638-026123faf7e8-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"d33d13ea-43c9-4a85-a638-026123faf7e8\") " pod="openstack/ovsdbserver-nb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.327090 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9xqc\" (UniqueName: \"kubernetes.io/projected/27511fdf-56f4-42be-8f6f-bb0cfd874e6b-kube-api-access-v9xqc\") pod \"ovsdbserver-nb-0\" (UID: \"27511fdf-56f4-42be-8f6f-bb0cfd874e6b\") " pod="openstack/ovsdbserver-nb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.333974 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2zxv\" (UniqueName: \"kubernetes.io/projected/7ad86fb8-ef7c-4f30-87d0-e7d49757ec00-kube-api-access-l2zxv\") pod \"ovsdbserver-nb-1\" (UID: \"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00\") " pod="openstack/ovsdbserver-nb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.334163 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ad86fb8-ef7c-4f30-87d0-e7d49757ec00-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00\") " pod="openstack/ovsdbserver-nb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.335986 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"] Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.338157 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.350039 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxbfx\" (UniqueName: \"kubernetes.io/projected/d33d13ea-43c9-4a85-a638-026123faf7e8-kube-api-access-hxbfx\") pod \"ovsdbserver-nb-2\" (UID: \"d33d13ea-43c9-4a85-a638-026123faf7e8\") " pod="openstack/ovsdbserver-nb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.372675 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"] Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.378087 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.394700 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5fb7afd5-f25d-4315-92e5-89c9f46b6eae-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"5fb7afd5-f25d-4315-92e5-89c9f46b6eae\") " pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.394757 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-150690f6-37cf-4a11-b1fb-b7a783d8c8a9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-150690f6-37cf-4a11-b1fb-b7a783d8c8a9\") pod \"ovsdbserver-sb-0\" (UID: \"5fb7afd5-f25d-4315-92e5-89c9f46b6eae\") " pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.394811 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5fb7afd5-f25d-4315-92e5-89c9f46b6eae-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"5fb7afd5-f25d-4315-92e5-89c9f46b6eae\") " pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.394829 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fb7afd5-f25d-4315-92e5-89c9f46b6eae-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"5fb7afd5-f25d-4315-92e5-89c9f46b6eae\") " pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.394849 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fb7afd5-f25d-4315-92e5-89c9f46b6eae-config\") pod \"ovsdbserver-sb-0\" (UID: \"5fb7afd5-f25d-4315-92e5-89c9f46b6eae\") " pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.394879 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gw9r\" (UniqueName: \"kubernetes.io/projected/5fb7afd5-f25d-4315-92e5-89c9f46b6eae-kube-api-access-8gw9r\") pod \"ovsdbserver-sb-0\" (UID: \"5fb7afd5-f25d-4315-92e5-89c9f46b6eae\") " pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.402436 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f2ed3ff3-5b2b-497c-8589-de2909138930\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f2ed3ff3-5b2b-497c-8589-de2909138930\") pod \"ovsdbserver-nb-1\" (UID: \"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00\") " pod="openstack/ovsdbserver-nb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.402789 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-7848e55c-efc3-409b-8436-526e195d2dec\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7848e55c-efc3-409b-8436-526e195d2dec\") pod \"ovsdbserver-nb-0\" (UID: \"27511fdf-56f4-42be-8f6f-bb0cfd874e6b\") " pod="openstack/ovsdbserver-nb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.406737 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-265d9781-840c-4147-b06c-22b57f5bca06\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-265d9781-840c-4147-b06c-22b57f5bca06\") pod \"ovsdbserver-nb-2\" (UID: \"d33d13ea-43c9-4a85-a638-026123faf7e8\") " 
pod="openstack/ovsdbserver-nb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.417790 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.421670 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.447309 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.464627 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-45m2w" podStartSLOduration=2.636818231 podStartE2EDuration="9.46460651s" podCreationTimestamp="2025-11-27 08:47:12 +0000 UTC" firstStartedPulling="2025-11-27 08:47:14.300660256 +0000 UTC m=+6872.492704184" lastFinishedPulling="2025-11-27 08:47:21.128448535 +0000 UTC m=+6879.320492463" observedRunningTime="2025-11-27 08:47:21.453129932 +0000 UTC m=+6879.645173850" watchObservedRunningTime="2025-11-27 08:47:21.46460651 +0000 UTC m=+6879.656650428" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.496871 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3d3c4964-3f5f-40eb-a660-63b41ed0a3bc-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc\") " pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.497222 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3d3c4964-3f5f-40eb-a660-63b41ed0a3bc-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc\") " pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.497371 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-79a2e51f-c024-4de4-a86b-3b836f602a42\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-79a2e51f-c024-4de4-a86b-3b836f602a42\") pod \"ovsdbserver-sb-2\" (UID: \"d85a4597-b37a-4ddb-943e-1c2c7fb9693d\") " pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.497503 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4z85k\" (UniqueName: \"kubernetes.io/projected/3d3c4964-3f5f-40eb-a660-63b41ed0a3bc-kube-api-access-4z85k\") pod \"ovsdbserver-sb-1\" (UID: \"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc\") " pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.497657 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d85a4597-b37a-4ddb-943e-1c2c7fb9693d-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"d85a4597-b37a-4ddb-943e-1c2c7fb9693d\") " pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.497838 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d3c4964-3f5f-40eb-a660-63b41ed0a3bc-config\") pod \"ovsdbserver-sb-1\" (UID: \"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc\") " pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.497979 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d85a4597-b37a-4ddb-943e-1c2c7fb9693d-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"d85a4597-b37a-4ddb-943e-1c2c7fb9693d\") " pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.498157 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbpq6\" (UniqueName: \"kubernetes.io/projected/d85a4597-b37a-4ddb-943e-1c2c7fb9693d-kube-api-access-mbpq6\") pod \"ovsdbserver-sb-2\" (UID: \"d85a4597-b37a-4ddb-943e-1c2c7fb9693d\") " pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.498290 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5fb7afd5-f25d-4315-92e5-89c9f46b6eae-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"5fb7afd5-f25d-4315-92e5-89c9f46b6eae\") " pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.498432 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c96cf306-64ff-46f4-bb62-6165387b9c76\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c96cf306-64ff-46f4-bb62-6165387b9c76\") pod \"ovsdbserver-sb-1\" (UID: \"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc\") " pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.498575 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d85a4597-b37a-4ddb-943e-1c2c7fb9693d-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"d85a4597-b37a-4ddb-943e-1c2c7fb9693d\") " pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.498675 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-150690f6-37cf-4a11-b1fb-b7a783d8c8a9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-150690f6-37cf-4a11-b1fb-b7a783d8c8a9\") pod \"ovsdbserver-sb-0\" (UID: \"5fb7afd5-f25d-4315-92e5-89c9f46b6eae\") " pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.498774 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d3c4964-3f5f-40eb-a660-63b41ed0a3bc-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc\") " pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.498871 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d85a4597-b37a-4ddb-943e-1c2c7fb9693d-config\") pod \"ovsdbserver-sb-2\" (UID: \"d85a4597-b37a-4ddb-943e-1c2c7fb9693d\") " pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.499068 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5fb7afd5-f25d-4315-92e5-89c9f46b6eae-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"5fb7afd5-f25d-4315-92e5-89c9f46b6eae\") " pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.499113 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5fb7afd5-f25d-4315-92e5-89c9f46b6eae-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"5fb7afd5-f25d-4315-92e5-89c9f46b6eae\") " pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.499141 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fb7afd5-f25d-4315-92e5-89c9f46b6eae-config\") pod \"ovsdbserver-sb-0\" (UID: \"5fb7afd5-f25d-4315-92e5-89c9f46b6eae\") " pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.499199 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gw9r\" (UniqueName: \"kubernetes.io/projected/5fb7afd5-f25d-4315-92e5-89c9f46b6eae-kube-api-access-8gw9r\") pod \"ovsdbserver-sb-0\" (UID: \"5fb7afd5-f25d-4315-92e5-89c9f46b6eae\") " pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.500008 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5fb7afd5-f25d-4315-92e5-89c9f46b6eae-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"5fb7afd5-f25d-4315-92e5-89c9f46b6eae\") " pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.500043 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5fb7afd5-f25d-4315-92e5-89c9f46b6eae-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"5fb7afd5-f25d-4315-92e5-89c9f46b6eae\") " pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.501783 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fb7afd5-f25d-4315-92e5-89c9f46b6eae-config\") pod \"ovsdbserver-sb-0\" (UID: \"5fb7afd5-f25d-4315-92e5-89c9f46b6eae\") " pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.502605 4971 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.502663 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-150690f6-37cf-4a11-b1fb-b7a783d8c8a9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-150690f6-37cf-4a11-b1fb-b7a783d8c8a9\") pod \"ovsdbserver-sb-0\" (UID: \"5fb7afd5-f25d-4315-92e5-89c9f46b6eae\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/70785e147953cc0ef1ca534d0c39cfde488f35f56b4a597ebb07fb160e7549da/globalmount\"" pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.509768 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fb7afd5-f25d-4315-92e5-89c9f46b6eae-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"5fb7afd5-f25d-4315-92e5-89c9f46b6eae\") " pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.518559 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gw9r\" (UniqueName: \"kubernetes.io/projected/5fb7afd5-f25d-4315-92e5-89c9f46b6eae-kube-api-access-8gw9r\") pod \"ovsdbserver-sb-0\" (UID: \"5fb7afd5-f25d-4315-92e5-89c9f46b6eae\") " pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.539499 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-150690f6-37cf-4a11-b1fb-b7a783d8c8a9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-150690f6-37cf-4a11-b1fb-b7a783d8c8a9\") pod \"ovsdbserver-sb-0\" (UID: \"5fb7afd5-f25d-4315-92e5-89c9f46b6eae\") " pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.600978 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-79a2e51f-c024-4de4-a86b-3b836f602a42\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-79a2e51f-c024-4de4-a86b-3b836f602a42\") pod \"ovsdbserver-sb-2\" (UID: \"d85a4597-b37a-4ddb-943e-1c2c7fb9693d\") " pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.601513 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4z85k\" (UniqueName: \"kubernetes.io/projected/3d3c4964-3f5f-40eb-a660-63b41ed0a3bc-kube-api-access-4z85k\") pod \"ovsdbserver-sb-1\" (UID: \"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc\") " pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.601551 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d85a4597-b37a-4ddb-943e-1c2c7fb9693d-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"d85a4597-b37a-4ddb-943e-1c2c7fb9693d\") " pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.601580 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d3c4964-3f5f-40eb-a660-63b41ed0a3bc-config\") pod \"ovsdbserver-sb-1\" (UID: \"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc\") " pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.601595 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d85a4597-b37a-4ddb-943e-1c2c7fb9693d-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"d85a4597-b37a-4ddb-943e-1c2c7fb9693d\") " 
pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.601615 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbpq6\" (UniqueName: \"kubernetes.io/projected/d85a4597-b37a-4ddb-943e-1c2c7fb9693d-kube-api-access-mbpq6\") pod \"ovsdbserver-sb-2\" (UID: \"d85a4597-b37a-4ddb-943e-1c2c7fb9693d\") " pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.601638 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c96cf306-64ff-46f4-bb62-6165387b9c76\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c96cf306-64ff-46f4-bb62-6165387b9c76\") pod \"ovsdbserver-sb-1\" (UID: \"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc\") " pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.601657 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d85a4597-b37a-4ddb-943e-1c2c7fb9693d-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"d85a4597-b37a-4ddb-943e-1c2c7fb9693d\") " pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.601681 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d3c4964-3f5f-40eb-a660-63b41ed0a3bc-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc\") " pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.601705 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d85a4597-b37a-4ddb-943e-1c2c7fb9693d-config\") pod \"ovsdbserver-sb-2\" (UID: \"d85a4597-b37a-4ddb-943e-1c2c7fb9693d\") " pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.601745 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3d3c4964-3f5f-40eb-a660-63b41ed0a3bc-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc\") " pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.601780 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3d3c4964-3f5f-40eb-a660-63b41ed0a3bc-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc\") " pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.602996 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3d3c4964-3f5f-40eb-a660-63b41ed0a3bc-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc\") " pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.605526 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d85a4597-b37a-4ddb-943e-1c2c7fb9693d-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"d85a4597-b37a-4ddb-943e-1c2c7fb9693d\") " pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.606374 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d3c4964-3f5f-40eb-a660-63b41ed0a3bc-config\") pod 
\"ovsdbserver-sb-1\" (UID: \"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc\") " pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.606758 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d85a4597-b37a-4ddb-943e-1c2c7fb9693d-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"d85a4597-b37a-4ddb-943e-1c2c7fb9693d\") " pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.614221 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d3c4964-3f5f-40eb-a660-63b41ed0a3bc-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc\") " pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.614442 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d85a4597-b37a-4ddb-943e-1c2c7fb9693d-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"d85a4597-b37a-4ddb-943e-1c2c7fb9693d\") " pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.614617 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3d3c4964-3f5f-40eb-a660-63b41ed0a3bc-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc\") " pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.622370 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d85a4597-b37a-4ddb-943e-1c2c7fb9693d-config\") pod \"ovsdbserver-sb-2\" (UID: \"d85a4597-b37a-4ddb-943e-1c2c7fb9693d\") " pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.623847 4971 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.623879 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-79a2e51f-c024-4de4-a86b-3b836f602a42\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-79a2e51f-c024-4de4-a86b-3b836f602a42\") pod \"ovsdbserver-sb-2\" (UID: \"d85a4597-b37a-4ddb-943e-1c2c7fb9693d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6ca6f1fba2025208a1354e6366eb6b173dbdda947758052731f7ea1a2eef426f/globalmount\"" pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.624125 4971 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.624162 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c96cf306-64ff-46f4-bb62-6165387b9c76\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c96cf306-64ff-46f4-bb62-6165387b9c76\") pod \"ovsdbserver-sb-1\" (UID: \"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d045775accc13907748d5714636b7eecc7fc35d9cce7714a62cbf1e62228996f/globalmount\"" pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.625918 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4z85k\" (UniqueName: \"kubernetes.io/projected/3d3c4964-3f5f-40eb-a660-63b41ed0a3bc-kube-api-access-4z85k\") pod \"ovsdbserver-sb-1\" (UID: \"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc\") " pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.635180 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbpq6\" (UniqueName: \"kubernetes.io/projected/d85a4597-b37a-4ddb-943e-1c2c7fb9693d-kube-api-access-mbpq6\") pod \"ovsdbserver-sb-2\" (UID: \"d85a4597-b37a-4ddb-943e-1c2c7fb9693d\") " pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.649211 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.663492 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c96cf306-64ff-46f4-bb62-6165387b9c76\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c96cf306-64ff-46f4-bb62-6165387b9c76\") pod \"ovsdbserver-sb-1\" (UID: \"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc\") " pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.668963 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-79a2e51f-c024-4de4-a86b-3b836f602a42\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-79a2e51f-c024-4de4-a86b-3b836f602a42\") pod \"ovsdbserver-sb-2\" (UID: \"d85a4597-b37a-4ddb-943e-1c2c7fb9693d\") " pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.697414 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.738708 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.766042 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:21 crc kubenswrapper[4971]: I1127 08:47:21.819101 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:22 crc kubenswrapper[4971]: I1127 08:47:22.110712 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Nov 27 08:47:22 crc kubenswrapper[4971]: I1127 08:47:22.300222 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Nov 27 08:47:22 crc kubenswrapper[4971]: I1127 08:47:22.405417 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 27 08:47:22 crc kubenswrapper[4971]: I1127 08:47:22.440371 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"d33d13ea-43c9-4a85-a638-026123faf7e8","Type":"ContainerStarted","Data":"56a3108ded706f99ab182ea5ee87c2ddcde2559718c90bf93e99c67c4d381bf8"} Nov 27 08:47:22 crc kubenswrapper[4971]: I1127 08:47:22.445047 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"27511fdf-56f4-42be-8f6f-bb0cfd874e6b","Type":"ContainerStarted","Data":"61fc431698220f35f99e7363316a6e12a3e5bcf8b5499b6a5ac9d70e68b337de"} Nov 27 08:47:22 crc kubenswrapper[4971]: I1127 08:47:22.447197 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00","Type":"ContainerStarted","Data":"2566b502da1514cc845c9b560e87cdd170d3c61ecc57980ec827c4c2eda905c4"} Nov 27 08:47:22 crc kubenswrapper[4971]: I1127 08:47:22.464480 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-45m2w" event={"ID":"f33d3322-d126-414a-b601-9eedbc225211","Type":"ContainerStarted","Data":"a50d0ded044bca6a2e073bd1aae4ed13eebbb7cad6d1c314f242f2e763be07a4"} Nov 27 08:47:22 crc kubenswrapper[4971]: I1127 08:47:22.504908 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Nov 27 08:47:22 crc kubenswrapper[4971]: W1127 08:47:22.516760 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d3c4964_3f5f_40eb_a660_63b41ed0a3bc.slice/crio-e76f15f4c7bb1d39efb23ce0c43fd72431128fde7ba87968ee54b2d3038c7376 WatchSource:0}: Error finding container e76f15f4c7bb1d39efb23ce0c43fd72431128fde7ba87968ee54b2d3038c7376: Status 404 returned error can't find the container with id e76f15f4c7bb1d39efb23ce0c43fd72431128fde7ba87968ee54b2d3038c7376 Nov 27 08:47:22 crc kubenswrapper[4971]: I1127 08:47:22.585273 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Nov 27 08:47:22 crc kubenswrapper[4971]: W1127 08:47:22.604687 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd85a4597_b37a_4ddb_943e_1c2c7fb9693d.slice/crio-fcbef869015c6dd75a8bea7b229161deeacf06cdcf02d1c4cacee18aa277c54a WatchSource:0}: Error finding container fcbef869015c6dd75a8bea7b229161deeacf06cdcf02d1c4cacee18aa277c54a: Status 404 returned error can't find the container with id fcbef869015c6dd75a8bea7b229161deeacf06cdcf02d1c4cacee18aa277c54a Nov 27 08:47:22 crc kubenswrapper[4971]: I1127 08:47:22.891287 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-45m2w" Nov 27 08:47:22 crc kubenswrapper[4971]: I1127 08:47:22.891364 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-45m2w" Nov 27 08:47:23 crc kubenswrapper[4971]: 
I1127 08:47:23.078153 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 27 08:47:23 crc kubenswrapper[4971]: I1127 08:47:23.474984 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"d85a4597-b37a-4ddb-943e-1c2c7fb9693d","Type":"ContainerStarted","Data":"fcbef869015c6dd75a8bea7b229161deeacf06cdcf02d1c4cacee18aa277c54a"} Nov 27 08:47:23 crc kubenswrapper[4971]: I1127 08:47:23.478142 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc","Type":"ContainerStarted","Data":"e76f15f4c7bb1d39efb23ce0c43fd72431128fde7ba87968ee54b2d3038c7376"} Nov 27 08:47:23 crc kubenswrapper[4971]: I1127 08:47:23.479977 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"5fb7afd5-f25d-4315-92e5-89c9f46b6eae","Type":"ContainerStarted","Data":"a092c3deca5c7a5c12882a159eac25cf244a0db7a8875c2188efc0a293bb38e2"} Nov 27 08:47:23 crc kubenswrapper[4971]: I1127 08:47:23.957642 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-45m2w" podUID="f33d3322-d126-414a-b601-9eedbc225211" containerName="registry-server" probeResult="failure" output=< Nov 27 08:47:23 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 08:47:23 crc kubenswrapper[4971]: > Nov 27 08:47:26 crc kubenswrapper[4971]: I1127 08:47:26.413672 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:47:26 crc kubenswrapper[4971]: I1127 08:47:26.414179 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:47:26 crc kubenswrapper[4971]: I1127 08:47:26.414238 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 08:47:26 crc kubenswrapper[4971]: I1127 08:47:26.415497 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"972a96022e7888e797ef56d94e28ce8ef6b71b2858abcc7c40439a752ca23142"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 08:47:26 crc kubenswrapper[4971]: I1127 08:47:26.415606 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://972a96022e7888e797ef56d94e28ce8ef6b71b2858abcc7c40439a752ca23142" gracePeriod=600 Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.515986 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"27511fdf-56f4-42be-8f6f-bb0cfd874e6b","Type":"ContainerStarted","Data":"d5fd5b7c3f0561a410036ca390e26d3bf5c7e7d27b8925b03daf7bde88ba381d"} Nov 27 08:47:27 crc 
kubenswrapper[4971]: I1127 08:47:27.516962 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"27511fdf-56f4-42be-8f6f-bb0cfd874e6b","Type":"ContainerStarted","Data":"22e4618a0cabcd13be0187500e4a8c0cc7bb957b0141b3cdd92cc91960f76441"} Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.518628 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00","Type":"ContainerStarted","Data":"df14ab0d0bf6ac93d33bc73956165d213f3b47c5e78b9af3a64e7ccf147e4601"} Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.519095 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"7ad86fb8-ef7c-4f30-87d0-e7d49757ec00","Type":"ContainerStarted","Data":"6b71f4c5fe65f0fb4e922378a002e1390d104abfba6b9f06565d8cf1927788f3"} Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.522470 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="972a96022e7888e797ef56d94e28ce8ef6b71b2858abcc7c40439a752ca23142" exitCode=0 Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.522517 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"972a96022e7888e797ef56d94e28ce8ef6b71b2858abcc7c40439a752ca23142"} Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.522573 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4"} Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.522594 4971 scope.go:117] "RemoveContainer" containerID="8779d2c056ac81dbc96266ed24a74e46ca57847727d1c7a9ce34603cabd43c6d" Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.526312 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc","Type":"ContainerStarted","Data":"7f2ca3a864d3dd2e995c033a5e3b9ee4c0a1432b5f372323f821d1b8fd5ae598"} Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.526835 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"3d3c4964-3f5f-40eb-a660-63b41ed0a3bc","Type":"ContainerStarted","Data":"ac207767a1a8fd8e5e71396803270a70625ac15e01ce148e9c382cc9d0985894"} Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.528682 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"5fb7afd5-f25d-4315-92e5-89c9f46b6eae","Type":"ContainerStarted","Data":"8bbc1be390ae3133e0daa7f5cf0269ca673762c5553d318ccfee9dffefba67aa"} Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.528749 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"5fb7afd5-f25d-4315-92e5-89c9f46b6eae","Type":"ContainerStarted","Data":"a28f3afbc9a413ffba57400404901311da5636a66f0d22b45c310a3259afe592"} Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.533641 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"d85a4597-b37a-4ddb-943e-1c2c7fb9693d","Type":"ContainerStarted","Data":"9fc47c1c6e008c4744d452ad7dd6579455e8c8380a1722deca843bcfaaac5244"} Nov 27 08:47:27 crc kubenswrapper[4971]: 
I1127 08:47:27.533695 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"d85a4597-b37a-4ddb-943e-1c2c7fb9693d","Type":"ContainerStarted","Data":"d65623deb9101d98189503d50b21399b94c295b1b40d5ec011920f170435f1cf"} Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.541396 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"d33d13ea-43c9-4a85-a638-026123faf7e8","Type":"ContainerStarted","Data":"3f7e5a24e2f0a5fbb80fdfb33c229b4e80259fd3ee35400251678ae1ab1643b4"} Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.541461 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"d33d13ea-43c9-4a85-a638-026123faf7e8","Type":"ContainerStarted","Data":"91a234bbc8e55598afaf803e742c891d2cbad63f2950ce0679103a613c2022fe"} Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.541686 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=3.280920419 podStartE2EDuration="7.541660852s" podCreationTimestamp="2025-11-27 08:47:20 +0000 UTC" firstStartedPulling="2025-11-27 08:47:22.419303058 +0000 UTC m=+6880.611346976" lastFinishedPulling="2025-11-27 08:47:26.680043491 +0000 UTC m=+6884.872087409" observedRunningTime="2025-11-27 08:47:27.537109882 +0000 UTC m=+6885.729153790" watchObservedRunningTime="2025-11-27 08:47:27.541660852 +0000 UTC m=+6885.733704770" Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.567659 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=3.9636122670000002 podStartE2EDuration="7.567629916s" podCreationTimestamp="2025-11-27 08:47:20 +0000 UTC" firstStartedPulling="2025-11-27 08:47:23.093789481 +0000 UTC m=+6881.285833399" lastFinishedPulling="2025-11-27 08:47:26.69780713 +0000 UTC m=+6884.889851048" observedRunningTime="2025-11-27 08:47:27.561164921 +0000 UTC m=+6885.753208839" watchObservedRunningTime="2025-11-27 08:47:27.567629916 +0000 UTC m=+6885.759673864" Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.588876 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=3.106198316 podStartE2EDuration="7.588859634s" podCreationTimestamp="2025-11-27 08:47:20 +0000 UTC" firstStartedPulling="2025-11-27 08:47:22.536245956 +0000 UTC m=+6880.728289874" lastFinishedPulling="2025-11-27 08:47:27.018907274 +0000 UTC m=+6885.210951192" observedRunningTime="2025-11-27 08:47:27.584047596 +0000 UTC m=+6885.776091514" watchObservedRunningTime="2025-11-27 08:47:27.588859634 +0000 UTC m=+6885.780903552" Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.609184 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=3.04035919 podStartE2EDuration="7.609166365s" podCreationTimestamp="2025-11-27 08:47:20 +0000 UTC" firstStartedPulling="2025-11-27 08:47:22.111231096 +0000 UTC m=+6880.303275014" lastFinishedPulling="2025-11-27 08:47:26.680038271 +0000 UTC m=+6884.872082189" observedRunningTime="2025-11-27 08:47:27.603581165 +0000 UTC m=+6885.795625083" watchObservedRunningTime="2025-11-27 08:47:27.609166365 +0000 UTC m=+6885.801210283" Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.649926 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 
08:47:27.654408 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=3.581608419 podStartE2EDuration="7.65438348s" podCreationTimestamp="2025-11-27 08:47:20 +0000 UTC" firstStartedPulling="2025-11-27 08:47:22.607322452 +0000 UTC m=+6880.799366370" lastFinishedPulling="2025-11-27 08:47:26.680097513 +0000 UTC m=+6884.872141431" observedRunningTime="2025-11-27 08:47:27.650602412 +0000 UTC m=+6885.842646370" watchObservedRunningTime="2025-11-27 08:47:27.65438348 +0000 UTC m=+6885.846427398" Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.677336 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=3.095164351 podStartE2EDuration="7.677318577s" podCreationTimestamp="2025-11-27 08:47:20 +0000 UTC" firstStartedPulling="2025-11-27 08:47:22.323609838 +0000 UTC m=+6880.515653756" lastFinishedPulling="2025-11-27 08:47:26.905764064 +0000 UTC m=+6885.097807982" observedRunningTime="2025-11-27 08:47:27.674434114 +0000 UTC m=+6885.866478042" watchObservedRunningTime="2025-11-27 08:47:27.677318577 +0000 UTC m=+6885.869362495" Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.714431 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2" Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.740125 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.766904 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:27 crc kubenswrapper[4971]: I1127 08:47:27.819612 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:30 crc kubenswrapper[4971]: I1127 08:47:30.423029 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1" Nov 27 08:47:30 crc kubenswrapper[4971]: I1127 08:47:30.466347 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1" Nov 27 08:47:30 crc kubenswrapper[4971]: I1127 08:47:30.569616 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1" Nov 27 08:47:30 crc kubenswrapper[4971]: I1127 08:47:30.689435 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Nov 27 08:47:30 crc kubenswrapper[4971]: I1127 08:47:30.689973 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Nov 27 08:47:30 crc kubenswrapper[4971]: I1127 08:47:30.735157 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2" Nov 27 08:47:30 crc kubenswrapper[4971]: I1127 08:47:30.735619 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2" Nov 27 08:47:30 crc kubenswrapper[4971]: I1127 08:47:30.781726 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:30 crc kubenswrapper[4971]: I1127 08:47:30.782524 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:30 crc kubenswrapper[4971]: I1127 08:47:30.809421 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1" Nov 
27 08:47:30 crc kubenswrapper[4971]: I1127 08:47:30.809881 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:30 crc kubenswrapper[4971]: I1127 08:47:30.869267 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:30 crc kubenswrapper[4971]: I1127 08:47:30.870211 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:31 crc kubenswrapper[4971]: I1127 08:47:31.479348 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1" Nov 27 08:47:31 crc kubenswrapper[4971]: I1127 08:47:31.658025 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Nov 27 08:47:31 crc kubenswrapper[4971]: I1127 08:47:31.672153 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2" Nov 27 08:47:31 crc kubenswrapper[4971]: I1127 08:47:31.675653 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Nov 27 08:47:31 crc kubenswrapper[4971]: I1127 08:47:31.769117 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6"] Nov 27 08:47:31 crc kubenswrapper[4971]: I1127 08:47:31.770500 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6" Nov 27 08:47:31 crc kubenswrapper[4971]: I1127 08:47:31.777208 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Nov 27 08:47:31 crc kubenswrapper[4971]: I1127 08:47:31.793811 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6"] Nov 27 08:47:31 crc kubenswrapper[4971]: I1127 08:47:31.919647 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-dns-svc\") pod \"dnsmasq-dns-7b7cbd7fcf-wsmm6\" (UID: \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\") " pod="openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6" Nov 27 08:47:31 crc kubenswrapper[4971]: I1127 08:47:31.919725 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlpxt\" (UniqueName: \"kubernetes.io/projected/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-kube-api-access-vlpxt\") pod \"dnsmasq-dns-7b7cbd7fcf-wsmm6\" (UID: \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\") " pod="openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6" Nov 27 08:47:31 crc kubenswrapper[4971]: I1127 08:47:31.919864 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-config\") pod \"dnsmasq-dns-7b7cbd7fcf-wsmm6\" (UID: \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\") " pod="openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6" Nov 27 08:47:31 crc kubenswrapper[4971]: I1127 08:47:31.920097 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-ovsdbserver-nb\") pod \"dnsmasq-dns-7b7cbd7fcf-wsmm6\" (UID: \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\") " pod="openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6" Nov 27 08:47:31 crc kubenswrapper[4971]: I1127 08:47:31.982932 4971 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6"] Nov 27 08:47:31 crc kubenswrapper[4971]: E1127 08:47:31.983562 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-vlpxt ovsdbserver-nb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6" podUID="7cedaf56-dee4-45e5-aa67-f53f454d7eb3" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.009385 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ccbc467bc-bh4qq"] Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.010767 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.018766 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.030340 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccbc467bc-bh4qq"] Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.032338 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-ovsdbserver-nb\") pod \"dnsmasq-dns-7b7cbd7fcf-wsmm6\" (UID: \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\") " pod="openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.032620 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-dns-svc\") pod \"dnsmasq-dns-7b7cbd7fcf-wsmm6\" (UID: \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\") " pod="openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.032746 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlpxt\" (UniqueName: \"kubernetes.io/projected/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-kube-api-access-vlpxt\") pod \"dnsmasq-dns-7b7cbd7fcf-wsmm6\" (UID: \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\") " pod="openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.032829 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-config\") pod \"dnsmasq-dns-7b7cbd7fcf-wsmm6\" (UID: \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\") " pod="openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.034057 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-config\") pod \"dnsmasq-dns-7b7cbd7fcf-wsmm6\" (UID: \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\") " pod="openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.034238 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-dns-svc\") pod \"dnsmasq-dns-7b7cbd7fcf-wsmm6\" (UID: \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\") " pod="openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.034513 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-ovsdbserver-nb\") pod \"dnsmasq-dns-7b7cbd7fcf-wsmm6\" (UID: \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\") " pod="openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.070151 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlpxt\" (UniqueName: \"kubernetes.io/projected/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-kube-api-access-vlpxt\") pod \"dnsmasq-dns-7b7cbd7fcf-wsmm6\" (UID: \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\") " pod="openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.133959 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccbc467bc-bh4qq\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.134375 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-config\") pod \"dnsmasq-dns-5ccbc467bc-bh4qq\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.134414 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5r75\" (UniqueName: \"kubernetes.io/projected/0413b693-383d-4291-bab3-447fd147e5ff-kube-api-access-s5r75\") pod \"dnsmasq-dns-5ccbc467bc-bh4qq\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.134434 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccbc467bc-bh4qq\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.134488 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-dns-svc\") pod \"dnsmasq-dns-5ccbc467bc-bh4qq\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.237140 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-dns-svc\") pod \"dnsmasq-dns-5ccbc467bc-bh4qq\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.238043 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccbc467bc-bh4qq\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.238326 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" 
(UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-dns-svc\") pod \"dnsmasq-dns-5ccbc467bc-bh4qq\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.238352 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-config\") pod \"dnsmasq-dns-5ccbc467bc-bh4qq\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.238896 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccbc467bc-bh4qq\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.238914 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5r75\" (UniqueName: \"kubernetes.io/projected/0413b693-383d-4291-bab3-447fd147e5ff-kube-api-access-s5r75\") pod \"dnsmasq-dns-5ccbc467bc-bh4qq\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.239241 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-config\") pod \"dnsmasq-dns-5ccbc467bc-bh4qq\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.239400 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccbc467bc-bh4qq\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.241516 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccbc467bc-bh4qq\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.255234 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5r75\" (UniqueName: \"kubernetes.io/projected/0413b693-383d-4291-bab3-447fd147e5ff-kube-api-access-s5r75\") pod \"dnsmasq-dns-5ccbc467bc-bh4qq\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.341274 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.584535 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.599472 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.749084 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-dns-svc\") pod \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\" (UID: \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\") " Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.749203 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-ovsdbserver-nb\") pod \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\" (UID: \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\") " Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.749276 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-config\") pod \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\" (UID: \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\") " Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.749371 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vlpxt\" (UniqueName: \"kubernetes.io/projected/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-kube-api-access-vlpxt\") pod \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\" (UID: \"7cedaf56-dee4-45e5-aa67-f53f454d7eb3\") " Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.749658 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7cedaf56-dee4-45e5-aa67-f53f454d7eb3" (UID: "7cedaf56-dee4-45e5-aa67-f53f454d7eb3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.749807 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7cedaf56-dee4-45e5-aa67-f53f454d7eb3" (UID: "7cedaf56-dee4-45e5-aa67-f53f454d7eb3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.750238 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-config" (OuterVolumeSpecName: "config") pod "7cedaf56-dee4-45e5-aa67-f53f454d7eb3" (UID: "7cedaf56-dee4-45e5-aa67-f53f454d7eb3"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.750361 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.750387 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.755842 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-kube-api-access-vlpxt" (OuterVolumeSpecName: "kube-api-access-vlpxt") pod "7cedaf56-dee4-45e5-aa67-f53f454d7eb3" (UID: "7cedaf56-dee4-45e5-aa67-f53f454d7eb3"). InnerVolumeSpecName "kube-api-access-vlpxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.853245 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vlpxt\" (UniqueName: \"kubernetes.io/projected/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-kube-api-access-vlpxt\") on node \"crc\" DevicePath \"\"" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.853311 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cedaf56-dee4-45e5-aa67-f53f454d7eb3-config\") on node \"crc\" DevicePath \"\"" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.887506 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccbc467bc-bh4qq"] Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.937885 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-45m2w" Nov 27 08:47:32 crc kubenswrapper[4971]: I1127 08:47:32.990731 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-45m2w" Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.054620 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-45m2w"] Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.181048 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-np4kj"] Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.181341 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-np4kj" podUID="047e5a2f-96f7-4368-a26b-9692b2d3419e" containerName="registry-server" containerID="cri-o://9431eb86553b3f8153086637a4dd761aef59549053f7bd489333bbd2071ab1b3" gracePeriod=2 Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.594720 4971 generic.go:334] "Generic (PLEG): container finished" podID="0413b693-383d-4291-bab3-447fd147e5ff" containerID="1b7f8a572e2887a79e4dc5e3c00774af3d1b9ec6faf1103f3d830a96390f967c" exitCode=0 Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.594805 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" event={"ID":"0413b693-383d-4291-bab3-447fd147e5ff","Type":"ContainerDied","Data":"1b7f8a572e2887a79e4dc5e3c00774af3d1b9ec6faf1103f3d830a96390f967c"} Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.595407 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" event={"ID":"0413b693-383d-4291-bab3-447fd147e5ff","Type":"ContainerStarted","Data":"1b208c6bad050a2ddde9c859757be81aaa076c89b3cb54f992cc1768259b1a1f"} Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.601932 4971 generic.go:334] "Generic (PLEG): container finished" podID="047e5a2f-96f7-4368-a26b-9692b2d3419e" containerID="9431eb86553b3f8153086637a4dd761aef59549053f7bd489333bbd2071ab1b3" exitCode=0 Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.602221 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-np4kj" event={"ID":"047e5a2f-96f7-4368-a26b-9692b2d3419e","Type":"ContainerDied","Data":"9431eb86553b3f8153086637a4dd761aef59549053f7bd489333bbd2071ab1b3"} Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.602267 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-np4kj" event={"ID":"047e5a2f-96f7-4368-a26b-9692b2d3419e","Type":"ContainerDied","Data":"7efc0737e791e79e01e87480658367eb2e77c5a24f9dad87751fde3478365d32"} Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.602284 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7efc0737e791e79e01e87480658367eb2e77c5a24f9dad87751fde3478365d32" Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.602325 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6" Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.688253 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-np4kj" Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.722779 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6"] Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.738249 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7b7cbd7fcf-wsmm6"] Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.777627 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/047e5a2f-96f7-4368-a26b-9692b2d3419e-catalog-content\") pod \"047e5a2f-96f7-4368-a26b-9692b2d3419e\" (UID: \"047e5a2f-96f7-4368-a26b-9692b2d3419e\") " Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.777822 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7snbl\" (UniqueName: \"kubernetes.io/projected/047e5a2f-96f7-4368-a26b-9692b2d3419e-kube-api-access-7snbl\") pod \"047e5a2f-96f7-4368-a26b-9692b2d3419e\" (UID: \"047e5a2f-96f7-4368-a26b-9692b2d3419e\") " Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.777855 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/047e5a2f-96f7-4368-a26b-9692b2d3419e-utilities\") pod \"047e5a2f-96f7-4368-a26b-9692b2d3419e\" (UID: \"047e5a2f-96f7-4368-a26b-9692b2d3419e\") " Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.779242 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/047e5a2f-96f7-4368-a26b-9692b2d3419e-utilities" (OuterVolumeSpecName: "utilities") pod "047e5a2f-96f7-4368-a26b-9692b2d3419e" (UID: "047e5a2f-96f7-4368-a26b-9692b2d3419e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.782404 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/047e5a2f-96f7-4368-a26b-9692b2d3419e-kube-api-access-7snbl" (OuterVolumeSpecName: "kube-api-access-7snbl") pod "047e5a2f-96f7-4368-a26b-9692b2d3419e" (UID: "047e5a2f-96f7-4368-a26b-9692b2d3419e"). InnerVolumeSpecName "kube-api-access-7snbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.843609 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/047e5a2f-96f7-4368-a26b-9692b2d3419e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "047e5a2f-96f7-4368-a26b-9692b2d3419e" (UID: "047e5a2f-96f7-4368-a26b-9692b2d3419e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.880928 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/047e5a2f-96f7-4368-a26b-9692b2d3419e-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.880983 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7snbl\" (UniqueName: \"kubernetes.io/projected/047e5a2f-96f7-4368-a26b-9692b2d3419e-kube-api-access-7snbl\") on node \"crc\" DevicePath \"\"" Nov 27 08:47:33 crc kubenswrapper[4971]: I1127 08:47:33.880998 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/047e5a2f-96f7-4368-a26b-9692b2d3419e-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 08:47:34 crc kubenswrapper[4971]: I1127 08:47:34.562986 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7cedaf56-dee4-45e5-aa67-f53f454d7eb3" path="/var/lib/kubelet/pods/7cedaf56-dee4-45e5-aa67-f53f454d7eb3/volumes" Nov 27 08:47:34 crc kubenswrapper[4971]: I1127 08:47:34.614043 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" event={"ID":"0413b693-383d-4291-bab3-447fd147e5ff","Type":"ContainerStarted","Data":"4c6e990098643cfec006aa78c7e1cc19a73c8d146e25ca9008f0015f54408485"} Nov 27 08:47:34 crc kubenswrapper[4971]: I1127 08:47:34.614064 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-np4kj" Nov 27 08:47:34 crc kubenswrapper[4971]: I1127 08:47:34.614469 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:47:34 crc kubenswrapper[4971]: I1127 08:47:34.644121 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" podStartSLOduration=3.644091627 podStartE2EDuration="3.644091627s" podCreationTimestamp="2025-11-27 08:47:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:47:34.641942425 +0000 UTC m=+6892.833986413" watchObservedRunningTime="2025-11-27 08:47:34.644091627 +0000 UTC m=+6892.836135575" Nov 27 08:47:34 crc kubenswrapper[4971]: I1127 08:47:34.669542 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-np4kj"] Nov 27 08:47:34 crc kubenswrapper[4971]: I1127 08:47:34.678480 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-np4kj"] Nov 27 08:47:36 crc kubenswrapper[4971]: I1127 08:47:36.558976 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="047e5a2f-96f7-4368-a26b-9692b2d3419e" path="/var/lib/kubelet/pods/047e5a2f-96f7-4368-a26b-9692b2d3419e/volumes" Nov 27 08:47:36 crc kubenswrapper[4971]: I1127 08:47:36.733305 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-2" Nov 27 08:47:36 crc kubenswrapper[4971]: I1127 08:47:36.811439 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1" Nov 27 08:47:39 crc kubenswrapper[4971]: I1127 08:47:39.502342 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"] Nov 27 08:47:39 crc kubenswrapper[4971]: E1127 08:47:39.503485 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="047e5a2f-96f7-4368-a26b-9692b2d3419e" containerName="extract-utilities" Nov 27 08:47:39 crc kubenswrapper[4971]: I1127 08:47:39.503511 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="047e5a2f-96f7-4368-a26b-9692b2d3419e" containerName="extract-utilities" Nov 27 08:47:39 crc kubenswrapper[4971]: E1127 08:47:39.503566 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="047e5a2f-96f7-4368-a26b-9692b2d3419e" containerName="extract-content" Nov 27 08:47:39 crc kubenswrapper[4971]: I1127 08:47:39.503581 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="047e5a2f-96f7-4368-a26b-9692b2d3419e" containerName="extract-content" Nov 27 08:47:39 crc kubenswrapper[4971]: E1127 08:47:39.503623 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="047e5a2f-96f7-4368-a26b-9692b2d3419e" containerName="registry-server" Nov 27 08:47:39 crc kubenswrapper[4971]: I1127 08:47:39.503637 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="047e5a2f-96f7-4368-a26b-9692b2d3419e" containerName="registry-server" Nov 27 08:47:39 crc kubenswrapper[4971]: I1127 08:47:39.504005 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="047e5a2f-96f7-4368-a26b-9692b2d3419e" containerName="registry-server" Nov 27 08:47:39 crc kubenswrapper[4971]: I1127 08:47:39.505285 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Nov 27 08:47:39 crc kubenswrapper[4971]: I1127 08:47:39.508820 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert" Nov 27 08:47:39 crc kubenswrapper[4971]: I1127 08:47:39.513780 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Nov 27 08:47:39 crc kubenswrapper[4971]: I1127 08:47:39.692285 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-29969a46-0e7a-4676-9e24-eedcda19284b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-29969a46-0e7a-4676-9e24-eedcda19284b\") pod \"ovn-copy-data\" (UID: \"a2476780-5471-4c3b-a8e8-7537ea708699\") " pod="openstack/ovn-copy-data" Nov 27 08:47:39 crc kubenswrapper[4971]: I1127 08:47:39.692497 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/a2476780-5471-4c3b-a8e8-7537ea708699-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"a2476780-5471-4c3b-a8e8-7537ea708699\") " pod="openstack/ovn-copy-data" Nov 27 08:47:39 crc kubenswrapper[4971]: I1127 08:47:39.693898 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2skb\" (UniqueName: \"kubernetes.io/projected/a2476780-5471-4c3b-a8e8-7537ea708699-kube-api-access-q2skb\") pod \"ovn-copy-data\" (UID: \"a2476780-5471-4c3b-a8e8-7537ea708699\") " pod="openstack/ovn-copy-data" Nov 27 08:47:39 crc kubenswrapper[4971]: I1127 08:47:39.795410 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/a2476780-5471-4c3b-a8e8-7537ea708699-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"a2476780-5471-4c3b-a8e8-7537ea708699\") " pod="openstack/ovn-copy-data" Nov 27 08:47:39 crc kubenswrapper[4971]: I1127 08:47:39.795606 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2skb\" (UniqueName: \"kubernetes.io/projected/a2476780-5471-4c3b-a8e8-7537ea708699-kube-api-access-q2skb\") pod \"ovn-copy-data\" (UID: \"a2476780-5471-4c3b-a8e8-7537ea708699\") " pod="openstack/ovn-copy-data" Nov 27 08:47:39 crc kubenswrapper[4971]: I1127 08:47:39.795676 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-29969a46-0e7a-4676-9e24-eedcda19284b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-29969a46-0e7a-4676-9e24-eedcda19284b\") pod \"ovn-copy-data\" (UID: \"a2476780-5471-4c3b-a8e8-7537ea708699\") " pod="openstack/ovn-copy-data" Nov 27 08:47:39 crc kubenswrapper[4971]: I1127 08:47:39.800850 4971 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 27 08:47:39 crc kubenswrapper[4971]: I1127 08:47:39.800915 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-29969a46-0e7a-4676-9e24-eedcda19284b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-29969a46-0e7a-4676-9e24-eedcda19284b\") pod \"ovn-copy-data\" (UID: \"a2476780-5471-4c3b-a8e8-7537ea708699\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/8fef98666cc04a10dc619cf555509e92e83a518f710cf61b4cbf611042a7c74e/globalmount\"" pod="openstack/ovn-copy-data" Nov 27 08:47:39 crc kubenswrapper[4971]: I1127 08:47:39.813128 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/a2476780-5471-4c3b-a8e8-7537ea708699-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"a2476780-5471-4c3b-a8e8-7537ea708699\") " pod="openstack/ovn-copy-data" Nov 27 08:47:39 crc kubenswrapper[4971]: I1127 08:47:39.822342 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2skb\" (UniqueName: \"kubernetes.io/projected/a2476780-5471-4c3b-a8e8-7537ea708699-kube-api-access-q2skb\") pod \"ovn-copy-data\" (UID: \"a2476780-5471-4c3b-a8e8-7537ea708699\") " pod="openstack/ovn-copy-data" Nov 27 08:47:39 crc kubenswrapper[4971]: I1127 08:47:39.851712 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-29969a46-0e7a-4676-9e24-eedcda19284b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-29969a46-0e7a-4676-9e24-eedcda19284b\") pod \"ovn-copy-data\" (UID: \"a2476780-5471-4c3b-a8e8-7537ea708699\") " pod="openstack/ovn-copy-data" Nov 27 08:47:40 crc kubenswrapper[4971]: I1127 08:47:40.147844 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Nov 27 08:47:40 crc kubenswrapper[4971]: I1127 08:47:40.717919 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Nov 27 08:47:41 crc kubenswrapper[4971]: I1127 08:47:41.679874 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"a2476780-5471-4c3b-a8e8-7537ea708699","Type":"ContainerStarted","Data":"5cc65425c5a8a31100d7b91375dc9df42e60afcca47481890480d2ca7f5ab7b6"} Nov 27 08:47:41 crc kubenswrapper[4971]: I1127 08:47:41.680873 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"a2476780-5471-4c3b-a8e8-7537ea708699","Type":"ContainerStarted","Data":"edf4422bc3cfd9129f064802c6e2d2dee2150820681b939c491071aacd5464ba"} Nov 27 08:47:41 crc kubenswrapper[4971]: I1127 08:47:41.698499 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=3.5120472879999998 podStartE2EDuration="3.698480196s" podCreationTimestamp="2025-11-27 08:47:38 +0000 UTC" firstStartedPulling="2025-11-27 08:47:40.722924892 +0000 UTC m=+6898.914968800" lastFinishedPulling="2025-11-27 08:47:40.90935779 +0000 UTC m=+6899.101401708" observedRunningTime="2025-11-27 08:47:41.695875511 +0000 UTC m=+6899.887919449" watchObservedRunningTime="2025-11-27 08:47:41.698480196 +0000 UTC m=+6899.890524114" Nov 27 08:47:42 crc kubenswrapper[4971]: I1127 08:47:42.342880 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:47:42 crc kubenswrapper[4971]: I1127 08:47:42.411411 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76cb69558f-l5qnx"] Nov 27 08:47:42 crc kubenswrapper[4971]: I1127 08:47:42.411715 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" podUID="dae38fc6-dc8c-4958-a470-69de89a0e908" containerName="dnsmasq-dns" containerID="cri-o://219cf0599a216103fa5f9a3d6a8cd13bfd2bf0149a211f5f98cc4a53530ff558" gracePeriod=10 Nov 27 08:47:42 crc kubenswrapper[4971]: I1127 08:47:42.694649 4971 generic.go:334] "Generic (PLEG): container finished" podID="dae38fc6-dc8c-4958-a470-69de89a0e908" containerID="219cf0599a216103fa5f9a3d6a8cd13bfd2bf0149a211f5f98cc4a53530ff558" exitCode=0 Nov 27 08:47:42 crc kubenswrapper[4971]: I1127 08:47:42.695317 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" event={"ID":"dae38fc6-dc8c-4958-a470-69de89a0e908","Type":"ContainerDied","Data":"219cf0599a216103fa5f9a3d6a8cd13bfd2bf0149a211f5f98cc4a53530ff558"} Nov 27 08:47:42 crc kubenswrapper[4971]: I1127 08:47:42.948758 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" Nov 27 08:47:43 crc kubenswrapper[4971]: I1127 08:47:43.058462 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dae38fc6-dc8c-4958-a470-69de89a0e908-dns-svc\") pod \"dae38fc6-dc8c-4958-a470-69de89a0e908\" (UID: \"dae38fc6-dc8c-4958-a470-69de89a0e908\") " Nov 27 08:47:43 crc kubenswrapper[4971]: I1127 08:47:43.059175 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xzsd\" (UniqueName: \"kubernetes.io/projected/dae38fc6-dc8c-4958-a470-69de89a0e908-kube-api-access-6xzsd\") pod \"dae38fc6-dc8c-4958-a470-69de89a0e908\" (UID: \"dae38fc6-dc8c-4958-a470-69de89a0e908\") " Nov 27 08:47:43 crc kubenswrapper[4971]: I1127 08:47:43.059248 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dae38fc6-dc8c-4958-a470-69de89a0e908-config\") pod \"dae38fc6-dc8c-4958-a470-69de89a0e908\" (UID: \"dae38fc6-dc8c-4958-a470-69de89a0e908\") " Nov 27 08:47:43 crc kubenswrapper[4971]: I1127 08:47:43.066330 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dae38fc6-dc8c-4958-a470-69de89a0e908-kube-api-access-6xzsd" (OuterVolumeSpecName: "kube-api-access-6xzsd") pod "dae38fc6-dc8c-4958-a470-69de89a0e908" (UID: "dae38fc6-dc8c-4958-a470-69de89a0e908"). InnerVolumeSpecName "kube-api-access-6xzsd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:47:43 crc kubenswrapper[4971]: I1127 08:47:43.103843 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dae38fc6-dc8c-4958-a470-69de89a0e908-config" (OuterVolumeSpecName: "config") pod "dae38fc6-dc8c-4958-a470-69de89a0e908" (UID: "dae38fc6-dc8c-4958-a470-69de89a0e908"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:47:43 crc kubenswrapper[4971]: I1127 08:47:43.104047 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dae38fc6-dc8c-4958-a470-69de89a0e908-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dae38fc6-dc8c-4958-a470-69de89a0e908" (UID: "dae38fc6-dc8c-4958-a470-69de89a0e908"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:47:43 crc kubenswrapper[4971]: I1127 08:47:43.161356 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dae38fc6-dc8c-4958-a470-69de89a0e908-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 08:47:43 crc kubenswrapper[4971]: I1127 08:47:43.161397 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xzsd\" (UniqueName: \"kubernetes.io/projected/dae38fc6-dc8c-4958-a470-69de89a0e908-kube-api-access-6xzsd\") on node \"crc\" DevicePath \"\"" Nov 27 08:47:43 crc kubenswrapper[4971]: I1127 08:47:43.161410 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dae38fc6-dc8c-4958-a470-69de89a0e908-config\") on node \"crc\" DevicePath \"\"" Nov 27 08:47:43 crc kubenswrapper[4971]: I1127 08:47:43.703675 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" event={"ID":"dae38fc6-dc8c-4958-a470-69de89a0e908","Type":"ContainerDied","Data":"bd56ed579974d7ef649501f3b2e65a865bd86914bb45581c8e367cb9abc6822f"} Nov 27 08:47:43 crc kubenswrapper[4971]: I1127 08:47:43.703736 4971 scope.go:117] "RemoveContainer" containerID="219cf0599a216103fa5f9a3d6a8cd13bfd2bf0149a211f5f98cc4a53530ff558" Nov 27 08:47:43 crc kubenswrapper[4971]: I1127 08:47:43.703736 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76cb69558f-l5qnx" Nov 27 08:47:43 crc kubenswrapper[4971]: I1127 08:47:43.744852 4971 scope.go:117] "RemoveContainer" containerID="553f443adf450b65b4198bfee2afe73ceb17950e1d0742bf60d40cc687c6fd78" Nov 27 08:47:43 crc kubenswrapper[4971]: I1127 08:47:43.753901 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76cb69558f-l5qnx"] Nov 27 08:47:43 crc kubenswrapper[4971]: I1127 08:47:43.762847 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-76cb69558f-l5qnx"] Nov 27 08:47:44 crc kubenswrapper[4971]: I1127 08:47:44.570311 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dae38fc6-dc8c-4958-a470-69de89a0e908" path="/var/lib/kubelet/pods/dae38fc6-dc8c-4958-a470-69de89a0e908/volumes" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.631093 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Nov 27 08:47:49 crc kubenswrapper[4971]: E1127 08:47:49.631858 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dae38fc6-dc8c-4958-a470-69de89a0e908" containerName="init" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.631875 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="dae38fc6-dc8c-4958-a470-69de89a0e908" containerName="init" Nov 27 08:47:49 crc kubenswrapper[4971]: E1127 08:47:49.631885 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dae38fc6-dc8c-4958-a470-69de89a0e908" containerName="dnsmasq-dns" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.631892 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="dae38fc6-dc8c-4958-a470-69de89a0e908" containerName="dnsmasq-dns" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.632068 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="dae38fc6-dc8c-4958-a470-69de89a0e908" containerName="dnsmasq-dns" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.633110 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.636806 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-dppjx" Nov 27 08:47:49 crc kubenswrapper[4971]: W1127 08:47:49.637096 4971 reflector.go:561] object-"openstack"/"ovnnorthd-config": failed to list *v1.ConfigMap: configmaps "ovnnorthd-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Nov 27 08:47:49 crc kubenswrapper[4971]: E1127 08:47:49.637148 4971 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"ovnnorthd-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"ovnnorthd-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.637215 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.661391 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.787401 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/68d03f53-0125-471d-af31-02b3f026ec33-scripts\") pod \"ovn-northd-0\" (UID: \"68d03f53-0125-471d-af31-02b3f026ec33\") " pod="openstack/ovn-northd-0" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.787473 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/68d03f53-0125-471d-af31-02b3f026ec33-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"68d03f53-0125-471d-af31-02b3f026ec33\") " pod="openstack/ovn-northd-0" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.787507 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68d03f53-0125-471d-af31-02b3f026ec33-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"68d03f53-0125-471d-af31-02b3f026ec33\") " pod="openstack/ovn-northd-0" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.787553 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5d9x\" (UniqueName: \"kubernetes.io/projected/68d03f53-0125-471d-af31-02b3f026ec33-kube-api-access-x5d9x\") pod \"ovn-northd-0\" (UID: \"68d03f53-0125-471d-af31-02b3f026ec33\") " pod="openstack/ovn-northd-0" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.787589 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68d03f53-0125-471d-af31-02b3f026ec33-config\") pod \"ovn-northd-0\" (UID: \"68d03f53-0125-471d-af31-02b3f026ec33\") " pod="openstack/ovn-northd-0" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.889603 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/68d03f53-0125-471d-af31-02b3f026ec33-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"68d03f53-0125-471d-af31-02b3f026ec33\") " 
pod="openstack/ovn-northd-0" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.889679 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68d03f53-0125-471d-af31-02b3f026ec33-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"68d03f53-0125-471d-af31-02b3f026ec33\") " pod="openstack/ovn-northd-0" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.889729 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5d9x\" (UniqueName: \"kubernetes.io/projected/68d03f53-0125-471d-af31-02b3f026ec33-kube-api-access-x5d9x\") pod \"ovn-northd-0\" (UID: \"68d03f53-0125-471d-af31-02b3f026ec33\") " pod="openstack/ovn-northd-0" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.889784 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68d03f53-0125-471d-af31-02b3f026ec33-config\") pod \"ovn-northd-0\" (UID: \"68d03f53-0125-471d-af31-02b3f026ec33\") " pod="openstack/ovn-northd-0" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.889865 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/68d03f53-0125-471d-af31-02b3f026ec33-scripts\") pod \"ovn-northd-0\" (UID: \"68d03f53-0125-471d-af31-02b3f026ec33\") " pod="openstack/ovn-northd-0" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.890638 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/68d03f53-0125-471d-af31-02b3f026ec33-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"68d03f53-0125-471d-af31-02b3f026ec33\") " pod="openstack/ovn-northd-0" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.890807 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/68d03f53-0125-471d-af31-02b3f026ec33-scripts\") pod \"ovn-northd-0\" (UID: \"68d03f53-0125-471d-af31-02b3f026ec33\") " pod="openstack/ovn-northd-0" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.897861 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68d03f53-0125-471d-af31-02b3f026ec33-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"68d03f53-0125-471d-af31-02b3f026ec33\") " pod="openstack/ovn-northd-0" Nov 27 08:47:49 crc kubenswrapper[4971]: I1127 08:47:49.911876 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5d9x\" (UniqueName: \"kubernetes.io/projected/68d03f53-0125-471d-af31-02b3f026ec33-kube-api-access-x5d9x\") pod \"ovn-northd-0\" (UID: \"68d03f53-0125-471d-af31-02b3f026ec33\") " pod="openstack/ovn-northd-0" Nov 27 08:47:50 crc kubenswrapper[4971]: E1127 08:47:50.890676 4971 configmap.go:193] Couldn't get configMap openstack/ovnnorthd-config: failed to sync configmap cache: timed out waiting for the condition Nov 27 08:47:50 crc kubenswrapper[4971]: E1127 08:47:50.890790 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/68d03f53-0125-471d-af31-02b3f026ec33-config podName:68d03f53-0125-471d-af31-02b3f026ec33 nodeName:}" failed. No retries permitted until 2025-11-27 08:47:51.390767531 +0000 UTC m=+6909.582811449 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/68d03f53-0125-471d-af31-02b3f026ec33-config") pod "ovn-northd-0" (UID: "68d03f53-0125-471d-af31-02b3f026ec33") : failed to sync configmap cache: timed out waiting for the condition Nov 27 08:47:51 crc kubenswrapper[4971]: I1127 08:47:51.170185 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Nov 27 08:47:51 crc kubenswrapper[4971]: I1127 08:47:51.419090 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68d03f53-0125-471d-af31-02b3f026ec33-config\") pod \"ovn-northd-0\" (UID: \"68d03f53-0125-471d-af31-02b3f026ec33\") " pod="openstack/ovn-northd-0" Nov 27 08:47:51 crc kubenswrapper[4971]: I1127 08:47:51.420176 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68d03f53-0125-471d-af31-02b3f026ec33-config\") pod \"ovn-northd-0\" (UID: \"68d03f53-0125-471d-af31-02b3f026ec33\") " pod="openstack/ovn-northd-0" Nov 27 08:47:51 crc kubenswrapper[4971]: I1127 08:47:51.454364 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Nov 27 08:47:51 crc kubenswrapper[4971]: I1127 08:47:51.933302 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Nov 27 08:47:51 crc kubenswrapper[4971]: W1127 08:47:51.937413 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68d03f53_0125_471d_af31_02b3f026ec33.slice/crio-17fb3e479b119648426131e0edf580f0d52835a86aaa02a86a000e6d23a0d6ca WatchSource:0}: Error finding container 17fb3e479b119648426131e0edf580f0d52835a86aaa02a86a000e6d23a0d6ca: Status 404 returned error can't find the container with id 17fb3e479b119648426131e0edf580f0d52835a86aaa02a86a000e6d23a0d6ca Nov 27 08:47:52 crc kubenswrapper[4971]: I1127 08:47:52.780443 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"68d03f53-0125-471d-af31-02b3f026ec33","Type":"ContainerStarted","Data":"17fb3e479b119648426131e0edf580f0d52835a86aaa02a86a000e6d23a0d6ca"} Nov 27 08:47:53 crc kubenswrapper[4971]: I1127 08:47:53.278981 4971 scope.go:117] "RemoveContainer" containerID="0e995a4cd07d813f8fcb0c6af8ec992e6444a3694991ee2130a449ec0020b09d" Nov 27 08:47:53 crc kubenswrapper[4971]: I1127 08:47:53.297499 4971 scope.go:117] "RemoveContainer" containerID="9431eb86553b3f8153086637a4dd761aef59549053f7bd489333bbd2071ab1b3" Nov 27 08:47:53 crc kubenswrapper[4971]: I1127 08:47:53.335770 4971 scope.go:117] "RemoveContainer" containerID="8c93cd5c78360179bb11417dae7cf22a8647680f52880a3d69972bcf01b98150" Nov 27 08:47:53 crc kubenswrapper[4971]: I1127 08:47:53.789473 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"68d03f53-0125-471d-af31-02b3f026ec33","Type":"ContainerStarted","Data":"a32895c4934dcefe85db12a0a279d3d5b5decf3af7dce5139d84533181f89b9b"} Nov 27 08:47:53 crc kubenswrapper[4971]: I1127 08:47:53.789545 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"68d03f53-0125-471d-af31-02b3f026ec33","Type":"ContainerStarted","Data":"21a743ffba79e3c6be5a2d913bdecaef5b79d4571b20dd92308af590a7e7c62e"} Nov 27 08:47:53 crc kubenswrapper[4971]: I1127 08:47:53.789656 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/ovn-northd-0" Nov 27 08:47:53 crc kubenswrapper[4971]: I1127 08:47:53.816946 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=4.072408281 podStartE2EDuration="4.816925119s" podCreationTimestamp="2025-11-27 08:47:49 +0000 UTC" firstStartedPulling="2025-11-27 08:47:51.939524671 +0000 UTC m=+6910.131568589" lastFinishedPulling="2025-11-27 08:47:52.684041509 +0000 UTC m=+6910.876085427" observedRunningTime="2025-11-27 08:47:53.80822584 +0000 UTC m=+6912.000269758" watchObservedRunningTime="2025-11-27 08:47:53.816925119 +0000 UTC m=+6912.008969037" Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.377913 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-rvxt8"] Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.379388 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-rvxt8" Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.394573 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-rvxt8"] Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.476006 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-74f3-account-create-update-mlqzn"] Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.477083 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-74f3-account-create-update-mlqzn" Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.483650 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.487610 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-74f3-account-create-update-mlqzn"] Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.519102 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6zjr\" (UniqueName: \"kubernetes.io/projected/0c132b00-609c-45e7-bbd1-648bc1ec2152-kube-api-access-r6zjr\") pod \"keystone-db-create-rvxt8\" (UID: \"0c132b00-609c-45e7-bbd1-648bc1ec2152\") " pod="openstack/keystone-db-create-rvxt8" Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.519256 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c132b00-609c-45e7-bbd1-648bc1ec2152-operator-scripts\") pod \"keystone-db-create-rvxt8\" (UID: \"0c132b00-609c-45e7-bbd1-648bc1ec2152\") " pod="openstack/keystone-db-create-rvxt8" Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.620782 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6zjr\" (UniqueName: \"kubernetes.io/projected/0c132b00-609c-45e7-bbd1-648bc1ec2152-kube-api-access-r6zjr\") pod \"keystone-db-create-rvxt8\" (UID: \"0c132b00-609c-45e7-bbd1-648bc1ec2152\") " pod="openstack/keystone-db-create-rvxt8" Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.620851 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mflxj\" (UniqueName: \"kubernetes.io/projected/5e7b3ea9-343b-4e78-b126-da324d6a1143-kube-api-access-mflxj\") pod \"keystone-74f3-account-create-update-mlqzn\" (UID: \"5e7b3ea9-343b-4e78-b126-da324d6a1143\") " pod="openstack/keystone-74f3-account-create-update-mlqzn" Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 
08:47:57.620925 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e7b3ea9-343b-4e78-b126-da324d6a1143-operator-scripts\") pod \"keystone-74f3-account-create-update-mlqzn\" (UID: \"5e7b3ea9-343b-4e78-b126-da324d6a1143\") " pod="openstack/keystone-74f3-account-create-update-mlqzn" Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.620983 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c132b00-609c-45e7-bbd1-648bc1ec2152-operator-scripts\") pod \"keystone-db-create-rvxt8\" (UID: \"0c132b00-609c-45e7-bbd1-648bc1ec2152\") " pod="openstack/keystone-db-create-rvxt8" Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.622263 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c132b00-609c-45e7-bbd1-648bc1ec2152-operator-scripts\") pod \"keystone-db-create-rvxt8\" (UID: \"0c132b00-609c-45e7-bbd1-648bc1ec2152\") " pod="openstack/keystone-db-create-rvxt8" Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.640093 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6zjr\" (UniqueName: \"kubernetes.io/projected/0c132b00-609c-45e7-bbd1-648bc1ec2152-kube-api-access-r6zjr\") pod \"keystone-db-create-rvxt8\" (UID: \"0c132b00-609c-45e7-bbd1-648bc1ec2152\") " pod="openstack/keystone-db-create-rvxt8" Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.707982 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-rvxt8" Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.728846 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e7b3ea9-343b-4e78-b126-da324d6a1143-operator-scripts\") pod \"keystone-74f3-account-create-update-mlqzn\" (UID: \"5e7b3ea9-343b-4e78-b126-da324d6a1143\") " pod="openstack/keystone-74f3-account-create-update-mlqzn" Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.729374 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mflxj\" (UniqueName: \"kubernetes.io/projected/5e7b3ea9-343b-4e78-b126-da324d6a1143-kube-api-access-mflxj\") pod \"keystone-74f3-account-create-update-mlqzn\" (UID: \"5e7b3ea9-343b-4e78-b126-da324d6a1143\") " pod="openstack/keystone-74f3-account-create-update-mlqzn" Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.730362 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e7b3ea9-343b-4e78-b126-da324d6a1143-operator-scripts\") pod \"keystone-74f3-account-create-update-mlqzn\" (UID: \"5e7b3ea9-343b-4e78-b126-da324d6a1143\") " pod="openstack/keystone-74f3-account-create-update-mlqzn" Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.770871 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mflxj\" (UniqueName: \"kubernetes.io/projected/5e7b3ea9-343b-4e78-b126-da324d6a1143-kube-api-access-mflxj\") pod \"keystone-74f3-account-create-update-mlqzn\" (UID: \"5e7b3ea9-343b-4e78-b126-da324d6a1143\") " pod="openstack/keystone-74f3-account-create-update-mlqzn" Nov 27 08:47:57 crc kubenswrapper[4971]: I1127 08:47:57.794229 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-74f3-account-create-update-mlqzn" Nov 27 08:47:58 crc kubenswrapper[4971]: I1127 08:47:58.342311 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-rvxt8"] Nov 27 08:47:58 crc kubenswrapper[4971]: I1127 08:47:58.399387 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-74f3-account-create-update-mlqzn"] Nov 27 08:47:58 crc kubenswrapper[4971]: I1127 08:47:58.865363 4971 generic.go:334] "Generic (PLEG): container finished" podID="5e7b3ea9-343b-4e78-b126-da324d6a1143" containerID="bbc3bedf1f403abcee9431bc34f94ad114433bf2766f43eae161613770d54fdf" exitCode=0 Nov 27 08:47:58 crc kubenswrapper[4971]: I1127 08:47:58.865438 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-74f3-account-create-update-mlqzn" event={"ID":"5e7b3ea9-343b-4e78-b126-da324d6a1143","Type":"ContainerDied","Data":"bbc3bedf1f403abcee9431bc34f94ad114433bf2766f43eae161613770d54fdf"} Nov 27 08:47:58 crc kubenswrapper[4971]: I1127 08:47:58.865470 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-74f3-account-create-update-mlqzn" event={"ID":"5e7b3ea9-343b-4e78-b126-da324d6a1143","Type":"ContainerStarted","Data":"37bd6a934ca8957dedd3f6f4688ae5fc0083e7fa7bd24b613062e61b3cdcdf83"} Nov 27 08:47:58 crc kubenswrapper[4971]: I1127 08:47:58.867450 4971 generic.go:334] "Generic (PLEG): container finished" podID="0c132b00-609c-45e7-bbd1-648bc1ec2152" containerID="fbbe32cf5da101b90379c83fba83dc224585a390ff16e0b98b39062d1d4cf6e5" exitCode=0 Nov 27 08:47:58 crc kubenswrapper[4971]: I1127 08:47:58.867481 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-rvxt8" event={"ID":"0c132b00-609c-45e7-bbd1-648bc1ec2152","Type":"ContainerDied","Data":"fbbe32cf5da101b90379c83fba83dc224585a390ff16e0b98b39062d1d4cf6e5"} Nov 27 08:47:58 crc kubenswrapper[4971]: I1127 08:47:58.867498 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-rvxt8" event={"ID":"0c132b00-609c-45e7-bbd1-648bc1ec2152","Type":"ContainerStarted","Data":"e56be56d3d3ae6cee11eb0087fb6bea363500468d95c36af887b2e18219e0899"} Nov 27 08:48:01 crc kubenswrapper[4971]: I1127 08:48:01.107253 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="metallb-system/speaker-rpws9" podUID="d39b11b5-58d7-4eb3-aea7-50e2f7f40c89" containerName="speaker" probeResult="failure" output="Get \"http://localhost:29150/metrics\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 08:48:01 crc kubenswrapper[4971]: I1127 08:48:01.109128 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="metallb-system/speaker-rpws9" podUID="d39b11b5-58d7-4eb3-aea7-50e2f7f40c89" containerName="speaker" probeResult="failure" output="Get \"http://localhost:29150/metrics\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.031762 4971 patch_prober.go:28] interesting pod/oauth-openshift-f94d7b7c5-z7ttw container/oauth-openshift namespace/openshift-authentication: Liveness probe status=failure output="Get \"https://10.217.0.61:6443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.032061 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" podUID="6df51c47-dfe6-4996-874e-0766a1bd4232" 
containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.61:6443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.075769 4971 patch_prober.go:28] interesting pod/oauth-openshift-f94d7b7c5-z7ttw container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.61:6443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.076039 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-f94d7b7c5-z7ttw" podUID="6df51c47-dfe6-4996-874e-0766a1bd4232" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.61:6443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.204095 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" podUID="eee345d4-5e02-4a96-a204-383fd410e564" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.78:8081/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.204347 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/infra-operator-controller-manager-57548d458d-zdjzp" podUID="eee345d4-5e02-4a96-a204-383fd410e564" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.78:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.217423 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-marketplace/redhat-marketplace-s6lp8" podUID="8c727031-a0df-4bca-be8b-e8211a220302" containerName="registry-server" probeResult="failure" output=< Nov 27 08:48:02 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 08:48:02 crc kubenswrapper[4971]: > Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.494046 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-rvxt8" Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.500987 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-74f3-account-create-update-mlqzn" Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.597848 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mflxj\" (UniqueName: \"kubernetes.io/projected/5e7b3ea9-343b-4e78-b126-da324d6a1143-kube-api-access-mflxj\") pod \"5e7b3ea9-343b-4e78-b126-da324d6a1143\" (UID: \"5e7b3ea9-343b-4e78-b126-da324d6a1143\") " Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.597917 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c132b00-609c-45e7-bbd1-648bc1ec2152-operator-scripts\") pod \"0c132b00-609c-45e7-bbd1-648bc1ec2152\" (UID: \"0c132b00-609c-45e7-bbd1-648bc1ec2152\") " Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.598161 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6zjr\" (UniqueName: \"kubernetes.io/projected/0c132b00-609c-45e7-bbd1-648bc1ec2152-kube-api-access-r6zjr\") pod \"0c132b00-609c-45e7-bbd1-648bc1ec2152\" (UID: \"0c132b00-609c-45e7-bbd1-648bc1ec2152\") " Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.598368 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e7b3ea9-343b-4e78-b126-da324d6a1143-operator-scripts\") pod \"5e7b3ea9-343b-4e78-b126-da324d6a1143\" (UID: \"5e7b3ea9-343b-4e78-b126-da324d6a1143\") " Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.599804 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e7b3ea9-343b-4e78-b126-da324d6a1143-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5e7b3ea9-343b-4e78-b126-da324d6a1143" (UID: "5e7b3ea9-343b-4e78-b126-da324d6a1143"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.600207 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c132b00-609c-45e7-bbd1-648bc1ec2152-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0c132b00-609c-45e7-bbd1-648bc1ec2152" (UID: "0c132b00-609c-45e7-bbd1-648bc1ec2152"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.605959 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e7b3ea9-343b-4e78-b126-da324d6a1143-kube-api-access-mflxj" (OuterVolumeSpecName: "kube-api-access-mflxj") pod "5e7b3ea9-343b-4e78-b126-da324d6a1143" (UID: "5e7b3ea9-343b-4e78-b126-da324d6a1143"). InnerVolumeSpecName "kube-api-access-mflxj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.606407 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c132b00-609c-45e7-bbd1-648bc1ec2152-kube-api-access-r6zjr" (OuterVolumeSpecName: "kube-api-access-r6zjr") pod "0c132b00-609c-45e7-bbd1-648bc1ec2152" (UID: "0c132b00-609c-45e7-bbd1-648bc1ec2152"). InnerVolumeSpecName "kube-api-access-r6zjr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.700112 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e7b3ea9-343b-4e78-b126-da324d6a1143-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.700141 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mflxj\" (UniqueName: \"kubernetes.io/projected/5e7b3ea9-343b-4e78-b126-da324d6a1143-kube-api-access-mflxj\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.700152 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0c132b00-609c-45e7-bbd1-648bc1ec2152-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:02 crc kubenswrapper[4971]: I1127 08:48:02.700161 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6zjr\" (UniqueName: \"kubernetes.io/projected/0c132b00-609c-45e7-bbd1-648bc1ec2152-kube-api-access-r6zjr\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:03 crc kubenswrapper[4971]: I1127 08:48:03.308496 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-rvxt8" event={"ID":"0c132b00-609c-45e7-bbd1-648bc1ec2152","Type":"ContainerDied","Data":"e56be56d3d3ae6cee11eb0087fb6bea363500468d95c36af887b2e18219e0899"} Nov 27 08:48:03 crc kubenswrapper[4971]: I1127 08:48:03.310633 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e56be56d3d3ae6cee11eb0087fb6bea363500468d95c36af887b2e18219e0899" Nov 27 08:48:03 crc kubenswrapper[4971]: I1127 08:48:03.308594 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-rvxt8" Nov 27 08:48:03 crc kubenswrapper[4971]: I1127 08:48:03.311606 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-74f3-account-create-update-mlqzn" event={"ID":"5e7b3ea9-343b-4e78-b126-da324d6a1143","Type":"ContainerDied","Data":"37bd6a934ca8957dedd3f6f4688ae5fc0083e7fa7bd24b613062e61b3cdcdf83"} Nov 27 08:48:03 crc kubenswrapper[4971]: I1127 08:48:03.311650 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37bd6a934ca8957dedd3f6f4688ae5fc0083e7fa7bd24b613062e61b3cdcdf83" Nov 27 08:48:03 crc kubenswrapper[4971]: I1127 08:48:03.311716 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-74f3-account-create-update-mlqzn" Nov 27 08:48:06 crc kubenswrapper[4971]: I1127 08:48:06.515733 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.075438 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-lnfbm"] Nov 27 08:48:08 crc kubenswrapper[4971]: E1127 08:48:08.075868 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e7b3ea9-343b-4e78-b126-da324d6a1143" containerName="mariadb-account-create-update" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.075889 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e7b3ea9-343b-4e78-b126-da324d6a1143" containerName="mariadb-account-create-update" Nov 27 08:48:08 crc kubenswrapper[4971]: E1127 08:48:08.075916 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c132b00-609c-45e7-bbd1-648bc1ec2152" containerName="mariadb-database-create" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.075923 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c132b00-609c-45e7-bbd1-648bc1ec2152" containerName="mariadb-database-create" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.076132 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e7b3ea9-343b-4e78-b126-da324d6a1143" containerName="mariadb-account-create-update" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.076154 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c132b00-609c-45e7-bbd1-648bc1ec2152" containerName="mariadb-database-create" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.076770 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-lnfbm" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.082696 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-8ntd9" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.084070 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.084123 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.084153 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.126751 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-lnfbm"] Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.198601 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ca80319-5e50-45ff-9bd3-3ad98244e9d8-combined-ca-bundle\") pod \"keystone-db-sync-lnfbm\" (UID: \"9ca80319-5e50-45ff-9bd3-3ad98244e9d8\") " pod="openstack/keystone-db-sync-lnfbm" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.198798 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ca80319-5e50-45ff-9bd3-3ad98244e9d8-config-data\") pod \"keystone-db-sync-lnfbm\" (UID: \"9ca80319-5e50-45ff-9bd3-3ad98244e9d8\") " pod="openstack/keystone-db-sync-lnfbm" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.198914 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlfjh\" (UniqueName: \"kubernetes.io/projected/9ca80319-5e50-45ff-9bd3-3ad98244e9d8-kube-api-access-qlfjh\") pod \"keystone-db-sync-lnfbm\" (UID: \"9ca80319-5e50-45ff-9bd3-3ad98244e9d8\") " pod="openstack/keystone-db-sync-lnfbm" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.300641 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ca80319-5e50-45ff-9bd3-3ad98244e9d8-combined-ca-bundle\") pod \"keystone-db-sync-lnfbm\" (UID: \"9ca80319-5e50-45ff-9bd3-3ad98244e9d8\") " pod="openstack/keystone-db-sync-lnfbm" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.300704 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ca80319-5e50-45ff-9bd3-3ad98244e9d8-config-data\") pod \"keystone-db-sync-lnfbm\" (UID: \"9ca80319-5e50-45ff-9bd3-3ad98244e9d8\") " pod="openstack/keystone-db-sync-lnfbm" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.300742 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlfjh\" (UniqueName: \"kubernetes.io/projected/9ca80319-5e50-45ff-9bd3-3ad98244e9d8-kube-api-access-qlfjh\") pod \"keystone-db-sync-lnfbm\" (UID: \"9ca80319-5e50-45ff-9bd3-3ad98244e9d8\") " pod="openstack/keystone-db-sync-lnfbm" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.307458 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ca80319-5e50-45ff-9bd3-3ad98244e9d8-combined-ca-bundle\") pod \"keystone-db-sync-lnfbm\" (UID: \"9ca80319-5e50-45ff-9bd3-3ad98244e9d8\") " 
pod="openstack/keystone-db-sync-lnfbm" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.307510 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ca80319-5e50-45ff-9bd3-3ad98244e9d8-config-data\") pod \"keystone-db-sync-lnfbm\" (UID: \"9ca80319-5e50-45ff-9bd3-3ad98244e9d8\") " pod="openstack/keystone-db-sync-lnfbm" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.319290 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlfjh\" (UniqueName: \"kubernetes.io/projected/9ca80319-5e50-45ff-9bd3-3ad98244e9d8-kube-api-access-qlfjh\") pod \"keystone-db-sync-lnfbm\" (UID: \"9ca80319-5e50-45ff-9bd3-3ad98244e9d8\") " pod="openstack/keystone-db-sync-lnfbm" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.405129 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-lnfbm" Nov 27 08:48:08 crc kubenswrapper[4971]: I1127 08:48:08.867033 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-lnfbm"] Nov 27 08:48:08 crc kubenswrapper[4971]: W1127 08:48:08.873184 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ca80319_5e50_45ff_9bd3_3ad98244e9d8.slice/crio-6b5c86002c049182d4a5f077bcaed7fa0ca0ca82bc71b8f1bd722f43a1dce604 WatchSource:0}: Error finding container 6b5c86002c049182d4a5f077bcaed7fa0ca0ca82bc71b8f1bd722f43a1dce604: Status 404 returned error can't find the container with id 6b5c86002c049182d4a5f077bcaed7fa0ca0ca82bc71b8f1bd722f43a1dce604 Nov 27 08:48:09 crc kubenswrapper[4971]: I1127 08:48:09.363663 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-lnfbm" event={"ID":"9ca80319-5e50-45ff-9bd3-3ad98244e9d8","Type":"ContainerStarted","Data":"6b5c86002c049182d4a5f077bcaed7fa0ca0ca82bc71b8f1bd722f43a1dce604"} Nov 27 08:48:14 crc kubenswrapper[4971]: I1127 08:48:14.417168 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-lnfbm" event={"ID":"9ca80319-5e50-45ff-9bd3-3ad98244e9d8","Type":"ContainerStarted","Data":"07b0b323836e053194f4181c0e2fd5342fb88b99eb800b718b27bf8b2034975e"} Nov 27 08:48:14 crc kubenswrapper[4971]: I1127 08:48:14.449021 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-lnfbm" podStartSLOduration=1.5226399800000001 podStartE2EDuration="6.448987053s" podCreationTimestamp="2025-11-27 08:48:08 +0000 UTC" firstStartedPulling="2025-11-27 08:48:08.877897869 +0000 UTC m=+6927.069941787" lastFinishedPulling="2025-11-27 08:48:13.804244932 +0000 UTC m=+6931.996288860" observedRunningTime="2025-11-27 08:48:14.439250394 +0000 UTC m=+6932.631294342" watchObservedRunningTime="2025-11-27 08:48:14.448987053 +0000 UTC m=+6932.641030971" Nov 27 08:48:16 crc kubenswrapper[4971]: I1127 08:48:16.432393 4971 generic.go:334] "Generic (PLEG): container finished" podID="9ca80319-5e50-45ff-9bd3-3ad98244e9d8" containerID="07b0b323836e053194f4181c0e2fd5342fb88b99eb800b718b27bf8b2034975e" exitCode=0 Nov 27 08:48:16 crc kubenswrapper[4971]: I1127 08:48:16.432592 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-lnfbm" event={"ID":"9ca80319-5e50-45ff-9bd3-3ad98244e9d8","Type":"ContainerDied","Data":"07b0b323836e053194f4181c0e2fd5342fb88b99eb800b718b27bf8b2034975e"} Nov 27 08:48:17 crc kubenswrapper[4971]: I1127 08:48:17.767341 4971 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-lnfbm" Nov 27 08:48:17 crc kubenswrapper[4971]: I1127 08:48:17.882667 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qlfjh\" (UniqueName: \"kubernetes.io/projected/9ca80319-5e50-45ff-9bd3-3ad98244e9d8-kube-api-access-qlfjh\") pod \"9ca80319-5e50-45ff-9bd3-3ad98244e9d8\" (UID: \"9ca80319-5e50-45ff-9bd3-3ad98244e9d8\") " Nov 27 08:48:17 crc kubenswrapper[4971]: I1127 08:48:17.882735 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ca80319-5e50-45ff-9bd3-3ad98244e9d8-config-data\") pod \"9ca80319-5e50-45ff-9bd3-3ad98244e9d8\" (UID: \"9ca80319-5e50-45ff-9bd3-3ad98244e9d8\") " Nov 27 08:48:17 crc kubenswrapper[4971]: I1127 08:48:17.882760 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ca80319-5e50-45ff-9bd3-3ad98244e9d8-combined-ca-bundle\") pod \"9ca80319-5e50-45ff-9bd3-3ad98244e9d8\" (UID: \"9ca80319-5e50-45ff-9bd3-3ad98244e9d8\") " Nov 27 08:48:17 crc kubenswrapper[4971]: I1127 08:48:17.890962 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ca80319-5e50-45ff-9bd3-3ad98244e9d8-kube-api-access-qlfjh" (OuterVolumeSpecName: "kube-api-access-qlfjh") pod "9ca80319-5e50-45ff-9bd3-3ad98244e9d8" (UID: "9ca80319-5e50-45ff-9bd3-3ad98244e9d8"). InnerVolumeSpecName "kube-api-access-qlfjh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:48:17 crc kubenswrapper[4971]: I1127 08:48:17.910665 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ca80319-5e50-45ff-9bd3-3ad98244e9d8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9ca80319-5e50-45ff-9bd3-3ad98244e9d8" (UID: "9ca80319-5e50-45ff-9bd3-3ad98244e9d8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:48:17 crc kubenswrapper[4971]: I1127 08:48:17.933392 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ca80319-5e50-45ff-9bd3-3ad98244e9d8-config-data" (OuterVolumeSpecName: "config-data") pod "9ca80319-5e50-45ff-9bd3-3ad98244e9d8" (UID: "9ca80319-5e50-45ff-9bd3-3ad98244e9d8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:48:17 crc kubenswrapper[4971]: I1127 08:48:17.985041 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qlfjh\" (UniqueName: \"kubernetes.io/projected/9ca80319-5e50-45ff-9bd3-3ad98244e9d8-kube-api-access-qlfjh\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:17 crc kubenswrapper[4971]: I1127 08:48:17.985087 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ca80319-5e50-45ff-9bd3-3ad98244e9d8-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:17 crc kubenswrapper[4971]: I1127 08:48:17.985097 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ca80319-5e50-45ff-9bd3-3ad98244e9d8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.451826 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-lnfbm" event={"ID":"9ca80319-5e50-45ff-9bd3-3ad98244e9d8","Type":"ContainerDied","Data":"6b5c86002c049182d4a5f077bcaed7fa0ca0ca82bc71b8f1bd722f43a1dce604"} Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.452281 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b5c86002c049182d4a5f077bcaed7fa0ca0ca82bc71b8f1bd722f43a1dce604" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.451930 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-lnfbm" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.695196 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c95b65675-rnwh2"] Nov 27 08:48:18 crc kubenswrapper[4971]: E1127 08:48:18.696925 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ca80319-5e50-45ff-9bd3-3ad98244e9d8" containerName="keystone-db-sync" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.696955 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ca80319-5e50-45ff-9bd3-3ad98244e9d8" containerName="keystone-db-sync" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.697186 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ca80319-5e50-45ff-9bd3-3ad98244e9d8" containerName="keystone-db-sync" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.700728 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.719320 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c95b65675-rnwh2"] Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.785129 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-7vnqc"] Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.787301 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.791557 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.791848 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.792029 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-8ntd9" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.792953 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.793322 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.801743 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7vnqc"] Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.803059 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwgkk\" (UniqueName: \"kubernetes.io/projected/68d2df89-95b3-45e1-809a-a5f3cefcae3d-kube-api-access-kwgkk\") pod \"dnsmasq-dns-5c95b65675-rnwh2\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") " pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.803114 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-ovsdbserver-sb\") pod \"dnsmasq-dns-5c95b65675-rnwh2\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") " pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.803172 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-config\") pod \"dnsmasq-dns-5c95b65675-rnwh2\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") " pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.803218 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-dns-svc\") pod \"dnsmasq-dns-5c95b65675-rnwh2\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") " pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.803249 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-ovsdbserver-nb\") pod \"dnsmasq-dns-5c95b65675-rnwh2\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") " pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.907005 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-config\") pod \"dnsmasq-dns-5c95b65675-rnwh2\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") " pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.907085 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-credential-keys\") pod \"keystone-bootstrap-7vnqc\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.907137 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-fernet-keys\") pod \"keystone-bootstrap-7vnqc\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.907174 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-dns-svc\") pod \"dnsmasq-dns-5c95b65675-rnwh2\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") " pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.907206 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-ovsdbserver-nb\") pod \"dnsmasq-dns-5c95b65675-rnwh2\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") " pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.907269 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgpwg\" (UniqueName: \"kubernetes.io/projected/d3c17c1e-aa48-4c6a-9998-c8c434f37309-kube-api-access-bgpwg\") pod \"keystone-bootstrap-7vnqc\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.907337 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-combined-ca-bundle\") pod \"keystone-bootstrap-7vnqc\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.907395 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-scripts\") pod \"keystone-bootstrap-7vnqc\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.907430 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwgkk\" (UniqueName: \"kubernetes.io/projected/68d2df89-95b3-45e1-809a-a5f3cefcae3d-kube-api-access-kwgkk\") pod \"dnsmasq-dns-5c95b65675-rnwh2\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") " pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.907463 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-config-data\") pod \"keystone-bootstrap-7vnqc\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.907489 4971 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-ovsdbserver-sb\") pod \"dnsmasq-dns-5c95b65675-rnwh2\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") " pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.912459 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-ovsdbserver-nb\") pod \"dnsmasq-dns-5c95b65675-rnwh2\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") " pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.912476 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-dns-svc\") pod \"dnsmasq-dns-5c95b65675-rnwh2\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") " pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.913682 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-config\") pod \"dnsmasq-dns-5c95b65675-rnwh2\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") " pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.917054 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-ovsdbserver-sb\") pod \"dnsmasq-dns-5c95b65675-rnwh2\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") " pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:48:18 crc kubenswrapper[4971]: I1127 08:48:18.934474 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwgkk\" (UniqueName: \"kubernetes.io/projected/68d2df89-95b3-45e1-809a-a5f3cefcae3d-kube-api-access-kwgkk\") pod \"dnsmasq-dns-5c95b65675-rnwh2\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") " pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:48:19 crc kubenswrapper[4971]: I1127 08:48:19.009868 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-combined-ca-bundle\") pod \"keystone-bootstrap-7vnqc\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:19 crc kubenswrapper[4971]: I1127 08:48:19.010296 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-scripts\") pod \"keystone-bootstrap-7vnqc\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:19 crc kubenswrapper[4971]: I1127 08:48:19.010334 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-config-data\") pod \"keystone-bootstrap-7vnqc\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:19 crc kubenswrapper[4971]: I1127 08:48:19.010381 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-credential-keys\") pod \"keystone-bootstrap-7vnqc\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:19 crc kubenswrapper[4971]: I1127 08:48:19.010410 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-fernet-keys\") pod \"keystone-bootstrap-7vnqc\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:19 crc kubenswrapper[4971]: I1127 08:48:19.010469 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgpwg\" (UniqueName: \"kubernetes.io/projected/d3c17c1e-aa48-4c6a-9998-c8c434f37309-kube-api-access-bgpwg\") pod \"keystone-bootstrap-7vnqc\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:19 crc kubenswrapper[4971]: I1127 08:48:19.020186 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-config-data\") pod \"keystone-bootstrap-7vnqc\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:19 crc kubenswrapper[4971]: I1127 08:48:19.020526 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-fernet-keys\") pod \"keystone-bootstrap-7vnqc\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:19 crc kubenswrapper[4971]: I1127 08:48:19.024213 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-credential-keys\") pod \"keystone-bootstrap-7vnqc\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:19 crc kubenswrapper[4971]: I1127 08:48:19.030623 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-combined-ca-bundle\") pod \"keystone-bootstrap-7vnqc\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:19 crc kubenswrapper[4971]: I1127 08:48:19.036359 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:48:19 crc kubenswrapper[4971]: I1127 08:48:19.042514 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-scripts\") pod \"keystone-bootstrap-7vnqc\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:19 crc kubenswrapper[4971]: I1127 08:48:19.053317 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgpwg\" (UniqueName: \"kubernetes.io/projected/d3c17c1e-aa48-4c6a-9998-c8c434f37309-kube-api-access-bgpwg\") pod \"keystone-bootstrap-7vnqc\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:19 crc kubenswrapper[4971]: I1127 08:48:19.128397 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:19 crc kubenswrapper[4971]: I1127 08:48:19.593273 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c95b65675-rnwh2"] Nov 27 08:48:19 crc kubenswrapper[4971]: I1127 08:48:19.688628 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7vnqc"] Nov 27 08:48:20 crc kubenswrapper[4971]: I1127 08:48:20.476568 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7vnqc" event={"ID":"d3c17c1e-aa48-4c6a-9998-c8c434f37309","Type":"ContainerStarted","Data":"42fcb129990a3a9bce82819ac4069813165da82d40f539beb37df00c7359ac86"} Nov 27 08:48:20 crc kubenswrapper[4971]: I1127 08:48:20.477056 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7vnqc" event={"ID":"d3c17c1e-aa48-4c6a-9998-c8c434f37309","Type":"ContainerStarted","Data":"3cc8b007d505c748ee8b4ece6e5a298d38a56cabf9517a4de06048059dbd3202"} Nov 27 08:48:20 crc kubenswrapper[4971]: I1127 08:48:20.480648 4971 generic.go:334] "Generic (PLEG): container finished" podID="68d2df89-95b3-45e1-809a-a5f3cefcae3d" containerID="b91d0a2d15b716527ada5f5b0968d3b249c1929c613a5079aea5016c0c0c88d7" exitCode=0 Nov 27 08:48:20 crc kubenswrapper[4971]: I1127 08:48:20.480699 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" event={"ID":"68d2df89-95b3-45e1-809a-a5f3cefcae3d","Type":"ContainerDied","Data":"b91d0a2d15b716527ada5f5b0968d3b249c1929c613a5079aea5016c0c0c88d7"} Nov 27 08:48:20 crc kubenswrapper[4971]: I1127 08:48:20.480734 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" event={"ID":"68d2df89-95b3-45e1-809a-a5f3cefcae3d","Type":"ContainerStarted","Data":"a245cc8fd1cb4c5e4ac3db6b52804f527483ea3968738a928f83647ba93512dc"} Nov 27 08:48:20 crc kubenswrapper[4971]: I1127 08:48:20.508706 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-7vnqc" podStartSLOduration=2.508682479 podStartE2EDuration="2.508682479s" podCreationTimestamp="2025-11-27 08:48:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:48:20.501688929 +0000 UTC m=+6938.693732847" watchObservedRunningTime="2025-11-27 08:48:20.508682479 +0000 UTC m=+6938.700726387" Nov 27 08:48:21 crc kubenswrapper[4971]: I1127 08:48:21.544137 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" event={"ID":"68d2df89-95b3-45e1-809a-a5f3cefcae3d","Type":"ContainerStarted","Data":"a02a6dbb765e454822f7db60bb0e8d37be0cd1594baa386cc82052e3ca98b3e0"} Nov 27 08:48:21 crc kubenswrapper[4971]: I1127 08:48:21.545319 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:48:21 crc kubenswrapper[4971]: I1127 08:48:21.571799 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" podStartSLOduration=3.571779859 podStartE2EDuration="3.571779859s" podCreationTimestamp="2025-11-27 08:48:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:48:21.564075958 +0000 UTC m=+6939.756119886" watchObservedRunningTime="2025-11-27 08:48:21.571779859 +0000 UTC m=+6939.763823777" Nov 27 08:48:24 crc 
kubenswrapper[4971]: I1127 08:48:24.572615 4971 generic.go:334] "Generic (PLEG): container finished" podID="d3c17c1e-aa48-4c6a-9998-c8c434f37309" containerID="42fcb129990a3a9bce82819ac4069813165da82d40f539beb37df00c7359ac86" exitCode=0 Nov 27 08:48:24 crc kubenswrapper[4971]: I1127 08:48:24.572696 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7vnqc" event={"ID":"d3c17c1e-aa48-4c6a-9998-c8c434f37309","Type":"ContainerDied","Data":"42fcb129990a3a9bce82819ac4069813165da82d40f539beb37df00c7359ac86"} Nov 27 08:48:25 crc kubenswrapper[4971]: I1127 08:48:25.962288 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.054901 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-scripts\") pod \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.054960 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-config-data\") pod \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.055113 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-fernet-keys\") pod \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.055143 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-credential-keys\") pod \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.055184 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgpwg\" (UniqueName: \"kubernetes.io/projected/d3c17c1e-aa48-4c6a-9998-c8c434f37309-kube-api-access-bgpwg\") pod \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.055242 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-combined-ca-bundle\") pod \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\" (UID: \"d3c17c1e-aa48-4c6a-9998-c8c434f37309\") " Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.062597 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3c17c1e-aa48-4c6a-9998-c8c434f37309-kube-api-access-bgpwg" (OuterVolumeSpecName: "kube-api-access-bgpwg") pod "d3c17c1e-aa48-4c6a-9998-c8c434f37309" (UID: "d3c17c1e-aa48-4c6a-9998-c8c434f37309"). InnerVolumeSpecName "kube-api-access-bgpwg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.062739 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d3c17c1e-aa48-4c6a-9998-c8c434f37309" (UID: "d3c17c1e-aa48-4c6a-9998-c8c434f37309"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.063097 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "d3c17c1e-aa48-4c6a-9998-c8c434f37309" (UID: "d3c17c1e-aa48-4c6a-9998-c8c434f37309"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.064053 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-scripts" (OuterVolumeSpecName: "scripts") pod "d3c17c1e-aa48-4c6a-9998-c8c434f37309" (UID: "d3c17c1e-aa48-4c6a-9998-c8c434f37309"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.100709 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d3c17c1e-aa48-4c6a-9998-c8c434f37309" (UID: "d3c17c1e-aa48-4c6a-9998-c8c434f37309"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.119924 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-config-data" (OuterVolumeSpecName: "config-data") pod "d3c17c1e-aa48-4c6a-9998-c8c434f37309" (UID: "d3c17c1e-aa48-4c6a-9998-c8c434f37309"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.159519 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgpwg\" (UniqueName: \"kubernetes.io/projected/d3c17c1e-aa48-4c6a-9998-c8c434f37309-kube-api-access-bgpwg\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.159588 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.159602 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.159611 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.159624 4971 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.159635 4971 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d3c17c1e-aa48-4c6a-9998-c8c434f37309-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.629846 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7vnqc" event={"ID":"d3c17c1e-aa48-4c6a-9998-c8c434f37309","Type":"ContainerDied","Data":"3cc8b007d505c748ee8b4ece6e5a298d38a56cabf9517a4de06048059dbd3202"} Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.630298 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3cc8b007d505c748ee8b4ece6e5a298d38a56cabf9517a4de06048059dbd3202" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.630408 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-7vnqc" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.730399 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-7vnqc"] Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.737921 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-7vnqc"] Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.789207 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-tgqvj"] Nov 27 08:48:26 crc kubenswrapper[4971]: E1127 08:48:26.790063 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3c17c1e-aa48-4c6a-9998-c8c434f37309" containerName="keystone-bootstrap" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.790092 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3c17c1e-aa48-4c6a-9998-c8c434f37309" containerName="keystone-bootstrap" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.790301 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3c17c1e-aa48-4c6a-9998-c8c434f37309" containerName="keystone-bootstrap" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.790912 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.792953 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.793157 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.793216 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.793790 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-8ntd9" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.794103 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.810802 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-tgqvj"] Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.886962 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-fernet-keys\") pod \"keystone-bootstrap-tgqvj\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.887036 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-combined-ca-bundle\") pod \"keystone-bootstrap-tgqvj\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.887116 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pczf\" (UniqueName: \"kubernetes.io/projected/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-kube-api-access-6pczf\") pod \"keystone-bootstrap-tgqvj\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:26 crc kubenswrapper[4971]: 
I1127 08:48:26.887184 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-scripts\") pod \"keystone-bootstrap-tgqvj\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.887210 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-credential-keys\") pod \"keystone-bootstrap-tgqvj\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.887320 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-config-data\") pod \"keystone-bootstrap-tgqvj\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.989003 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pczf\" (UniqueName: \"kubernetes.io/projected/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-kube-api-access-6pczf\") pod \"keystone-bootstrap-tgqvj\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.989090 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-scripts\") pod \"keystone-bootstrap-tgqvj\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.989123 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-credential-keys\") pod \"keystone-bootstrap-tgqvj\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.989156 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-config-data\") pod \"keystone-bootstrap-tgqvj\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.989230 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-fernet-keys\") pod \"keystone-bootstrap-tgqvj\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.989275 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-combined-ca-bundle\") pod \"keystone-bootstrap-tgqvj\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.993960 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-scripts\") pod \"keystone-bootstrap-tgqvj\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.994167 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-credential-keys\") pod \"keystone-bootstrap-tgqvj\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.994277 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-combined-ca-bundle\") pod \"keystone-bootstrap-tgqvj\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.994637 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-fernet-keys\") pod \"keystone-bootstrap-tgqvj\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:26 crc kubenswrapper[4971]: I1127 08:48:26.994873 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-config-data\") pod \"keystone-bootstrap-tgqvj\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:27 crc kubenswrapper[4971]: I1127 08:48:27.008522 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pczf\" (UniqueName: \"kubernetes.io/projected/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-kube-api-access-6pczf\") pod \"keystone-bootstrap-tgqvj\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:27 crc kubenswrapper[4971]: I1127 08:48:27.108998 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:27 crc kubenswrapper[4971]: I1127 08:48:27.539748 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-tgqvj"] Nov 27 08:48:27 crc kubenswrapper[4971]: I1127 08:48:27.641730 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tgqvj" event={"ID":"6b83d4ed-92d4-41a2-9dff-071a1b6932ad","Type":"ContainerStarted","Data":"0cbaaecdf99010d955d401f7d91488f1b25e457d2058db605a064bb3e78a4fa2"} Nov 27 08:48:28 crc kubenswrapper[4971]: I1127 08:48:28.562167 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3c17c1e-aa48-4c6a-9998-c8c434f37309" path="/var/lib/kubelet/pods/d3c17c1e-aa48-4c6a-9998-c8c434f37309/volumes" Nov 27 08:48:28 crc kubenswrapper[4971]: I1127 08:48:28.655201 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tgqvj" event={"ID":"6b83d4ed-92d4-41a2-9dff-071a1b6932ad","Type":"ContainerStarted","Data":"a3295fbfde623d2924ea84b3ba0a09ed9ce0017b32e5be386a1d7c05c0e6b051"} Nov 27 08:48:28 crc kubenswrapper[4971]: I1127 08:48:28.681262 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-tgqvj" podStartSLOduration=2.681244114 podStartE2EDuration="2.681244114s" podCreationTimestamp="2025-11-27 08:48:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:48:28.673576004 +0000 UTC m=+6946.865619922" watchObservedRunningTime="2025-11-27 08:48:28.681244114 +0000 UTC m=+6946.873288032" Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.039793 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.106383 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccbc467bc-bh4qq"] Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.106653 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" podUID="0413b693-383d-4291-bab3-447fd147e5ff" containerName="dnsmasq-dns" containerID="cri-o://4c6e990098643cfec006aa78c7e1cc19a73c8d146e25ca9008f0015f54408485" gracePeriod=10 Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.617869 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.666264 4971 generic.go:334] "Generic (PLEG): container finished" podID="0413b693-383d-4291-bab3-447fd147e5ff" containerID="4c6e990098643cfec006aa78c7e1cc19a73c8d146e25ca9008f0015f54408485" exitCode=0 Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.666384 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.666383 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" event={"ID":"0413b693-383d-4291-bab3-447fd147e5ff","Type":"ContainerDied","Data":"4c6e990098643cfec006aa78c7e1cc19a73c8d146e25ca9008f0015f54408485"} Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.666564 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccbc467bc-bh4qq" event={"ID":"0413b693-383d-4291-bab3-447fd147e5ff","Type":"ContainerDied","Data":"1b208c6bad050a2ddde9c859757be81aaa076c89b3cb54f992cc1768259b1a1f"} Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.666587 4971 scope.go:117] "RemoveContainer" containerID="4c6e990098643cfec006aa78c7e1cc19a73c8d146e25ca9008f0015f54408485" Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.697943 4971 scope.go:117] "RemoveContainer" containerID="1b7f8a572e2887a79e4dc5e3c00774af3d1b9ec6faf1103f3d830a96390f967c" Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.728629 4971 scope.go:117] "RemoveContainer" containerID="4c6e990098643cfec006aa78c7e1cc19a73c8d146e25ca9008f0015f54408485" Nov 27 08:48:29 crc kubenswrapper[4971]: E1127 08:48:29.729207 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c6e990098643cfec006aa78c7e1cc19a73c8d146e25ca9008f0015f54408485\": container with ID starting with 4c6e990098643cfec006aa78c7e1cc19a73c8d146e25ca9008f0015f54408485 not found: ID does not exist" containerID="4c6e990098643cfec006aa78c7e1cc19a73c8d146e25ca9008f0015f54408485" Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.729273 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c6e990098643cfec006aa78c7e1cc19a73c8d146e25ca9008f0015f54408485"} err="failed to get container status \"4c6e990098643cfec006aa78c7e1cc19a73c8d146e25ca9008f0015f54408485\": rpc error: code = NotFound desc = could not find container \"4c6e990098643cfec006aa78c7e1cc19a73c8d146e25ca9008f0015f54408485\": container with ID starting with 4c6e990098643cfec006aa78c7e1cc19a73c8d146e25ca9008f0015f54408485 not found: ID does not exist" Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.729309 4971 scope.go:117] "RemoveContainer" containerID="1b7f8a572e2887a79e4dc5e3c00774af3d1b9ec6faf1103f3d830a96390f967c" Nov 27 08:48:29 crc kubenswrapper[4971]: E1127 08:48:29.729762 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b7f8a572e2887a79e4dc5e3c00774af3d1b9ec6faf1103f3d830a96390f967c\": container with ID starting with 1b7f8a572e2887a79e4dc5e3c00774af3d1b9ec6faf1103f3d830a96390f967c not found: ID does not exist" containerID="1b7f8a572e2887a79e4dc5e3c00774af3d1b9ec6faf1103f3d830a96390f967c" Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.729801 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b7f8a572e2887a79e4dc5e3c00774af3d1b9ec6faf1103f3d830a96390f967c"} err="failed to get container status \"1b7f8a572e2887a79e4dc5e3c00774af3d1b9ec6faf1103f3d830a96390f967c\": rpc error: code = NotFound desc = could not find container \"1b7f8a572e2887a79e4dc5e3c00774af3d1b9ec6faf1103f3d830a96390f967c\": container with ID starting with 1b7f8a572e2887a79e4dc5e3c00774af3d1b9ec6faf1103f3d830a96390f967c not found: ID does not exist" Nov 27 08:48:29 crc 
kubenswrapper[4971]: I1127 08:48:29.770767 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-config\") pod \"0413b693-383d-4291-bab3-447fd147e5ff\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.770902 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5r75\" (UniqueName: \"kubernetes.io/projected/0413b693-383d-4291-bab3-447fd147e5ff-kube-api-access-s5r75\") pod \"0413b693-383d-4291-bab3-447fd147e5ff\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.770930 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-dns-svc\") pod \"0413b693-383d-4291-bab3-447fd147e5ff\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.771042 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-ovsdbserver-sb\") pod \"0413b693-383d-4291-bab3-447fd147e5ff\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.771069 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-ovsdbserver-nb\") pod \"0413b693-383d-4291-bab3-447fd147e5ff\" (UID: \"0413b693-383d-4291-bab3-447fd147e5ff\") " Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.780853 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0413b693-383d-4291-bab3-447fd147e5ff-kube-api-access-s5r75" (OuterVolumeSpecName: "kube-api-access-s5r75") pod "0413b693-383d-4291-bab3-447fd147e5ff" (UID: "0413b693-383d-4291-bab3-447fd147e5ff"). InnerVolumeSpecName "kube-api-access-s5r75". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.826495 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-config" (OuterVolumeSpecName: "config") pod "0413b693-383d-4291-bab3-447fd147e5ff" (UID: "0413b693-383d-4291-bab3-447fd147e5ff"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.831199 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0413b693-383d-4291-bab3-447fd147e5ff" (UID: "0413b693-383d-4291-bab3-447fd147e5ff"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.836065 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0413b693-383d-4291-bab3-447fd147e5ff" (UID: "0413b693-383d-4291-bab3-447fd147e5ff"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.837086 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0413b693-383d-4291-bab3-447fd147e5ff" (UID: "0413b693-383d-4291-bab3-447fd147e5ff"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.872739 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-config\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.872797 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5r75\" (UniqueName: \"kubernetes.io/projected/0413b693-383d-4291-bab3-447fd147e5ff-kube-api-access-s5r75\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.872817 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.872846 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:29 crc kubenswrapper[4971]: I1127 08:48:29.872859 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0413b693-383d-4291-bab3-447fd147e5ff-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:30 crc kubenswrapper[4971]: I1127 08:48:30.009238 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccbc467bc-bh4qq"] Nov 27 08:48:30 crc kubenswrapper[4971]: I1127 08:48:30.020917 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ccbc467bc-bh4qq"] Nov 27 08:48:30 crc kubenswrapper[4971]: I1127 08:48:30.562930 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0413b693-383d-4291-bab3-447fd147e5ff" path="/var/lib/kubelet/pods/0413b693-383d-4291-bab3-447fd147e5ff/volumes" Nov 27 08:48:31 crc kubenswrapper[4971]: I1127 08:48:31.690726 4971 generic.go:334] "Generic (PLEG): container finished" podID="6b83d4ed-92d4-41a2-9dff-071a1b6932ad" containerID="a3295fbfde623d2924ea84b3ba0a09ed9ce0017b32e5be386a1d7c05c0e6b051" exitCode=0 Nov 27 08:48:31 crc kubenswrapper[4971]: I1127 08:48:31.691201 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tgqvj" event={"ID":"6b83d4ed-92d4-41a2-9dff-071a1b6932ad","Type":"ContainerDied","Data":"a3295fbfde623d2924ea84b3ba0a09ed9ce0017b32e5be386a1d7c05c0e6b051"} Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.080266 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.230311 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-combined-ca-bundle\") pod \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.230409 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-credential-keys\") pod \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.230441 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-fernet-keys\") pod \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.230476 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pczf\" (UniqueName: \"kubernetes.io/projected/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-kube-api-access-6pczf\") pod \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.230653 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-scripts\") pod \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.230691 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-config-data\") pod \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.237587 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "6b83d4ed-92d4-41a2-9dff-071a1b6932ad" (UID: "6b83d4ed-92d4-41a2-9dff-071a1b6932ad"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.242871 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-scripts" (OuterVolumeSpecName: "scripts") pod "6b83d4ed-92d4-41a2-9dff-071a1b6932ad" (UID: "6b83d4ed-92d4-41a2-9dff-071a1b6932ad"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.243621 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "6b83d4ed-92d4-41a2-9dff-071a1b6932ad" (UID: "6b83d4ed-92d4-41a2-9dff-071a1b6932ad"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.244107 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-kube-api-access-6pczf" (OuterVolumeSpecName: "kube-api-access-6pczf") pod "6b83d4ed-92d4-41a2-9dff-071a1b6932ad" (UID: "6b83d4ed-92d4-41a2-9dff-071a1b6932ad"). InnerVolumeSpecName "kube-api-access-6pczf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:48:33 crc kubenswrapper[4971]: E1127 08:48:33.253057 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-combined-ca-bundle podName:6b83d4ed-92d4-41a2-9dff-071a1b6932ad nodeName:}" failed. No retries permitted until 2025-11-27 08:48:33.753027064 +0000 UTC m=+6951.945070982 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-combined-ca-bundle") pod "6b83d4ed-92d4-41a2-9dff-071a1b6932ad" (UID: "6b83d4ed-92d4-41a2-9dff-071a1b6932ad") : error deleting /var/lib/kubelet/pods/6b83d4ed-92d4-41a2-9dff-071a1b6932ad/volume-subpaths: remove /var/lib/kubelet/pods/6b83d4ed-92d4-41a2-9dff-071a1b6932ad/volume-subpaths: no such file or directory Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.255805 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-config-data" (OuterVolumeSpecName: "config-data") pod "6b83d4ed-92d4-41a2-9dff-071a1b6932ad" (UID: "6b83d4ed-92d4-41a2-9dff-071a1b6932ad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.332149 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.332194 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.332203 4971 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.332212 4971 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.332221 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pczf\" (UniqueName: \"kubernetes.io/projected/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-kube-api-access-6pczf\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.707433 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tgqvj" event={"ID":"6b83d4ed-92d4-41a2-9dff-071a1b6932ad","Type":"ContainerDied","Data":"0cbaaecdf99010d955d401f7d91488f1b25e457d2058db605a064bb3e78a4fa2"} Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.707472 4971 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="0cbaaecdf99010d955d401f7d91488f1b25e457d2058db605a064bb3e78a4fa2" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.707525 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tgqvj" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.841507 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-combined-ca-bundle\") pod \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\" (UID: \"6b83d4ed-92d4-41a2-9dff-071a1b6932ad\") " Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.867761 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b83d4ed-92d4-41a2-9dff-071a1b6932ad" (UID: "6b83d4ed-92d4-41a2-9dff-071a1b6932ad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.873576 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6d6f88957b-tm5kw"] Nov 27 08:48:33 crc kubenswrapper[4971]: E1127 08:48:33.873923 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0413b693-383d-4291-bab3-447fd147e5ff" containerName="init" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.873940 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0413b693-383d-4291-bab3-447fd147e5ff" containerName="init" Nov 27 08:48:33 crc kubenswrapper[4971]: E1127 08:48:33.873954 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b83d4ed-92d4-41a2-9dff-071a1b6932ad" containerName="keystone-bootstrap" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.873961 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b83d4ed-92d4-41a2-9dff-071a1b6932ad" containerName="keystone-bootstrap" Nov 27 08:48:33 crc kubenswrapper[4971]: E1127 08:48:33.873985 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0413b693-383d-4291-bab3-447fd147e5ff" containerName="dnsmasq-dns" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.873992 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0413b693-383d-4291-bab3-447fd147e5ff" containerName="dnsmasq-dns" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.874161 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b83d4ed-92d4-41a2-9dff-071a1b6932ad" containerName="keystone-bootstrap" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.874183 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0413b693-383d-4291-bab3-447fd147e5ff" containerName="dnsmasq-dns" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.874769 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.885959 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6d6f88957b-tm5kw"] Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.943926 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dd725c35-d118-42cd-b18e-52de53fd7fc0-fernet-keys\") pod \"keystone-6d6f88957b-tm5kw\" (UID: \"dd725c35-d118-42cd-b18e-52de53fd7fc0\") " pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.944038 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd725c35-d118-42cd-b18e-52de53fd7fc0-scripts\") pod \"keystone-6d6f88957b-tm5kw\" (UID: \"dd725c35-d118-42cd-b18e-52de53fd7fc0\") " pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.944085 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd725c35-d118-42cd-b18e-52de53fd7fc0-config-data\") pod \"keystone-6d6f88957b-tm5kw\" (UID: \"dd725c35-d118-42cd-b18e-52de53fd7fc0\") " pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.944138 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dd725c35-d118-42cd-b18e-52de53fd7fc0-credential-keys\") pod \"keystone-6d6f88957b-tm5kw\" (UID: \"dd725c35-d118-42cd-b18e-52de53fd7fc0\") " pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.944209 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd725c35-d118-42cd-b18e-52de53fd7fc0-combined-ca-bundle\") pod \"keystone-6d6f88957b-tm5kw\" (UID: \"dd725c35-d118-42cd-b18e-52de53fd7fc0\") " pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.944252 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4x9n\" (UniqueName: \"kubernetes.io/projected/dd725c35-d118-42cd-b18e-52de53fd7fc0-kube-api-access-f4x9n\") pod \"keystone-6d6f88957b-tm5kw\" (UID: \"dd725c35-d118-42cd-b18e-52de53fd7fc0\") " pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:33 crc kubenswrapper[4971]: I1127 08:48:33.944326 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b83d4ed-92d4-41a2-9dff-071a1b6932ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:48:34 crc kubenswrapper[4971]: I1127 08:48:34.045959 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dd725c35-d118-42cd-b18e-52de53fd7fc0-fernet-keys\") pod \"keystone-6d6f88957b-tm5kw\" (UID: \"dd725c35-d118-42cd-b18e-52de53fd7fc0\") " pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:34 crc kubenswrapper[4971]: I1127 08:48:34.046070 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd725c35-d118-42cd-b18e-52de53fd7fc0-scripts\") pod \"keystone-6d6f88957b-tm5kw\" (UID: 
\"dd725c35-d118-42cd-b18e-52de53fd7fc0\") " pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:34 crc kubenswrapper[4971]: I1127 08:48:34.046113 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd725c35-d118-42cd-b18e-52de53fd7fc0-config-data\") pod \"keystone-6d6f88957b-tm5kw\" (UID: \"dd725c35-d118-42cd-b18e-52de53fd7fc0\") " pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:34 crc kubenswrapper[4971]: I1127 08:48:34.046140 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dd725c35-d118-42cd-b18e-52de53fd7fc0-credential-keys\") pod \"keystone-6d6f88957b-tm5kw\" (UID: \"dd725c35-d118-42cd-b18e-52de53fd7fc0\") " pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:34 crc kubenswrapper[4971]: I1127 08:48:34.046199 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd725c35-d118-42cd-b18e-52de53fd7fc0-combined-ca-bundle\") pod \"keystone-6d6f88957b-tm5kw\" (UID: \"dd725c35-d118-42cd-b18e-52de53fd7fc0\") " pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:34 crc kubenswrapper[4971]: I1127 08:48:34.046235 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4x9n\" (UniqueName: \"kubernetes.io/projected/dd725c35-d118-42cd-b18e-52de53fd7fc0-kube-api-access-f4x9n\") pod \"keystone-6d6f88957b-tm5kw\" (UID: \"dd725c35-d118-42cd-b18e-52de53fd7fc0\") " pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:34 crc kubenswrapper[4971]: I1127 08:48:34.055651 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dd725c35-d118-42cd-b18e-52de53fd7fc0-fernet-keys\") pod \"keystone-6d6f88957b-tm5kw\" (UID: \"dd725c35-d118-42cd-b18e-52de53fd7fc0\") " pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:34 crc kubenswrapper[4971]: I1127 08:48:34.055755 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd725c35-d118-42cd-b18e-52de53fd7fc0-config-data\") pod \"keystone-6d6f88957b-tm5kw\" (UID: \"dd725c35-d118-42cd-b18e-52de53fd7fc0\") " pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:34 crc kubenswrapper[4971]: I1127 08:48:34.057181 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd725c35-d118-42cd-b18e-52de53fd7fc0-combined-ca-bundle\") pod \"keystone-6d6f88957b-tm5kw\" (UID: \"dd725c35-d118-42cd-b18e-52de53fd7fc0\") " pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:34 crc kubenswrapper[4971]: I1127 08:48:34.058469 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dd725c35-d118-42cd-b18e-52de53fd7fc0-credential-keys\") pod \"keystone-6d6f88957b-tm5kw\" (UID: \"dd725c35-d118-42cd-b18e-52de53fd7fc0\") " pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:34 crc kubenswrapper[4971]: I1127 08:48:34.059989 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd725c35-d118-42cd-b18e-52de53fd7fc0-scripts\") pod \"keystone-6d6f88957b-tm5kw\" (UID: \"dd725c35-d118-42cd-b18e-52de53fd7fc0\") " pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:34 crc kubenswrapper[4971]: I1127 08:48:34.073481 4971 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4x9n\" (UniqueName: \"kubernetes.io/projected/dd725c35-d118-42cd-b18e-52de53fd7fc0-kube-api-access-f4x9n\") pod \"keystone-6d6f88957b-tm5kw\" (UID: \"dd725c35-d118-42cd-b18e-52de53fd7fc0\") " pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:34 crc kubenswrapper[4971]: I1127 08:48:34.216969 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:34 crc kubenswrapper[4971]: I1127 08:48:34.684892 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6d6f88957b-tm5kw"] Nov 27 08:48:34 crc kubenswrapper[4971]: I1127 08:48:34.717619 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6d6f88957b-tm5kw" event={"ID":"dd725c35-d118-42cd-b18e-52de53fd7fc0","Type":"ContainerStarted","Data":"ccc109d47f5a23de9431982e8e9b10e976670a7c83d1e06bf7932f3aac6e44e5"} Nov 27 08:48:35 crc kubenswrapper[4971]: I1127 08:48:35.726673 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6d6f88957b-tm5kw" event={"ID":"dd725c35-d118-42cd-b18e-52de53fd7fc0","Type":"ContainerStarted","Data":"b4987c146d152b1801b24a3a4058129428a44149e7757d8b0b423d03cf86659a"} Nov 27 08:48:35 crc kubenswrapper[4971]: I1127 08:48:35.728327 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:48:35 crc kubenswrapper[4971]: I1127 08:48:35.748869 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-6d6f88957b-tm5kw" podStartSLOduration=2.74884987 podStartE2EDuration="2.74884987s" podCreationTimestamp="2025-11-27 08:48:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:48:35.743179397 +0000 UTC m=+6953.935223365" watchObservedRunningTime="2025-11-27 08:48:35.74884987 +0000 UTC m=+6953.940893788" Nov 27 08:49:05 crc kubenswrapper[4971]: I1127 08:49:05.815165 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-6d6f88957b-tm5kw" Nov 27 08:49:07 crc kubenswrapper[4971]: I1127 08:49:07.390840 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Nov 27 08:49:07 crc kubenswrapper[4971]: I1127 08:49:07.392786 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Nov 27 08:49:07 crc kubenswrapper[4971]: I1127 08:49:07.395786 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Nov 27 08:49:07 crc kubenswrapper[4971]: I1127 08:49:07.395995 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-nfwkg" Nov 27 08:49:07 crc kubenswrapper[4971]: I1127 08:49:07.398464 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Nov 27 08:49:07 crc kubenswrapper[4971]: I1127 08:49:07.398880 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2-openstack-config\") pod \"openstackclient\" (UID: \"8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2\") " pod="openstack/openstackclient" Nov 27 08:49:07 crc kubenswrapper[4971]: I1127 08:49:07.398914 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2-openstack-config-secret\") pod \"openstackclient\" (UID: \"8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2\") " pod="openstack/openstackclient" Nov 27 08:49:07 crc kubenswrapper[4971]: I1127 08:49:07.398965 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxgpb\" (UniqueName: \"kubernetes.io/projected/8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2-kube-api-access-jxgpb\") pod \"openstackclient\" (UID: \"8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2\") " pod="openstack/openstackclient" Nov 27 08:49:07 crc kubenswrapper[4971]: I1127 08:49:07.402068 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 27 08:49:07 crc kubenswrapper[4971]: I1127 08:49:07.501344 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxgpb\" (UniqueName: \"kubernetes.io/projected/8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2-kube-api-access-jxgpb\") pod \"openstackclient\" (UID: \"8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2\") " pod="openstack/openstackclient" Nov 27 08:49:07 crc kubenswrapper[4971]: I1127 08:49:07.501648 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2-openstack-config\") pod \"openstackclient\" (UID: \"8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2\") " pod="openstack/openstackclient" Nov 27 08:49:07 crc kubenswrapper[4971]: I1127 08:49:07.501713 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2-openstack-config-secret\") pod \"openstackclient\" (UID: \"8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2\") " pod="openstack/openstackclient" Nov 27 08:49:07 crc kubenswrapper[4971]: I1127 08:49:07.502759 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2-openstack-config\") pod \"openstackclient\" (UID: \"8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2\") " pod="openstack/openstackclient" Nov 27 08:49:07 crc kubenswrapper[4971]: I1127 08:49:07.509207 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2-openstack-config-secret\") pod \"openstackclient\" (UID: \"8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2\") " pod="openstack/openstackclient" Nov 27 08:49:07 crc kubenswrapper[4971]: I1127 08:49:07.521801 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxgpb\" (UniqueName: \"kubernetes.io/projected/8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2-kube-api-access-jxgpb\") pod \"openstackclient\" (UID: \"8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2\") " pod="openstack/openstackclient" Nov 27 08:49:07 crc kubenswrapper[4971]: I1127 08:49:07.720752 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 27 08:49:08 crc kubenswrapper[4971]: I1127 08:49:08.151903 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 27 08:49:09 crc kubenswrapper[4971]: I1127 08:49:09.067144 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2","Type":"ContainerStarted","Data":"29dc4690b51792c34c2abbcddd0bdfb93d65851252a44e62ca4cc92a48baa33b"} Nov 27 08:49:19 crc kubenswrapper[4971]: I1127 08:49:19.155062 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2","Type":"ContainerStarted","Data":"3765b97474f278be19917650cfbfdf8fd192f74de6b098bd04bcc4bea85fbacf"} Nov 27 08:49:19 crc kubenswrapper[4971]: I1127 08:49:19.176340 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.555102266 podStartE2EDuration="12.176322197s" podCreationTimestamp="2025-11-27 08:49:07 +0000 UTC" firstStartedPulling="2025-11-27 08:49:08.16517537 +0000 UTC m=+6986.357219288" lastFinishedPulling="2025-11-27 08:49:18.786395301 +0000 UTC m=+6996.978439219" observedRunningTime="2025-11-27 08:49:19.170612093 +0000 UTC m=+6997.362656011" watchObservedRunningTime="2025-11-27 08:49:19.176322197 +0000 UTC m=+6997.368366115" Nov 27 08:49:26 crc kubenswrapper[4971]: I1127 08:49:26.413628 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:49:26 crc kubenswrapper[4971]: I1127 08:49:26.414431 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:49:56 crc kubenswrapper[4971]: I1127 08:49:56.413822 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:49:56 crc kubenswrapper[4971]: I1127 08:49:56.414720 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:50:26 crc kubenswrapper[4971]: I1127 08:50:26.413147 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 08:50:26 crc kubenswrapper[4971]: I1127 08:50:26.413916 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 08:50:26 crc kubenswrapper[4971]: I1127 08:50:26.413967 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 08:50:26 crc kubenswrapper[4971]: I1127 08:50:26.414732 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 08:50:26 crc kubenswrapper[4971]: I1127 08:50:26.414791 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" gracePeriod=600 Nov 27 08:50:26 crc kubenswrapper[4971]: E1127 08:50:26.547678 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:50:26 crc kubenswrapper[4971]: I1127 08:50:26.813603 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" exitCode=0 Nov 27 08:50:26 crc kubenswrapper[4971]: I1127 08:50:26.813671 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4"} Nov 27 08:50:26 crc kubenswrapper[4971]: I1127 08:50:26.813722 4971 scope.go:117] "RemoveContainer" containerID="972a96022e7888e797ef56d94e28ce8ef6b71b2858abcc7c40439a752ca23142" Nov 27 08:50:26 crc kubenswrapper[4971]: I1127 08:50:26.814115 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:50:26 crc kubenswrapper[4971]: E1127 08:50:26.814374 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:50:38 crc kubenswrapper[4971]: I1127 08:50:38.551161 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:50:38 crc kubenswrapper[4971]: E1127 08:50:38.553477 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:50:45 crc kubenswrapper[4971]: I1127 08:50:45.366767 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5b4pp"] Nov 27 08:50:45 crc kubenswrapper[4971]: I1127 08:50:45.369206 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5b4pp" Nov 27 08:50:45 crc kubenswrapper[4971]: I1127 08:50:45.381720 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5b4pp"] Nov 27 08:50:45 crc kubenswrapper[4971]: I1127 08:50:45.424633 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58da1d43-b47b-48b0-9654-c43b137097cc-catalog-content\") pod \"redhat-operators-5b4pp\" (UID: \"58da1d43-b47b-48b0-9654-c43b137097cc\") " pod="openshift-marketplace/redhat-operators-5b4pp" Nov 27 08:50:45 crc kubenswrapper[4971]: I1127 08:50:45.424728 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58da1d43-b47b-48b0-9654-c43b137097cc-utilities\") pod \"redhat-operators-5b4pp\" (UID: \"58da1d43-b47b-48b0-9654-c43b137097cc\") " pod="openshift-marketplace/redhat-operators-5b4pp" Nov 27 08:50:45 crc kubenswrapper[4971]: I1127 08:50:45.424780 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljd7z\" (UniqueName: \"kubernetes.io/projected/58da1d43-b47b-48b0-9654-c43b137097cc-kube-api-access-ljd7z\") pod \"redhat-operators-5b4pp\" (UID: \"58da1d43-b47b-48b0-9654-c43b137097cc\") " pod="openshift-marketplace/redhat-operators-5b4pp" Nov 27 08:50:45 crc kubenswrapper[4971]: I1127 08:50:45.526814 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljd7z\" (UniqueName: \"kubernetes.io/projected/58da1d43-b47b-48b0-9654-c43b137097cc-kube-api-access-ljd7z\") pod \"redhat-operators-5b4pp\" (UID: \"58da1d43-b47b-48b0-9654-c43b137097cc\") " pod="openshift-marketplace/redhat-operators-5b4pp" Nov 27 08:50:45 crc kubenswrapper[4971]: I1127 08:50:45.527001 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58da1d43-b47b-48b0-9654-c43b137097cc-catalog-content\") pod \"redhat-operators-5b4pp\" (UID: \"58da1d43-b47b-48b0-9654-c43b137097cc\") " pod="openshift-marketplace/redhat-operators-5b4pp" Nov 27 08:50:45 crc kubenswrapper[4971]: I1127 08:50:45.527062 4971 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58da1d43-b47b-48b0-9654-c43b137097cc-utilities\") pod \"redhat-operators-5b4pp\" (UID: \"58da1d43-b47b-48b0-9654-c43b137097cc\") " pod="openshift-marketplace/redhat-operators-5b4pp" Nov 27 08:50:45 crc kubenswrapper[4971]: I1127 08:50:45.527684 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58da1d43-b47b-48b0-9654-c43b137097cc-catalog-content\") pod \"redhat-operators-5b4pp\" (UID: \"58da1d43-b47b-48b0-9654-c43b137097cc\") " pod="openshift-marketplace/redhat-operators-5b4pp" Nov 27 08:50:45 crc kubenswrapper[4971]: I1127 08:50:45.527876 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58da1d43-b47b-48b0-9654-c43b137097cc-utilities\") pod \"redhat-operators-5b4pp\" (UID: \"58da1d43-b47b-48b0-9654-c43b137097cc\") " pod="openshift-marketplace/redhat-operators-5b4pp" Nov 27 08:50:45 crc kubenswrapper[4971]: I1127 08:50:45.559027 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljd7z\" (UniqueName: \"kubernetes.io/projected/58da1d43-b47b-48b0-9654-c43b137097cc-kube-api-access-ljd7z\") pod \"redhat-operators-5b4pp\" (UID: \"58da1d43-b47b-48b0-9654-c43b137097cc\") " pod="openshift-marketplace/redhat-operators-5b4pp" Nov 27 08:50:45 crc kubenswrapper[4971]: I1127 08:50:45.686971 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5b4pp" Nov 27 08:50:46 crc kubenswrapper[4971]: I1127 08:50:46.254510 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5b4pp"] Nov 27 08:50:47 crc kubenswrapper[4971]: I1127 08:50:47.035041 4971 generic.go:334] "Generic (PLEG): container finished" podID="58da1d43-b47b-48b0-9654-c43b137097cc" containerID="7005e5bdd4c8ee1daaa127b29f78f8b42aef210e835fe9463da723bb32663719" exitCode=0 Nov 27 08:50:47 crc kubenswrapper[4971]: I1127 08:50:47.035122 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5b4pp" event={"ID":"58da1d43-b47b-48b0-9654-c43b137097cc","Type":"ContainerDied","Data":"7005e5bdd4c8ee1daaa127b29f78f8b42aef210e835fe9463da723bb32663719"} Nov 27 08:50:47 crc kubenswrapper[4971]: I1127 08:50:47.035464 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5b4pp" event={"ID":"58da1d43-b47b-48b0-9654-c43b137097cc","Type":"ContainerStarted","Data":"1740914675e3577ca4a0de4ead50f809e693c6740c573c03c3dd5b3aa5f71bf1"} Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.055284 4971 generic.go:334] "Generic (PLEG): container finished" podID="58da1d43-b47b-48b0-9654-c43b137097cc" containerID="21efbf4c72c104abcaa228d584faf1c91e23c377adb073226050246d080f9150" exitCode=0 Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.055408 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5b4pp" event={"ID":"58da1d43-b47b-48b0-9654-c43b137097cc","Type":"ContainerDied","Data":"21efbf4c72c104abcaa228d584faf1c91e23c377adb073226050246d080f9150"} Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.533430 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-dc82-account-create-update-9kklc"] Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.535197 4971 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/barbican-dc82-account-create-update-9kklc" Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.539782 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.540083 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-jszcg"] Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.541252 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-jszcg" Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.554360 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-dc82-account-create-update-9kklc"] Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.567709 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-jszcg"] Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.710999 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5edbdbbc-7378-431c-aa48-e3f1c19ef1af-operator-scripts\") pod \"barbican-dc82-account-create-update-9kklc\" (UID: \"5edbdbbc-7378-431c-aa48-e3f1c19ef1af\") " pod="openstack/barbican-dc82-account-create-update-9kklc" Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.711503 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ds68t\" (UniqueName: \"kubernetes.io/projected/5edbdbbc-7378-431c-aa48-e3f1c19ef1af-kube-api-access-ds68t\") pod \"barbican-dc82-account-create-update-9kklc\" (UID: \"5edbdbbc-7378-431c-aa48-e3f1c19ef1af\") " pod="openstack/barbican-dc82-account-create-update-9kklc" Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.711698 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbksq\" (UniqueName: \"kubernetes.io/projected/e966785d-966d-479c-8bff-9ed9214a5162-kube-api-access-tbksq\") pod \"barbican-db-create-jszcg\" (UID: \"e966785d-966d-479c-8bff-9ed9214a5162\") " pod="openstack/barbican-db-create-jszcg" Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.711880 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e966785d-966d-479c-8bff-9ed9214a5162-operator-scripts\") pod \"barbican-db-create-jszcg\" (UID: \"e966785d-966d-479c-8bff-9ed9214a5162\") " pod="openstack/barbican-db-create-jszcg" Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.813924 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e966785d-966d-479c-8bff-9ed9214a5162-operator-scripts\") pod \"barbican-db-create-jszcg\" (UID: \"e966785d-966d-479c-8bff-9ed9214a5162\") " pod="openstack/barbican-db-create-jszcg" Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.814034 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5edbdbbc-7378-431c-aa48-e3f1c19ef1af-operator-scripts\") pod \"barbican-dc82-account-create-update-9kklc\" (UID: \"5edbdbbc-7378-431c-aa48-e3f1c19ef1af\") " pod="openstack/barbican-dc82-account-create-update-9kklc" Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.814098 4971 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-ds68t\" (UniqueName: \"kubernetes.io/projected/5edbdbbc-7378-431c-aa48-e3f1c19ef1af-kube-api-access-ds68t\") pod \"barbican-dc82-account-create-update-9kklc\" (UID: \"5edbdbbc-7378-431c-aa48-e3f1c19ef1af\") " pod="openstack/barbican-dc82-account-create-update-9kklc" Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.814146 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbksq\" (UniqueName: \"kubernetes.io/projected/e966785d-966d-479c-8bff-9ed9214a5162-kube-api-access-tbksq\") pod \"barbican-db-create-jszcg\" (UID: \"e966785d-966d-479c-8bff-9ed9214a5162\") " pod="openstack/barbican-db-create-jszcg" Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.814978 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5edbdbbc-7378-431c-aa48-e3f1c19ef1af-operator-scripts\") pod \"barbican-dc82-account-create-update-9kklc\" (UID: \"5edbdbbc-7378-431c-aa48-e3f1c19ef1af\") " pod="openstack/barbican-dc82-account-create-update-9kklc" Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.815180 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e966785d-966d-479c-8bff-9ed9214a5162-operator-scripts\") pod \"barbican-db-create-jszcg\" (UID: \"e966785d-966d-479c-8bff-9ed9214a5162\") " pod="openstack/barbican-db-create-jszcg" Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.839203 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbksq\" (UniqueName: \"kubernetes.io/projected/e966785d-966d-479c-8bff-9ed9214a5162-kube-api-access-tbksq\") pod \"barbican-db-create-jszcg\" (UID: \"e966785d-966d-479c-8bff-9ed9214a5162\") " pod="openstack/barbican-db-create-jszcg" Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.840107 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ds68t\" (UniqueName: \"kubernetes.io/projected/5edbdbbc-7378-431c-aa48-e3f1c19ef1af-kube-api-access-ds68t\") pod \"barbican-dc82-account-create-update-9kklc\" (UID: \"5edbdbbc-7378-431c-aa48-e3f1c19ef1af\") " pod="openstack/barbican-dc82-account-create-update-9kklc" Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.860165 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-dc82-account-create-update-9kklc" Nov 27 08:50:49 crc kubenswrapper[4971]: I1127 08:50:49.870124 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-jszcg" Nov 27 08:50:50 crc kubenswrapper[4971]: I1127 08:50:50.065576 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5b4pp" event={"ID":"58da1d43-b47b-48b0-9654-c43b137097cc","Type":"ContainerStarted","Data":"8008e6d72bb57ba00cb5ab8f038e11d284744c992f6848b46d2a29e20f3683d8"} Nov 27 08:50:50 crc kubenswrapper[4971]: I1127 08:50:50.116267 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5b4pp" podStartSLOduration=2.627265251 podStartE2EDuration="5.116237981s" podCreationTimestamp="2025-11-27 08:50:45 +0000 UTC" firstStartedPulling="2025-11-27 08:50:47.036789073 +0000 UTC m=+7085.228832991" lastFinishedPulling="2025-11-27 08:50:49.525761803 +0000 UTC m=+7087.717805721" observedRunningTime="2025-11-27 08:50:50.105017559 +0000 UTC m=+7088.297061477" watchObservedRunningTime="2025-11-27 08:50:50.116237981 +0000 UTC m=+7088.308281899" Nov 27 08:50:50 crc kubenswrapper[4971]: I1127 08:50:50.437326 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-dc82-account-create-update-9kklc"] Nov 27 08:50:50 crc kubenswrapper[4971]: I1127 08:50:50.570527 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-jszcg"] Nov 27 08:50:50 crc kubenswrapper[4971]: W1127 08:50:50.576291 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode966785d_966d_479c_8bff_9ed9214a5162.slice/crio-c2ccfc9a5dfe7b6e6f74097d5a703635b6b78bf0dfe0f6c63224d9a630fe296d WatchSource:0}: Error finding container c2ccfc9a5dfe7b6e6f74097d5a703635b6b78bf0dfe0f6c63224d9a630fe296d: Status 404 returned error can't find the container with id c2ccfc9a5dfe7b6e6f74097d5a703635b6b78bf0dfe0f6c63224d9a630fe296d Nov 27 08:50:51 crc kubenswrapper[4971]: I1127 08:50:51.076061 4971 generic.go:334] "Generic (PLEG): container finished" podID="5edbdbbc-7378-431c-aa48-e3f1c19ef1af" containerID="9da6c734226223aace17cf0a409c888b16889ef45e1f5ca74431516f4e0282f4" exitCode=0 Nov 27 08:50:51 crc kubenswrapper[4971]: I1127 08:50:51.076156 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-dc82-account-create-update-9kklc" event={"ID":"5edbdbbc-7378-431c-aa48-e3f1c19ef1af","Type":"ContainerDied","Data":"9da6c734226223aace17cf0a409c888b16889ef45e1f5ca74431516f4e0282f4"} Nov 27 08:50:51 crc kubenswrapper[4971]: I1127 08:50:51.076196 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-dc82-account-create-update-9kklc" event={"ID":"5edbdbbc-7378-431c-aa48-e3f1c19ef1af","Type":"ContainerStarted","Data":"0b2dfc92ed1b2d139e35af8d85e80d045e81211cbdcf55b87a7bd49d5b5bb127"} Nov 27 08:50:51 crc kubenswrapper[4971]: I1127 08:50:51.078449 4971 generic.go:334] "Generic (PLEG): container finished" podID="e966785d-966d-479c-8bff-9ed9214a5162" containerID="43de497582f93c27b22667ff78779b7acd0e3dee68a6f43624fe5880dc2e2a1c" exitCode=0 Nov 27 08:50:51 crc kubenswrapper[4971]: I1127 08:50:51.078524 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-jszcg" event={"ID":"e966785d-966d-479c-8bff-9ed9214a5162","Type":"ContainerDied","Data":"43de497582f93c27b22667ff78779b7acd0e3dee68a6f43624fe5880dc2e2a1c"} Nov 27 08:50:51 crc kubenswrapper[4971]: I1127 08:50:51.078582 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-jszcg" 
event={"ID":"e966785d-966d-479c-8bff-9ed9214a5162","Type":"ContainerStarted","Data":"c2ccfc9a5dfe7b6e6f74097d5a703635b6b78bf0dfe0f6c63224d9a630fe296d"} Nov 27 08:50:52 crc kubenswrapper[4971]: I1127 08:50:52.538865 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-dc82-account-create-update-9kklc" Nov 27 08:50:52 crc kubenswrapper[4971]: I1127 08:50:52.547082 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-jszcg" Nov 27 08:50:52 crc kubenswrapper[4971]: I1127 08:50:52.665690 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tbksq\" (UniqueName: \"kubernetes.io/projected/e966785d-966d-479c-8bff-9ed9214a5162-kube-api-access-tbksq\") pod \"e966785d-966d-479c-8bff-9ed9214a5162\" (UID: \"e966785d-966d-479c-8bff-9ed9214a5162\") " Nov 27 08:50:52 crc kubenswrapper[4971]: I1127 08:50:52.666299 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ds68t\" (UniqueName: \"kubernetes.io/projected/5edbdbbc-7378-431c-aa48-e3f1c19ef1af-kube-api-access-ds68t\") pod \"5edbdbbc-7378-431c-aa48-e3f1c19ef1af\" (UID: \"5edbdbbc-7378-431c-aa48-e3f1c19ef1af\") " Nov 27 08:50:52 crc kubenswrapper[4971]: I1127 08:50:52.666437 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e966785d-966d-479c-8bff-9ed9214a5162-operator-scripts\") pod \"e966785d-966d-479c-8bff-9ed9214a5162\" (UID: \"e966785d-966d-479c-8bff-9ed9214a5162\") " Nov 27 08:50:52 crc kubenswrapper[4971]: I1127 08:50:52.666473 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5edbdbbc-7378-431c-aa48-e3f1c19ef1af-operator-scripts\") pod \"5edbdbbc-7378-431c-aa48-e3f1c19ef1af\" (UID: \"5edbdbbc-7378-431c-aa48-e3f1c19ef1af\") " Nov 27 08:50:52 crc kubenswrapper[4971]: I1127 08:50:52.667390 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e966785d-966d-479c-8bff-9ed9214a5162-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e966785d-966d-479c-8bff-9ed9214a5162" (UID: "e966785d-966d-479c-8bff-9ed9214a5162"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:50:52 crc kubenswrapper[4971]: I1127 08:50:52.667516 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5edbdbbc-7378-431c-aa48-e3f1c19ef1af-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5edbdbbc-7378-431c-aa48-e3f1c19ef1af" (UID: "5edbdbbc-7378-431c-aa48-e3f1c19ef1af"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:50:52 crc kubenswrapper[4971]: I1127 08:50:52.673089 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e966785d-966d-479c-8bff-9ed9214a5162-kube-api-access-tbksq" (OuterVolumeSpecName: "kube-api-access-tbksq") pod "e966785d-966d-479c-8bff-9ed9214a5162" (UID: "e966785d-966d-479c-8bff-9ed9214a5162"). InnerVolumeSpecName "kube-api-access-tbksq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:50:52 crc kubenswrapper[4971]: I1127 08:50:52.674855 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5edbdbbc-7378-431c-aa48-e3f1c19ef1af-kube-api-access-ds68t" (OuterVolumeSpecName: "kube-api-access-ds68t") pod "5edbdbbc-7378-431c-aa48-e3f1c19ef1af" (UID: "5edbdbbc-7378-431c-aa48-e3f1c19ef1af"). InnerVolumeSpecName "kube-api-access-ds68t". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:50:52 crc kubenswrapper[4971]: I1127 08:50:52.769300 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbksq\" (UniqueName: \"kubernetes.io/projected/e966785d-966d-479c-8bff-9ed9214a5162-kube-api-access-tbksq\") on node \"crc\" DevicePath \"\"" Nov 27 08:50:52 crc kubenswrapper[4971]: I1127 08:50:52.769341 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ds68t\" (UniqueName: \"kubernetes.io/projected/5edbdbbc-7378-431c-aa48-e3f1c19ef1af-kube-api-access-ds68t\") on node \"crc\" DevicePath \"\"" Nov 27 08:50:52 crc kubenswrapper[4971]: I1127 08:50:52.769353 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e966785d-966d-479c-8bff-9ed9214a5162-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:50:52 crc kubenswrapper[4971]: I1127 08:50:52.769364 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5edbdbbc-7378-431c-aa48-e3f1c19ef1af-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:50:53 crc kubenswrapper[4971]: I1127 08:50:53.100498 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-jszcg" event={"ID":"e966785d-966d-479c-8bff-9ed9214a5162","Type":"ContainerDied","Data":"c2ccfc9a5dfe7b6e6f74097d5a703635b6b78bf0dfe0f6c63224d9a630fe296d"} Nov 27 08:50:53 crc kubenswrapper[4971]: I1127 08:50:53.100569 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2ccfc9a5dfe7b6e6f74097d5a703635b6b78bf0dfe0f6c63224d9a630fe296d" Nov 27 08:50:53 crc kubenswrapper[4971]: I1127 08:50:53.100585 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-jszcg" Nov 27 08:50:53 crc kubenswrapper[4971]: I1127 08:50:53.102780 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-dc82-account-create-update-9kklc" event={"ID":"5edbdbbc-7378-431c-aa48-e3f1c19ef1af","Type":"ContainerDied","Data":"0b2dfc92ed1b2d139e35af8d85e80d045e81211cbdcf55b87a7bd49d5b5bb127"} Nov 27 08:50:53 crc kubenswrapper[4971]: I1127 08:50:53.102836 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b2dfc92ed1b2d139e35af8d85e80d045e81211cbdcf55b87a7bd49d5b5bb127" Nov 27 08:50:53 crc kubenswrapper[4971]: I1127 08:50:53.102843 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-dc82-account-create-update-9kklc" Nov 27 08:50:53 crc kubenswrapper[4971]: I1127 08:50:53.608618 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:50:53 crc kubenswrapper[4971]: E1127 08:50:53.608906 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:50:54 crc kubenswrapper[4971]: I1127 08:50:54.946384 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-s8sgc"] Nov 27 08:50:54 crc kubenswrapper[4971]: E1127 08:50:54.947832 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5edbdbbc-7378-431c-aa48-e3f1c19ef1af" containerName="mariadb-account-create-update" Nov 27 08:50:54 crc kubenswrapper[4971]: I1127 08:50:54.947849 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5edbdbbc-7378-431c-aa48-e3f1c19ef1af" containerName="mariadb-account-create-update" Nov 27 08:50:54 crc kubenswrapper[4971]: E1127 08:50:54.947893 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e966785d-966d-479c-8bff-9ed9214a5162" containerName="mariadb-database-create" Nov 27 08:50:54 crc kubenswrapper[4971]: I1127 08:50:54.947901 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="e966785d-966d-479c-8bff-9ed9214a5162" containerName="mariadb-database-create" Nov 27 08:50:54 crc kubenswrapper[4971]: I1127 08:50:54.948119 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="e966785d-966d-479c-8bff-9ed9214a5162" containerName="mariadb-database-create" Nov 27 08:50:54 crc kubenswrapper[4971]: I1127 08:50:54.948165 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="5edbdbbc-7378-431c-aa48-e3f1c19ef1af" containerName="mariadb-account-create-update" Nov 27 08:50:54 crc kubenswrapper[4971]: I1127 08:50:54.949093 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-s8sgc" Nov 27 08:50:54 crc kubenswrapper[4971]: I1127 08:50:54.954747 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-s8sgc"] Nov 27 08:50:54 crc kubenswrapper[4971]: I1127 08:50:54.960399 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-wgl77" Nov 27 08:50:54 crc kubenswrapper[4971]: I1127 08:50:54.961052 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Nov 27 08:50:55 crc kubenswrapper[4971]: I1127 08:50:55.037146 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbntt\" (UniqueName: \"kubernetes.io/projected/7b89043e-9696-49f5-900b-5ea4c74851a6-kube-api-access-mbntt\") pod \"barbican-db-sync-s8sgc\" (UID: \"7b89043e-9696-49f5-900b-5ea4c74851a6\") " pod="openstack/barbican-db-sync-s8sgc" Nov 27 08:50:55 crc kubenswrapper[4971]: I1127 08:50:55.037342 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7b89043e-9696-49f5-900b-5ea4c74851a6-db-sync-config-data\") pod \"barbican-db-sync-s8sgc\" (UID: \"7b89043e-9696-49f5-900b-5ea4c74851a6\") " pod="openstack/barbican-db-sync-s8sgc" Nov 27 08:50:55 crc kubenswrapper[4971]: I1127 08:50:55.037420 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b89043e-9696-49f5-900b-5ea4c74851a6-combined-ca-bundle\") pod \"barbican-db-sync-s8sgc\" (UID: \"7b89043e-9696-49f5-900b-5ea4c74851a6\") " pod="openstack/barbican-db-sync-s8sgc" Nov 27 08:50:55 crc kubenswrapper[4971]: I1127 08:50:55.139136 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7b89043e-9696-49f5-900b-5ea4c74851a6-db-sync-config-data\") pod \"barbican-db-sync-s8sgc\" (UID: \"7b89043e-9696-49f5-900b-5ea4c74851a6\") " pod="openstack/barbican-db-sync-s8sgc" Nov 27 08:50:55 crc kubenswrapper[4971]: I1127 08:50:55.139257 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b89043e-9696-49f5-900b-5ea4c74851a6-combined-ca-bundle\") pod \"barbican-db-sync-s8sgc\" (UID: \"7b89043e-9696-49f5-900b-5ea4c74851a6\") " pod="openstack/barbican-db-sync-s8sgc" Nov 27 08:50:55 crc kubenswrapper[4971]: I1127 08:50:55.139345 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbntt\" (UniqueName: \"kubernetes.io/projected/7b89043e-9696-49f5-900b-5ea4c74851a6-kube-api-access-mbntt\") pod \"barbican-db-sync-s8sgc\" (UID: \"7b89043e-9696-49f5-900b-5ea4c74851a6\") " pod="openstack/barbican-db-sync-s8sgc" Nov 27 08:50:55 crc kubenswrapper[4971]: I1127 08:50:55.148285 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7b89043e-9696-49f5-900b-5ea4c74851a6-db-sync-config-data\") pod \"barbican-db-sync-s8sgc\" (UID: \"7b89043e-9696-49f5-900b-5ea4c74851a6\") " pod="openstack/barbican-db-sync-s8sgc" Nov 27 08:50:55 crc kubenswrapper[4971]: I1127 08:50:55.151611 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/7b89043e-9696-49f5-900b-5ea4c74851a6-combined-ca-bundle\") pod \"barbican-db-sync-s8sgc\" (UID: \"7b89043e-9696-49f5-900b-5ea4c74851a6\") " pod="openstack/barbican-db-sync-s8sgc" Nov 27 08:50:55 crc kubenswrapper[4971]: I1127 08:50:55.156838 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbntt\" (UniqueName: \"kubernetes.io/projected/7b89043e-9696-49f5-900b-5ea4c74851a6-kube-api-access-mbntt\") pod \"barbican-db-sync-s8sgc\" (UID: \"7b89043e-9696-49f5-900b-5ea4c74851a6\") " pod="openstack/barbican-db-sync-s8sgc" Nov 27 08:50:55 crc kubenswrapper[4971]: I1127 08:50:55.312243 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-s8sgc" Nov 27 08:50:55 crc kubenswrapper[4971]: I1127 08:50:55.688860 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5b4pp" Nov 27 08:50:55 crc kubenswrapper[4971]: I1127 08:50:55.689683 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5b4pp" Nov 27 08:50:55 crc kubenswrapper[4971]: I1127 08:50:55.740637 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5b4pp" Nov 27 08:50:55 crc kubenswrapper[4971]: I1127 08:50:55.856472 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-s8sgc"] Nov 27 08:50:56 crc kubenswrapper[4971]: I1127 08:50:56.138964 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-s8sgc" event={"ID":"7b89043e-9696-49f5-900b-5ea4c74851a6","Type":"ContainerStarted","Data":"c5e0e7e4c4fe9c3c556bdf8ca2e35a5baeb4afc5966bcdc3546a7a81dfd54f5a"} Nov 27 08:50:56 crc kubenswrapper[4971]: I1127 08:50:56.190067 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5b4pp" Nov 27 08:50:57 crc kubenswrapper[4971]: I1127 08:50:57.356037 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5b4pp"] Nov 27 08:50:59 crc kubenswrapper[4971]: I1127 08:50:59.167206 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5b4pp" podUID="58da1d43-b47b-48b0-9654-c43b137097cc" containerName="registry-server" containerID="cri-o://8008e6d72bb57ba00cb5ab8f038e11d284744c992f6848b46d2a29e20f3683d8" gracePeriod=2 Nov 27 08:51:00 crc kubenswrapper[4971]: I1127 08:51:00.182554 4971 generic.go:334] "Generic (PLEG): container finished" podID="58da1d43-b47b-48b0-9654-c43b137097cc" containerID="8008e6d72bb57ba00cb5ab8f038e11d284744c992f6848b46d2a29e20f3683d8" exitCode=0 Nov 27 08:51:00 crc kubenswrapper[4971]: I1127 08:51:00.182615 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5b4pp" event={"ID":"58da1d43-b47b-48b0-9654-c43b137097cc","Type":"ContainerDied","Data":"8008e6d72bb57ba00cb5ab8f038e11d284744c992f6848b46d2a29e20f3683d8"} Nov 27 08:51:00 crc kubenswrapper[4971]: I1127 08:51:00.968832 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5b4pp" Nov 27 08:51:01 crc kubenswrapper[4971]: I1127 08:51:01.068928 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58da1d43-b47b-48b0-9654-c43b137097cc-utilities\") pod \"58da1d43-b47b-48b0-9654-c43b137097cc\" (UID: \"58da1d43-b47b-48b0-9654-c43b137097cc\") " Nov 27 08:51:01 crc kubenswrapper[4971]: I1127 08:51:01.069048 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljd7z\" (UniqueName: \"kubernetes.io/projected/58da1d43-b47b-48b0-9654-c43b137097cc-kube-api-access-ljd7z\") pod \"58da1d43-b47b-48b0-9654-c43b137097cc\" (UID: \"58da1d43-b47b-48b0-9654-c43b137097cc\") " Nov 27 08:51:01 crc kubenswrapper[4971]: I1127 08:51:01.069168 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58da1d43-b47b-48b0-9654-c43b137097cc-catalog-content\") pod \"58da1d43-b47b-48b0-9654-c43b137097cc\" (UID: \"58da1d43-b47b-48b0-9654-c43b137097cc\") " Nov 27 08:51:01 crc kubenswrapper[4971]: I1127 08:51:01.070596 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58da1d43-b47b-48b0-9654-c43b137097cc-utilities" (OuterVolumeSpecName: "utilities") pod "58da1d43-b47b-48b0-9654-c43b137097cc" (UID: "58da1d43-b47b-48b0-9654-c43b137097cc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:51:01 crc kubenswrapper[4971]: I1127 08:51:01.074928 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58da1d43-b47b-48b0-9654-c43b137097cc-kube-api-access-ljd7z" (OuterVolumeSpecName: "kube-api-access-ljd7z") pod "58da1d43-b47b-48b0-9654-c43b137097cc" (UID: "58da1d43-b47b-48b0-9654-c43b137097cc"). InnerVolumeSpecName "kube-api-access-ljd7z". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:51:01 crc kubenswrapper[4971]: I1127 08:51:01.160019 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58da1d43-b47b-48b0-9654-c43b137097cc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "58da1d43-b47b-48b0-9654-c43b137097cc" (UID: "58da1d43-b47b-48b0-9654-c43b137097cc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:51:01 crc kubenswrapper[4971]: I1127 08:51:01.170948 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58da1d43-b47b-48b0-9654-c43b137097cc-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:01 crc kubenswrapper[4971]: I1127 08:51:01.170986 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljd7z\" (UniqueName: \"kubernetes.io/projected/58da1d43-b47b-48b0-9654-c43b137097cc-kube-api-access-ljd7z\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:01 crc kubenswrapper[4971]: I1127 08:51:01.170997 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58da1d43-b47b-48b0-9654-c43b137097cc-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:01 crc kubenswrapper[4971]: I1127 08:51:01.200358 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5b4pp" event={"ID":"58da1d43-b47b-48b0-9654-c43b137097cc","Type":"ContainerDied","Data":"1740914675e3577ca4a0de4ead50f809e693c6740c573c03c3dd5b3aa5f71bf1"} Nov 27 08:51:01 crc kubenswrapper[4971]: I1127 08:51:01.200410 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5b4pp" Nov 27 08:51:01 crc kubenswrapper[4971]: I1127 08:51:01.200450 4971 scope.go:117] "RemoveContainer" containerID="8008e6d72bb57ba00cb5ab8f038e11d284744c992f6848b46d2a29e20f3683d8" Nov 27 08:51:01 crc kubenswrapper[4971]: I1127 08:51:01.203683 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-s8sgc" event={"ID":"7b89043e-9696-49f5-900b-5ea4c74851a6","Type":"ContainerStarted","Data":"bcc668e4727c4c3eb7c94152d8196bededd688fb51f88a1551267cb0de848fe9"} Nov 27 08:51:01 crc kubenswrapper[4971]: I1127 08:51:01.238949 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-s8sgc" podStartSLOduration=2.433648844 podStartE2EDuration="7.23891371s" podCreationTimestamp="2025-11-27 08:50:54 +0000 UTC" firstStartedPulling="2025-11-27 08:50:55.858409743 +0000 UTC m=+7094.050453671" lastFinishedPulling="2025-11-27 08:51:00.663674629 +0000 UTC m=+7098.855718537" observedRunningTime="2025-11-27 08:51:01.222696986 +0000 UTC m=+7099.414740904" watchObservedRunningTime="2025-11-27 08:51:01.23891371 +0000 UTC m=+7099.430957638" Nov 27 08:51:01 crc kubenswrapper[4971]: I1127 08:51:01.242136 4971 scope.go:117] "RemoveContainer" containerID="21efbf4c72c104abcaa228d584faf1c91e23c377adb073226050246d080f9150" Nov 27 08:51:01 crc kubenswrapper[4971]: I1127 08:51:01.252365 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5b4pp"] Nov 27 08:51:01 crc kubenswrapper[4971]: I1127 08:51:01.262164 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5b4pp"] Nov 27 08:51:01 crc kubenswrapper[4971]: I1127 08:51:01.268595 4971 scope.go:117] "RemoveContainer" containerID="7005e5bdd4c8ee1daaa127b29f78f8b42aef210e835fe9463da723bb32663719" Nov 27 08:51:02 crc kubenswrapper[4971]: I1127 08:51:02.561425 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58da1d43-b47b-48b0-9654-c43b137097cc" path="/var/lib/kubelet/pods/58da1d43-b47b-48b0-9654-c43b137097cc/volumes" Nov 27 08:51:03 crc kubenswrapper[4971]: I1127 08:51:03.226640 4971 generic.go:334] "Generic (PLEG): container finished" 
podID="7b89043e-9696-49f5-900b-5ea4c74851a6" containerID="bcc668e4727c4c3eb7c94152d8196bededd688fb51f88a1551267cb0de848fe9" exitCode=0 Nov 27 08:51:03 crc kubenswrapper[4971]: I1127 08:51:03.226689 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-s8sgc" event={"ID":"7b89043e-9696-49f5-900b-5ea4c74851a6","Type":"ContainerDied","Data":"bcc668e4727c4c3eb7c94152d8196bededd688fb51f88a1551267cb0de848fe9"} Nov 27 08:51:04 crc kubenswrapper[4971]: I1127 08:51:04.597677 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-s8sgc" Nov 27 08:51:04 crc kubenswrapper[4971]: I1127 08:51:04.741604 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mbntt\" (UniqueName: \"kubernetes.io/projected/7b89043e-9696-49f5-900b-5ea4c74851a6-kube-api-access-mbntt\") pod \"7b89043e-9696-49f5-900b-5ea4c74851a6\" (UID: \"7b89043e-9696-49f5-900b-5ea4c74851a6\") " Nov 27 08:51:04 crc kubenswrapper[4971]: I1127 08:51:04.742023 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7b89043e-9696-49f5-900b-5ea4c74851a6-db-sync-config-data\") pod \"7b89043e-9696-49f5-900b-5ea4c74851a6\" (UID: \"7b89043e-9696-49f5-900b-5ea4c74851a6\") " Nov 27 08:51:04 crc kubenswrapper[4971]: I1127 08:51:04.742226 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b89043e-9696-49f5-900b-5ea4c74851a6-combined-ca-bundle\") pod \"7b89043e-9696-49f5-900b-5ea4c74851a6\" (UID: \"7b89043e-9696-49f5-900b-5ea4c74851a6\") " Nov 27 08:51:04 crc kubenswrapper[4971]: I1127 08:51:04.748599 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b89043e-9696-49f5-900b-5ea4c74851a6-kube-api-access-mbntt" (OuterVolumeSpecName: "kube-api-access-mbntt") pod "7b89043e-9696-49f5-900b-5ea4c74851a6" (UID: "7b89043e-9696-49f5-900b-5ea4c74851a6"). InnerVolumeSpecName "kube-api-access-mbntt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:51:04 crc kubenswrapper[4971]: I1127 08:51:04.748814 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b89043e-9696-49f5-900b-5ea4c74851a6-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "7b89043e-9696-49f5-900b-5ea4c74851a6" (UID: "7b89043e-9696-49f5-900b-5ea4c74851a6"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:51:04 crc kubenswrapper[4971]: I1127 08:51:04.767582 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b89043e-9696-49f5-900b-5ea4c74851a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b89043e-9696-49f5-900b-5ea4c74851a6" (UID: "7b89043e-9696-49f5-900b-5ea4c74851a6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:51:04 crc kubenswrapper[4971]: I1127 08:51:04.844737 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mbntt\" (UniqueName: \"kubernetes.io/projected/7b89043e-9696-49f5-900b-5ea4c74851a6-kube-api-access-mbntt\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:04 crc kubenswrapper[4971]: I1127 08:51:04.844796 4971 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7b89043e-9696-49f5-900b-5ea4c74851a6-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:04 crc kubenswrapper[4971]: I1127 08:51:04.844812 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b89043e-9696-49f5-900b-5ea4c74851a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.245466 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-s8sgc" event={"ID":"7b89043e-9696-49f5-900b-5ea4c74851a6","Type":"ContainerDied","Data":"c5e0e7e4c4fe9c3c556bdf8ca2e35a5baeb4afc5966bcdc3546a7a81dfd54f5a"} Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.245513 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5e0e7e4c4fe9c3c556bdf8ca2e35a5baeb4afc5966bcdc3546a7a81dfd54f5a" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.245615 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-s8sgc" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.524758 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-579556d9cc-tzpkz"] Nov 27 08:51:05 crc kubenswrapper[4971]: E1127 08:51:05.525657 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58da1d43-b47b-48b0-9654-c43b137097cc" containerName="registry-server" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.525678 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="58da1d43-b47b-48b0-9654-c43b137097cc" containerName="registry-server" Nov 27 08:51:05 crc kubenswrapper[4971]: E1127 08:51:05.525708 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58da1d43-b47b-48b0-9654-c43b137097cc" containerName="extract-utilities" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.525717 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="58da1d43-b47b-48b0-9654-c43b137097cc" containerName="extract-utilities" Nov 27 08:51:05 crc kubenswrapper[4971]: E1127 08:51:05.525747 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58da1d43-b47b-48b0-9654-c43b137097cc" containerName="extract-content" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.525754 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="58da1d43-b47b-48b0-9654-c43b137097cc" containerName="extract-content" Nov 27 08:51:05 crc kubenswrapper[4971]: E1127 08:51:05.525779 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b89043e-9696-49f5-900b-5ea4c74851a6" containerName="barbican-db-sync" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.525786 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b89043e-9696-49f5-900b-5ea4c74851a6" containerName="barbican-db-sync" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.527030 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="58da1d43-b47b-48b0-9654-c43b137097cc" containerName="registry-server" 
Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.527075 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b89043e-9696-49f5-900b-5ea4c74851a6" containerName="barbican-db-sync" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.543218 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-579556d9cc-tzpkz" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.548207 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.549704 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-wgl77" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.552637 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.612921 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-579556d9cc-tzpkz"] Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.632627 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-86bc479d88-stpvp"] Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.641302 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-86bc479d88-stpvp"] Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.641447 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.647582 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.664303 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-54dd4f7d4c-8t54q"] Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.671755 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.672225 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3289cb6f-6334-47db-a85b-8fcc20a12295-config-data-custom\") pod \"barbican-worker-579556d9cc-tzpkz\" (UID: \"3289cb6f-6334-47db-a85b-8fcc20a12295\") " pod="openstack/barbican-worker-579556d9cc-tzpkz" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.672304 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vvqm\" (UniqueName: \"kubernetes.io/projected/3289cb6f-6334-47db-a85b-8fcc20a12295-kube-api-access-9vvqm\") pod \"barbican-worker-579556d9cc-tzpkz\" (UID: \"3289cb6f-6334-47db-a85b-8fcc20a12295\") " pod="openstack/barbican-worker-579556d9cc-tzpkz" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.672677 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3289cb6f-6334-47db-a85b-8fcc20a12295-logs\") pod \"barbican-worker-579556d9cc-tzpkz\" (UID: \"3289cb6f-6334-47db-a85b-8fcc20a12295\") " pod="openstack/barbican-worker-579556d9cc-tzpkz" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.672822 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3289cb6f-6334-47db-a85b-8fcc20a12295-combined-ca-bundle\") pod \"barbican-worker-579556d9cc-tzpkz\" (UID: \"3289cb6f-6334-47db-a85b-8fcc20a12295\") " pod="openstack/barbican-worker-579556d9cc-tzpkz" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.672873 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3289cb6f-6334-47db-a85b-8fcc20a12295-config-data\") pod \"barbican-worker-579556d9cc-tzpkz\" (UID: \"3289cb6f-6334-47db-a85b-8fcc20a12295\") " pod="openstack/barbican-worker-579556d9cc-tzpkz" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.673465 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54dd4f7d4c-8t54q"] Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.778373 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3289cb6f-6334-47db-a85b-8fcc20a12295-logs\") pod \"barbican-worker-579556d9cc-tzpkz\" (UID: \"3289cb6f-6334-47db-a85b-8fcc20a12295\") " pod="openstack/barbican-worker-579556d9cc-tzpkz" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.778466 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3289cb6f-6334-47db-a85b-8fcc20a12295-combined-ca-bundle\") pod \"barbican-worker-579556d9cc-tzpkz\" (UID: \"3289cb6f-6334-47db-a85b-8fcc20a12295\") " pod="openstack/barbican-worker-579556d9cc-tzpkz" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.778500 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3289cb6f-6334-47db-a85b-8fcc20a12295-config-data\") pod \"barbican-worker-579556d9cc-tzpkz\" (UID: \"3289cb6f-6334-47db-a85b-8fcc20a12295\") " pod="openstack/barbican-worker-579556d9cc-tzpkz" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.778556 4971 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e91e8ba-b5c3-437f-9406-d76e697a9036-logs\") pod \"barbican-keystone-listener-86bc479d88-stpvp\" (UID: \"5e91e8ba-b5c3-437f-9406-d76e697a9036\") " pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.778598 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-ovsdbserver-nb\") pod \"dnsmasq-dns-54dd4f7d4c-8t54q\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.778638 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-ovsdbserver-sb\") pod \"dnsmasq-dns-54dd4f7d4c-8t54q\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.778669 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-config\") pod \"dnsmasq-dns-54dd4f7d4c-8t54q\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.778723 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5frrm\" (UniqueName: \"kubernetes.io/projected/5e91e8ba-b5c3-437f-9406-d76e697a9036-kube-api-access-5frrm\") pod \"barbican-keystone-listener-86bc479d88-stpvp\" (UID: \"5e91e8ba-b5c3-437f-9406-d76e697a9036\") " pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.778746 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdlcp\" (UniqueName: \"kubernetes.io/projected/44034162-7899-44d2-b36b-abedea905618-kube-api-access-bdlcp\") pod \"dnsmasq-dns-54dd4f7d4c-8t54q\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.778788 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e91e8ba-b5c3-437f-9406-d76e697a9036-combined-ca-bundle\") pod \"barbican-keystone-listener-86bc479d88-stpvp\" (UID: \"5e91e8ba-b5c3-437f-9406-d76e697a9036\") " pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.778809 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e91e8ba-b5c3-437f-9406-d76e697a9036-config-data\") pod \"barbican-keystone-listener-86bc479d88-stpvp\" (UID: \"5e91e8ba-b5c3-437f-9406-d76e697a9036\") " pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.778832 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/5e91e8ba-b5c3-437f-9406-d76e697a9036-config-data-custom\") pod \"barbican-keystone-listener-86bc479d88-stpvp\" (UID: \"5e91e8ba-b5c3-437f-9406-d76e697a9036\") " pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.778898 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3289cb6f-6334-47db-a85b-8fcc20a12295-config-data-custom\") pod \"barbican-worker-579556d9cc-tzpkz\" (UID: \"3289cb6f-6334-47db-a85b-8fcc20a12295\") " pod="openstack/barbican-worker-579556d9cc-tzpkz" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.778951 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vvqm\" (UniqueName: \"kubernetes.io/projected/3289cb6f-6334-47db-a85b-8fcc20a12295-kube-api-access-9vvqm\") pod \"barbican-worker-579556d9cc-tzpkz\" (UID: \"3289cb6f-6334-47db-a85b-8fcc20a12295\") " pod="openstack/barbican-worker-579556d9cc-tzpkz" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.778989 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-dns-svc\") pod \"dnsmasq-dns-54dd4f7d4c-8t54q\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.779608 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3289cb6f-6334-47db-a85b-8fcc20a12295-logs\") pod \"barbican-worker-579556d9cc-tzpkz\" (UID: \"3289cb6f-6334-47db-a85b-8fcc20a12295\") " pod="openstack/barbican-worker-579556d9cc-tzpkz" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.795124 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3289cb6f-6334-47db-a85b-8fcc20a12295-config-data-custom\") pod \"barbican-worker-579556d9cc-tzpkz\" (UID: \"3289cb6f-6334-47db-a85b-8fcc20a12295\") " pod="openstack/barbican-worker-579556d9cc-tzpkz" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.796176 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3289cb6f-6334-47db-a85b-8fcc20a12295-config-data\") pod \"barbican-worker-579556d9cc-tzpkz\" (UID: \"3289cb6f-6334-47db-a85b-8fcc20a12295\") " pod="openstack/barbican-worker-579556d9cc-tzpkz" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.796292 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3289cb6f-6334-47db-a85b-8fcc20a12295-combined-ca-bundle\") pod \"barbican-worker-579556d9cc-tzpkz\" (UID: \"3289cb6f-6334-47db-a85b-8fcc20a12295\") " pod="openstack/barbican-worker-579556d9cc-tzpkz" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.805575 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vvqm\" (UniqueName: \"kubernetes.io/projected/3289cb6f-6334-47db-a85b-8fcc20a12295-kube-api-access-9vvqm\") pod \"barbican-worker-579556d9cc-tzpkz\" (UID: \"3289cb6f-6334-47db-a85b-8fcc20a12295\") " pod="openstack/barbican-worker-579556d9cc-tzpkz" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.813470 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7f9867f5f8-5v9cv"] 
Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.814913 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7f9867f5f8-5v9cv" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.825883 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.837648 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7f9867f5f8-5v9cv"] Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.881677 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-dns-svc\") pod \"dnsmasq-dns-54dd4f7d4c-8t54q\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.881800 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e91e8ba-b5c3-437f-9406-d76e697a9036-logs\") pod \"barbican-keystone-listener-86bc479d88-stpvp\" (UID: \"5e91e8ba-b5c3-437f-9406-d76e697a9036\") " pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.881842 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-ovsdbserver-nb\") pod \"dnsmasq-dns-54dd4f7d4c-8t54q\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.881869 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-ovsdbserver-sb\") pod \"dnsmasq-dns-54dd4f7d4c-8t54q\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.881899 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-config\") pod \"dnsmasq-dns-54dd4f7d4c-8t54q\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.881941 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdlcp\" (UniqueName: \"kubernetes.io/projected/44034162-7899-44d2-b36b-abedea905618-kube-api-access-bdlcp\") pod \"dnsmasq-dns-54dd4f7d4c-8t54q\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.881964 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5frrm\" (UniqueName: \"kubernetes.io/projected/5e91e8ba-b5c3-437f-9406-d76e697a9036-kube-api-access-5frrm\") pod \"barbican-keystone-listener-86bc479d88-stpvp\" (UID: \"5e91e8ba-b5c3-437f-9406-d76e697a9036\") " pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.881994 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e91e8ba-b5c3-437f-9406-d76e697a9036-combined-ca-bundle\") pod 
\"barbican-keystone-listener-86bc479d88-stpvp\" (UID: \"5e91e8ba-b5c3-437f-9406-d76e697a9036\") " pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.882016 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e91e8ba-b5c3-437f-9406-d76e697a9036-config-data\") pod \"barbican-keystone-listener-86bc479d88-stpvp\" (UID: \"5e91e8ba-b5c3-437f-9406-d76e697a9036\") " pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.882042 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5e91e8ba-b5c3-437f-9406-d76e697a9036-config-data-custom\") pod \"barbican-keystone-listener-86bc479d88-stpvp\" (UID: \"5e91e8ba-b5c3-437f-9406-d76e697a9036\") " pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.882329 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e91e8ba-b5c3-437f-9406-d76e697a9036-logs\") pod \"barbican-keystone-listener-86bc479d88-stpvp\" (UID: \"5e91e8ba-b5c3-437f-9406-d76e697a9036\") " pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.883194 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-dns-svc\") pod \"dnsmasq-dns-54dd4f7d4c-8t54q\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.883335 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-config\") pod \"dnsmasq-dns-54dd4f7d4c-8t54q\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.884100 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-ovsdbserver-nb\") pod \"dnsmasq-dns-54dd4f7d4c-8t54q\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.887712 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-ovsdbserver-sb\") pod \"dnsmasq-dns-54dd4f7d4c-8t54q\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.889120 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5e91e8ba-b5c3-437f-9406-d76e697a9036-config-data-custom\") pod \"barbican-keystone-listener-86bc479d88-stpvp\" (UID: \"5e91e8ba-b5c3-437f-9406-d76e697a9036\") " pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.895756 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-579556d9cc-tzpkz" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.910616 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e91e8ba-b5c3-437f-9406-d76e697a9036-combined-ca-bundle\") pod \"barbican-keystone-listener-86bc479d88-stpvp\" (UID: \"5e91e8ba-b5c3-437f-9406-d76e697a9036\") " pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.921742 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdlcp\" (UniqueName: \"kubernetes.io/projected/44034162-7899-44d2-b36b-abedea905618-kube-api-access-bdlcp\") pod \"dnsmasq-dns-54dd4f7d4c-8t54q\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.932755 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e91e8ba-b5c3-437f-9406-d76e697a9036-config-data\") pod \"barbican-keystone-listener-86bc479d88-stpvp\" (UID: \"5e91e8ba-b5c3-437f-9406-d76e697a9036\") " pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.942412 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5frrm\" (UniqueName: \"kubernetes.io/projected/5e91e8ba-b5c3-437f-9406-d76e697a9036-kube-api-access-5frrm\") pod \"barbican-keystone-listener-86bc479d88-stpvp\" (UID: \"5e91e8ba-b5c3-437f-9406-d76e697a9036\") " pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.986580 4971 util.go:30] "No sandbox for pod can be found. 
Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.989232 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knc45\" (UniqueName: \"kubernetes.io/projected/b5931b59-b508-45f2-9c53-428808f072cb-kube-api-access-knc45\") pod \"barbican-api-7f9867f5f8-5v9cv\" (UID: \"b5931b59-b508-45f2-9c53-428808f072cb\") " pod="openstack/barbican-api-7f9867f5f8-5v9cv"
Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.989275 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5931b59-b508-45f2-9c53-428808f072cb-logs\") pod \"barbican-api-7f9867f5f8-5v9cv\" (UID: \"b5931b59-b508-45f2-9c53-428808f072cb\") " pod="openstack/barbican-api-7f9867f5f8-5v9cv"
Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.989310 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b5931b59-b508-45f2-9c53-428808f072cb-config-data-custom\") pod \"barbican-api-7f9867f5f8-5v9cv\" (UID: \"b5931b59-b508-45f2-9c53-428808f072cb\") " pod="openstack/barbican-api-7f9867f5f8-5v9cv"
Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.989358 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5931b59-b508-45f2-9c53-428808f072cb-config-data\") pod \"barbican-api-7f9867f5f8-5v9cv\" (UID: \"b5931b59-b508-45f2-9c53-428808f072cb\") " pod="openstack/barbican-api-7f9867f5f8-5v9cv"
Nov 27 08:51:05 crc kubenswrapper[4971]: I1127 08:51:05.989377 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5931b59-b508-45f2-9c53-428808f072cb-combined-ca-bundle\") pod \"barbican-api-7f9867f5f8-5v9cv\" (UID: \"b5931b59-b508-45f2-9c53-428808f072cb\") " pod="openstack/barbican-api-7f9867f5f8-5v9cv"
Nov 27 08:51:06 crc kubenswrapper[4971]: I1127 08:51:05.999231 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q"
Nov 27 08:51:06 crc kubenswrapper[4971]: I1127 08:51:06.092926 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b5931b59-b508-45f2-9c53-428808f072cb-config-data-custom\") pod \"barbican-api-7f9867f5f8-5v9cv\" (UID: \"b5931b59-b508-45f2-9c53-428808f072cb\") " pod="openstack/barbican-api-7f9867f5f8-5v9cv"
Nov 27 08:51:06 crc kubenswrapper[4971]: I1127 08:51:06.093387 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5931b59-b508-45f2-9c53-428808f072cb-config-data\") pod \"barbican-api-7f9867f5f8-5v9cv\" (UID: \"b5931b59-b508-45f2-9c53-428808f072cb\") " pod="openstack/barbican-api-7f9867f5f8-5v9cv"
Nov 27 08:51:06 crc kubenswrapper[4971]: I1127 08:51:06.093416 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5931b59-b508-45f2-9c53-428808f072cb-combined-ca-bundle\") pod \"barbican-api-7f9867f5f8-5v9cv\" (UID: \"b5931b59-b508-45f2-9c53-428808f072cb\") " pod="openstack/barbican-api-7f9867f5f8-5v9cv"
Nov 27 08:51:06 crc kubenswrapper[4971]: I1127 08:51:06.093513 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knc45\" (UniqueName: \"kubernetes.io/projected/b5931b59-b508-45f2-9c53-428808f072cb-kube-api-access-knc45\") pod \"barbican-api-7f9867f5f8-5v9cv\" (UID: \"b5931b59-b508-45f2-9c53-428808f072cb\") " pod="openstack/barbican-api-7f9867f5f8-5v9cv"
Nov 27 08:51:06 crc kubenswrapper[4971]: I1127 08:51:06.093561 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5931b59-b508-45f2-9c53-428808f072cb-logs\") pod \"barbican-api-7f9867f5f8-5v9cv\" (UID: \"b5931b59-b508-45f2-9c53-428808f072cb\") " pod="openstack/barbican-api-7f9867f5f8-5v9cv"
Nov 27 08:51:06 crc kubenswrapper[4971]: I1127 08:51:06.094043 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5931b59-b508-45f2-9c53-428808f072cb-logs\") pod \"barbican-api-7f9867f5f8-5v9cv\" (UID: \"b5931b59-b508-45f2-9c53-428808f072cb\") " pod="openstack/barbican-api-7f9867f5f8-5v9cv"
Nov 27 08:51:06 crc kubenswrapper[4971]: I1127 08:51:06.100821 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b5931b59-b508-45f2-9c53-428808f072cb-config-data-custom\") pod \"barbican-api-7f9867f5f8-5v9cv\" (UID: \"b5931b59-b508-45f2-9c53-428808f072cb\") " pod="openstack/barbican-api-7f9867f5f8-5v9cv"
Nov 27 08:51:06 crc kubenswrapper[4971]: I1127 08:51:06.112653 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5931b59-b508-45f2-9c53-428808f072cb-config-data\") pod \"barbican-api-7f9867f5f8-5v9cv\" (UID: \"b5931b59-b508-45f2-9c53-428808f072cb\") " pod="openstack/barbican-api-7f9867f5f8-5v9cv"
Nov 27 08:51:06 crc kubenswrapper[4971]: I1127 08:51:06.113313 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5931b59-b508-45f2-9c53-428808f072cb-combined-ca-bundle\") pod \"barbican-api-7f9867f5f8-5v9cv\" (UID: \"b5931b59-b508-45f2-9c53-428808f072cb\") " pod="openstack/barbican-api-7f9867f5f8-5v9cv"
Nov 27 08:51:06 crc kubenswrapper[4971]: I1127 08:51:06.130922 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knc45\" (UniqueName: \"kubernetes.io/projected/b5931b59-b508-45f2-9c53-428808f072cb-kube-api-access-knc45\") pod \"barbican-api-7f9867f5f8-5v9cv\" (UID: \"b5931b59-b508-45f2-9c53-428808f072cb\") " pod="openstack/barbican-api-7f9867f5f8-5v9cv"
Nov 27 08:51:06 crc kubenswrapper[4971]: I1127 08:51:06.312749 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7f9867f5f8-5v9cv"
Nov 27 08:51:06 crc kubenswrapper[4971]: I1127 08:51:06.532359 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-579556d9cc-tzpkz"]
Nov 27 08:51:06 crc kubenswrapper[4971]: I1127 08:51:06.551322 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4"
Nov 27 08:51:06 crc kubenswrapper[4971]: E1127 08:51:06.551597 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:51:06 crc kubenswrapper[4971]: I1127 08:51:06.612721 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-86bc479d88-stpvp"]
Nov 27 08:51:06 crc kubenswrapper[4971]: I1127 08:51:06.631124 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54dd4f7d4c-8t54q"]
Nov 27 08:51:06 crc kubenswrapper[4971]: I1127 08:51:06.879608 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7f9867f5f8-5v9cv"]
Nov 27 08:51:07 crc kubenswrapper[4971]: I1127 08:51:07.271980 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7f9867f5f8-5v9cv" event={"ID":"b5931b59-b508-45f2-9c53-428808f072cb","Type":"ContainerStarted","Data":"e9d721f2e12105bc31ff27eb59285030a55978a7d382f8f5b172060cb1050cef"}
Nov 27 08:51:07 crc kubenswrapper[4971]: I1127 08:51:07.272041 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7f9867f5f8-5v9cv" event={"ID":"b5931b59-b508-45f2-9c53-428808f072cb","Type":"ContainerStarted","Data":"bb76d7f1319f986146f458f1c59fc6911b486518ef44137a2f67b08002909594"}
Nov 27 08:51:07 crc kubenswrapper[4971]: I1127 08:51:07.275257 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" event={"ID":"5e91e8ba-b5c3-437f-9406-d76e697a9036","Type":"ContainerStarted","Data":"2faa40bce6ea1de34f200aac07279d93bd569df581e12e798a9ad839e7c8aa3f"}
Nov 27 08:51:07 crc kubenswrapper[4971]: I1127 08:51:07.278806 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" event={"ID":"44034162-7899-44d2-b36b-abedea905618","Type":"ContainerStarted","Data":"13669135962cd4f22510fdc4c86b6b1a1d6d29a2f3ad8709a1f1d78d68940d63"}
Nov 27 08:51:07 crc kubenswrapper[4971]: I1127 08:51:07.278845 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" event={"ID":"44034162-7899-44d2-b36b-abedea905618","Type":"ContainerStarted","Data":"397beb53c5aa81ae3a470554c925ee0bbe353e183973d586698aa98813483fd4"}
Nov 27 08:51:07 crc kubenswrapper[4971]: I1127 08:51:07.282738 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-579556d9cc-tzpkz" event={"ID":"3289cb6f-6334-47db-a85b-8fcc20a12295","Type":"ContainerStarted","Data":"3bfb23ddbc63b52dffd37bcb0b5620557b12536f67240f8a893c1357a1ee7056"}
Nov 27 08:51:08 crc kubenswrapper[4971]: I1127 08:51:08.310035 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7f9867f5f8-5v9cv" event={"ID":"b5931b59-b508-45f2-9c53-428808f072cb","Type":"ContainerStarted","Data":"adef821368070926251991e902c21636d3a00513713a2b502e2ae82e3622a4ae"}
Nov 27 08:51:08 crc kubenswrapper[4971]: I1127 08:51:08.310713 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7f9867f5f8-5v9cv"
Nov 27 08:51:08 crc kubenswrapper[4971]: I1127 08:51:08.310747 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7f9867f5f8-5v9cv"
Nov 27 08:51:08 crc kubenswrapper[4971]: I1127 08:51:08.329039 4971 generic.go:334] "Generic (PLEG): container finished" podID="44034162-7899-44d2-b36b-abedea905618" containerID="13669135962cd4f22510fdc4c86b6b1a1d6d29a2f3ad8709a1f1d78d68940d63" exitCode=0
Nov 27 08:51:08 crc kubenswrapper[4971]: I1127 08:51:08.329155 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" event={"ID":"44034162-7899-44d2-b36b-abedea905618","Type":"ContainerDied","Data":"13669135962cd4f22510fdc4c86b6b1a1d6d29a2f3ad8709a1f1d78d68940d63"}
Nov 27 08:51:08 crc kubenswrapper[4971]: I1127 08:51:08.329211 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" event={"ID":"44034162-7899-44d2-b36b-abedea905618","Type":"ContainerStarted","Data":"b091ee6d2d6d57be6c7898aa901d0dc3f9573f551acb683b9762a407332f7cff"}
Nov 27 08:51:08 crc kubenswrapper[4971]: I1127 08:51:08.330966 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q"
Nov 27 08:51:08 crc kubenswrapper[4971]: I1127 08:51:08.350231 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7f9867f5f8-5v9cv" podStartSLOduration=3.350186597 podStartE2EDuration="3.350186597s" podCreationTimestamp="2025-11-27 08:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:51:08.337770641 +0000 UTC m=+7106.529814579" watchObservedRunningTime="2025-11-27 08:51:08.350186597 +0000 UTC m=+7106.542230535"
Nov 27 08:51:08 crc kubenswrapper[4971]: I1127 08:51:08.370353 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" podStartSLOduration=3.3703323530000002 podStartE2EDuration="3.370332353s" podCreationTimestamp="2025-11-27 08:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:51:08.361765808 +0000 UTC m=+7106.553809736" watchObservedRunningTime="2025-11-27 08:51:08.370332353 +0000 UTC m=+7106.562376271"
Nov 27 08:51:09 crc kubenswrapper[4971]: I1127 08:51:09.342475 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" event={"ID":"5e91e8ba-b5c3-437f-9406-d76e697a9036","Type":"ContainerStarted","Data":"c3d0ab110af6f06c3ee806a64496226f898984351a3f07406fa7ac79aa0d2549"}
Nov 27 08:51:09 crc kubenswrapper[4971]: I1127 08:51:09.342922 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" event={"ID":"5e91e8ba-b5c3-437f-9406-d76e697a9036","Type":"ContainerStarted","Data":"c25d05fc660f5614ea5376f1152baeaf873f042a39640a65d8d577241086bd71"}
Nov 27 08:51:09 crc kubenswrapper[4971]: I1127 08:51:09.348416 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-579556d9cc-tzpkz" event={"ID":"3289cb6f-6334-47db-a85b-8fcc20a12295","Type":"ContainerStarted","Data":"781cc90dc59483413df8424c72d18b9d42f572e525f593f50fc85681e07b5eed"}
Nov 27 08:51:09 crc kubenswrapper[4971]: I1127 08:51:09.348459 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-579556d9cc-tzpkz" event={"ID":"3289cb6f-6334-47db-a85b-8fcc20a12295","Type":"ContainerStarted","Data":"9d363b6f264b8474144b513eee5d4350ee67273b5a8acca4e0c26465d6225edd"}
Nov 27 08:51:09 crc kubenswrapper[4971]: I1127 08:51:09.373869 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-86bc479d88-stpvp" podStartSLOduration=2.734788743 podStartE2EDuration="4.373834107s" podCreationTimestamp="2025-11-27 08:51:05 +0000 UTC" firstStartedPulling="2025-11-27 08:51:06.609868073 +0000 UTC m=+7104.801911991" lastFinishedPulling="2025-11-27 08:51:08.248913437 +0000 UTC m=+7106.440957355" observedRunningTime="2025-11-27 08:51:09.365073336 +0000 UTC m=+7107.557117264" watchObservedRunningTime="2025-11-27 08:51:09.373834107 +0000 UTC m=+7107.565878025"
Nov 27 08:51:09 crc kubenswrapper[4971]: I1127 08:51:09.394623 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-579556d9cc-tzpkz" podStartSLOduration=2.705962338 podStartE2EDuration="4.39454252s" podCreationTimestamp="2025-11-27 08:51:05 +0000 UTC" firstStartedPulling="2025-11-27 08:51:06.558323777 +0000 UTC m=+7104.750367685" lastFinishedPulling="2025-11-27 08:51:08.246903949 +0000 UTC m=+7106.438947867" observedRunningTime="2025-11-27 08:51:09.387233001 +0000 UTC m=+7107.579276919" watchObservedRunningTime="2025-11-27 08:51:09.39454252 +0000 UTC m=+7107.586586438"
Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.001558 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q"
Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.057556 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c95b65675-rnwh2"]
Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.057832 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" podUID="68d2df89-95b3-45e1-809a-a5f3cefcae3d" containerName="dnsmasq-dns" containerID="cri-o://a02a6dbb765e454822f7db60bb0e8d37be0cd1594baa386cc82052e3ca98b3e0" gracePeriod=10
Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.428186 4971 generic.go:334] "Generic (PLEG): container finished" podID="68d2df89-95b3-45e1-809a-a5f3cefcae3d" containerID="a02a6dbb765e454822f7db60bb0e8d37be0cd1594baa386cc82052e3ca98b3e0" exitCode=0
Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.428234 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" event={"ID":"68d2df89-95b3-45e1-809a-a5f3cefcae3d","Type":"ContainerDied","Data":"a02a6dbb765e454822f7db60bb0e8d37be0cd1594baa386cc82052e3ca98b3e0"}
Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.631505 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c95b65675-rnwh2"
Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.735975 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-ovsdbserver-sb\") pod \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") "
Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.736078 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-config\") pod \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") "
Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.736263 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwgkk\" (UniqueName: \"kubernetes.io/projected/68d2df89-95b3-45e1-809a-a5f3cefcae3d-kube-api-access-kwgkk\") pod \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") "
Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.736306 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-ovsdbserver-nb\") pod \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") "
Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.736353 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-dns-svc\") pod \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\" (UID: \"68d2df89-95b3-45e1-809a-a5f3cefcae3d\") "
Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.743628 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68d2df89-95b3-45e1-809a-a5f3cefcae3d-kube-api-access-kwgkk" (OuterVolumeSpecName: "kube-api-access-kwgkk") pod "68d2df89-95b3-45e1-809a-a5f3cefcae3d" (UID: "68d2df89-95b3-45e1-809a-a5f3cefcae3d"). InnerVolumeSpecName "kube-api-access-kwgkk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.783917 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "68d2df89-95b3-45e1-809a-a5f3cefcae3d" (UID: "68d2df89-95b3-45e1-809a-a5f3cefcae3d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.791377 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "68d2df89-95b3-45e1-809a-a5f3cefcae3d" (UID: "68d2df89-95b3-45e1-809a-a5f3cefcae3d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.793133 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "68d2df89-95b3-45e1-809a-a5f3cefcae3d" (UID: "68d2df89-95b3-45e1-809a-a5f3cefcae3d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.795216 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-config" (OuterVolumeSpecName: "config") pod "68d2df89-95b3-45e1-809a-a5f3cefcae3d" (UID: "68d2df89-95b3-45e1-809a-a5f3cefcae3d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.840222 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwgkk\" (UniqueName: \"kubernetes.io/projected/68d2df89-95b3-45e1-809a-a5f3cefcae3d-kube-api-access-kwgkk\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.840260 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.840273 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.840285 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:16 crc kubenswrapper[4971]: I1127 08:51:16.840295 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68d2df89-95b3-45e1-809a-a5f3cefcae3d-config\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:17 crc kubenswrapper[4971]: I1127 08:51:17.438977 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" event={"ID":"68d2df89-95b3-45e1-809a-a5f3cefcae3d","Type":"ContainerDied","Data":"a245cc8fd1cb4c5e4ac3db6b52804f527483ea3968738a928f83647ba93512dc"} Nov 27 08:51:17 crc kubenswrapper[4971]: I1127 08:51:17.439096 4971 scope.go:117] "RemoveContainer" containerID="a02a6dbb765e454822f7db60bb0e8d37be0cd1594baa386cc82052e3ca98b3e0" Nov 27 08:51:17 crc kubenswrapper[4971]: I1127 08:51:17.439100 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c95b65675-rnwh2" Nov 27 08:51:17 crc kubenswrapper[4971]: I1127 08:51:17.470740 4971 scope.go:117] "RemoveContainer" containerID="b91d0a2d15b716527ada5f5b0968d3b249c1929c613a5079aea5016c0c0c88d7" Nov 27 08:51:17 crc kubenswrapper[4971]: I1127 08:51:17.487454 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c95b65675-rnwh2"] Nov 27 08:51:17 crc kubenswrapper[4971]: I1127 08:51:17.502920 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c95b65675-rnwh2"] Nov 27 08:51:17 crc kubenswrapper[4971]: I1127 08:51:17.551724 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:51:17 crc kubenswrapper[4971]: E1127 08:51:17.552016 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:51:17 crc kubenswrapper[4971]: I1127 08:51:17.825325 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7f9867f5f8-5v9cv" Nov 27 08:51:17 crc kubenswrapper[4971]: I1127 08:51:17.906918 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7f9867f5f8-5v9cv" Nov 27 08:51:18 crc kubenswrapper[4971]: I1127 08:51:18.564717 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68d2df89-95b3-45e1-809a-a5f3cefcae3d" path="/var/lib/kubelet/pods/68d2df89-95b3-45e1-809a-a5f3cefcae3d/volumes" Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.061704 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-xscwm"] Nov 27 08:51:25 crc kubenswrapper[4971]: E1127 08:51:25.062874 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68d2df89-95b3-45e1-809a-a5f3cefcae3d" containerName="init" Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.062892 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="68d2df89-95b3-45e1-809a-a5f3cefcae3d" containerName="init" Nov 27 08:51:25 crc kubenswrapper[4971]: E1127 08:51:25.062913 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68d2df89-95b3-45e1-809a-a5f3cefcae3d" containerName="dnsmasq-dns" Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.062920 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="68d2df89-95b3-45e1-809a-a5f3cefcae3d" containerName="dnsmasq-dns" Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.063149 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="68d2df89-95b3-45e1-809a-a5f3cefcae3d" containerName="dnsmasq-dns" Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.063871 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-xscwm"] Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.063982 4971 util.go:30] "No sandbox for pod can be found. 
Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.156486 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-671e-account-create-update-sgvgw"]
Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.158124 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-671e-account-create-update-sgvgw"
Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.161097 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret"
Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.167686 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-671e-account-create-update-sgvgw"]
Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.217170 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4g7pt\" (UniqueName: \"kubernetes.io/projected/558798ff-c172-44e0-b329-79cb68adc93e-kube-api-access-4g7pt\") pod \"neutron-db-create-xscwm\" (UID: \"558798ff-c172-44e0-b329-79cb68adc93e\") " pod="openstack/neutron-db-create-xscwm"
Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.217276 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/558798ff-c172-44e0-b329-79cb68adc93e-operator-scripts\") pod \"neutron-db-create-xscwm\" (UID: \"558798ff-c172-44e0-b329-79cb68adc93e\") " pod="openstack/neutron-db-create-xscwm"
Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.318941 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0aa92f81-6dce-45d4-ad97-f736266bcc69-operator-scripts\") pod \"neutron-671e-account-create-update-sgvgw\" (UID: \"0aa92f81-6dce-45d4-ad97-f736266bcc69\") " pod="openstack/neutron-671e-account-create-update-sgvgw"
Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.319013 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4g7pt\" (UniqueName: \"kubernetes.io/projected/558798ff-c172-44e0-b329-79cb68adc93e-kube-api-access-4g7pt\") pod \"neutron-db-create-xscwm\" (UID: \"558798ff-c172-44e0-b329-79cb68adc93e\") " pod="openstack/neutron-db-create-xscwm"
Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.319203 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/558798ff-c172-44e0-b329-79cb68adc93e-operator-scripts\") pod \"neutron-db-create-xscwm\" (UID: \"558798ff-c172-44e0-b329-79cb68adc93e\") " pod="openstack/neutron-db-create-xscwm"
Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.319379 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p95f9\" (UniqueName: \"kubernetes.io/projected/0aa92f81-6dce-45d4-ad97-f736266bcc69-kube-api-access-p95f9\") pod \"neutron-671e-account-create-update-sgvgw\" (UID: \"0aa92f81-6dce-45d4-ad97-f736266bcc69\") " pod="openstack/neutron-671e-account-create-update-sgvgw"
Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.320165 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/558798ff-c172-44e0-b329-79cb68adc93e-operator-scripts\") pod \"neutron-db-create-xscwm\" (UID: \"558798ff-c172-44e0-b329-79cb68adc93e\") " pod="openstack/neutron-db-create-xscwm"
Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.339700 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4g7pt\" (UniqueName: \"kubernetes.io/projected/558798ff-c172-44e0-b329-79cb68adc93e-kube-api-access-4g7pt\") pod \"neutron-db-create-xscwm\" (UID: \"558798ff-c172-44e0-b329-79cb68adc93e\") " pod="openstack/neutron-db-create-xscwm"
Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.414457 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-xscwm"
Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.421730 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p95f9\" (UniqueName: \"kubernetes.io/projected/0aa92f81-6dce-45d4-ad97-f736266bcc69-kube-api-access-p95f9\") pod \"neutron-671e-account-create-update-sgvgw\" (UID: \"0aa92f81-6dce-45d4-ad97-f736266bcc69\") " pod="openstack/neutron-671e-account-create-update-sgvgw"
Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.421886 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0aa92f81-6dce-45d4-ad97-f736266bcc69-operator-scripts\") pod \"neutron-671e-account-create-update-sgvgw\" (UID: \"0aa92f81-6dce-45d4-ad97-f736266bcc69\") " pod="openstack/neutron-671e-account-create-update-sgvgw"
Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.423283 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0aa92f81-6dce-45d4-ad97-f736266bcc69-operator-scripts\") pod \"neutron-671e-account-create-update-sgvgw\" (UID: \"0aa92f81-6dce-45d4-ad97-f736266bcc69\") " pod="openstack/neutron-671e-account-create-update-sgvgw"
Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.443571 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p95f9\" (UniqueName: \"kubernetes.io/projected/0aa92f81-6dce-45d4-ad97-f736266bcc69-kube-api-access-p95f9\") pod \"neutron-671e-account-create-update-sgvgw\" (UID: \"0aa92f81-6dce-45d4-ad97-f736266bcc69\") " pod="openstack/neutron-671e-account-create-update-sgvgw"
Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.494584 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-671e-account-create-update-sgvgw"
Nov 27 08:51:25 crc kubenswrapper[4971]: I1127 08:51:25.885192 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-xscwm"]
Nov 27 08:51:25 crc kubenswrapper[4971]: W1127 08:51:25.890077 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod558798ff_c172_44e0_b329_79cb68adc93e.slice/crio-4911956b173dfa3eb6df88c1987f1ed913a2015aab9399412d0cc3777aee2a31 WatchSource:0}: Error finding container 4911956b173dfa3eb6df88c1987f1ed913a2015aab9399412d0cc3777aee2a31: Status 404 returned error can't find the container with id 4911956b173dfa3eb6df88c1987f1ed913a2015aab9399412d0cc3777aee2a31
Nov 27 08:51:26 crc kubenswrapper[4971]: I1127 08:51:26.016259 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-671e-account-create-update-sgvgw"]
Nov 27 08:51:26 crc kubenswrapper[4971]: W1127 08:51:26.020577 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0aa92f81_6dce_45d4_ad97_f736266bcc69.slice/crio-ed1ad5852bb400d859e439e07cc6aa61616af0104d96c282d456a526598638f8 WatchSource:0}: Error finding container ed1ad5852bb400d859e439e07cc6aa61616af0104d96c282d456a526598638f8: Status 404 returned error can't find the container with id ed1ad5852bb400d859e439e07cc6aa61616af0104d96c282d456a526598638f8
Nov 27 08:51:26 crc kubenswrapper[4971]: I1127 08:51:26.546012 4971 generic.go:334] "Generic (PLEG): container finished" podID="558798ff-c172-44e0-b329-79cb68adc93e" containerID="8aa735447b4a30617a9838859f04b8951ee19a5830412d9ee16084071d62a68c" exitCode=0
Nov 27 08:51:26 crc kubenswrapper[4971]: I1127 08:51:26.546124 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-xscwm" event={"ID":"558798ff-c172-44e0-b329-79cb68adc93e","Type":"ContainerDied","Data":"8aa735447b4a30617a9838859f04b8951ee19a5830412d9ee16084071d62a68c"}
Nov 27 08:51:26 crc kubenswrapper[4971]: I1127 08:51:26.546189 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-xscwm" event={"ID":"558798ff-c172-44e0-b329-79cb68adc93e","Type":"ContainerStarted","Data":"4911956b173dfa3eb6df88c1987f1ed913a2015aab9399412d0cc3777aee2a31"}
Nov 27 08:51:26 crc kubenswrapper[4971]: I1127 08:51:26.547409 4971 generic.go:334] "Generic (PLEG): container finished" podID="0aa92f81-6dce-45d4-ad97-f736266bcc69" containerID="abf97cb26d9d10cfa3a3f85e66a7fda39253ee4584313278aa620c6a419d6a2a" exitCode=0
Nov 27 08:51:26 crc kubenswrapper[4971]: I1127 08:51:26.547443 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-671e-account-create-update-sgvgw" event={"ID":"0aa92f81-6dce-45d4-ad97-f736266bcc69","Type":"ContainerDied","Data":"abf97cb26d9d10cfa3a3f85e66a7fda39253ee4584313278aa620c6a419d6a2a"}
Nov 27 08:51:26 crc kubenswrapper[4971]: I1127 08:51:26.547458 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-671e-account-create-update-sgvgw" event={"ID":"0aa92f81-6dce-45d4-ad97-f736266bcc69","Type":"ContainerStarted","Data":"ed1ad5852bb400d859e439e07cc6aa61616af0104d96c282d456a526598638f8"}
Nov 27 08:51:27 crc kubenswrapper[4971]: I1127 08:51:27.964576 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-671e-account-create-update-sgvgw"
Nov 27 08:51:27 crc kubenswrapper[4971]: I1127 08:51:27.975461 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-xscwm"
Nov 27 08:51:28 crc kubenswrapper[4971]: I1127 08:51:28.074738 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4g7pt\" (UniqueName: \"kubernetes.io/projected/558798ff-c172-44e0-b329-79cb68adc93e-kube-api-access-4g7pt\") pod \"558798ff-c172-44e0-b329-79cb68adc93e\" (UID: \"558798ff-c172-44e0-b329-79cb68adc93e\") "
Nov 27 08:51:28 crc kubenswrapper[4971]: I1127 08:51:28.074852 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/558798ff-c172-44e0-b329-79cb68adc93e-operator-scripts\") pod \"558798ff-c172-44e0-b329-79cb68adc93e\" (UID: \"558798ff-c172-44e0-b329-79cb68adc93e\") "
Nov 27 08:51:28 crc kubenswrapper[4971]: I1127 08:51:28.074907 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0aa92f81-6dce-45d4-ad97-f736266bcc69-operator-scripts\") pod \"0aa92f81-6dce-45d4-ad97-f736266bcc69\" (UID: \"0aa92f81-6dce-45d4-ad97-f736266bcc69\") "
Nov 27 08:51:28 crc kubenswrapper[4971]: I1127 08:51:28.074996 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p95f9\" (UniqueName: \"kubernetes.io/projected/0aa92f81-6dce-45d4-ad97-f736266bcc69-kube-api-access-p95f9\") pod \"0aa92f81-6dce-45d4-ad97-f736266bcc69\" (UID: \"0aa92f81-6dce-45d4-ad97-f736266bcc69\") "
Nov 27 08:51:28 crc kubenswrapper[4971]: I1127 08:51:28.075596 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/558798ff-c172-44e0-b329-79cb68adc93e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "558798ff-c172-44e0-b329-79cb68adc93e" (UID: "558798ff-c172-44e0-b329-79cb68adc93e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 08:51:28 crc kubenswrapper[4971]: I1127 08:51:28.075749 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0aa92f81-6dce-45d4-ad97-f736266bcc69-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0aa92f81-6dce-45d4-ad97-f736266bcc69" (UID: "0aa92f81-6dce-45d4-ad97-f736266bcc69"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 08:51:28 crc kubenswrapper[4971]: I1127 08:51:28.080711 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/558798ff-c172-44e0-b329-79cb68adc93e-kube-api-access-4g7pt" (OuterVolumeSpecName: "kube-api-access-4g7pt") pod "558798ff-c172-44e0-b329-79cb68adc93e" (UID: "558798ff-c172-44e0-b329-79cb68adc93e"). InnerVolumeSpecName "kube-api-access-4g7pt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:51:28 crc kubenswrapper[4971]: I1127 08:51:28.080990 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0aa92f81-6dce-45d4-ad97-f736266bcc69-kube-api-access-p95f9" (OuterVolumeSpecName: "kube-api-access-p95f9") pod "0aa92f81-6dce-45d4-ad97-f736266bcc69" (UID: "0aa92f81-6dce-45d4-ad97-f736266bcc69"). InnerVolumeSpecName "kube-api-access-p95f9". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:51:28 crc kubenswrapper[4971]: I1127 08:51:28.177942 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p95f9\" (UniqueName: \"kubernetes.io/projected/0aa92f81-6dce-45d4-ad97-f736266bcc69-kube-api-access-p95f9\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:28 crc kubenswrapper[4971]: I1127 08:51:28.177983 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4g7pt\" (UniqueName: \"kubernetes.io/projected/558798ff-c172-44e0-b329-79cb68adc93e-kube-api-access-4g7pt\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:28 crc kubenswrapper[4971]: I1127 08:51:28.177997 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/558798ff-c172-44e0-b329-79cb68adc93e-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:28 crc kubenswrapper[4971]: I1127 08:51:28.178010 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0aa92f81-6dce-45d4-ad97-f736266bcc69-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:28 crc kubenswrapper[4971]: I1127 08:51:28.569453 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-671e-account-create-update-sgvgw" event={"ID":"0aa92f81-6dce-45d4-ad97-f736266bcc69","Type":"ContainerDied","Data":"ed1ad5852bb400d859e439e07cc6aa61616af0104d96c282d456a526598638f8"} Nov 27 08:51:28 crc kubenswrapper[4971]: I1127 08:51:28.569515 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed1ad5852bb400d859e439e07cc6aa61616af0104d96c282d456a526598638f8" Nov 27 08:51:28 crc kubenswrapper[4971]: I1127 08:51:28.569919 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-671e-account-create-update-sgvgw" Nov 27 08:51:28 crc kubenswrapper[4971]: I1127 08:51:28.570809 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-xscwm" event={"ID":"558798ff-c172-44e0-b329-79cb68adc93e","Type":"ContainerDied","Data":"4911956b173dfa3eb6df88c1987f1ed913a2015aab9399412d0cc3777aee2a31"} Nov 27 08:51:28 crc kubenswrapper[4971]: I1127 08:51:28.570839 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4911956b173dfa3eb6df88c1987f1ed913a2015aab9399412d0cc3777aee2a31" Nov 27 08:51:28 crc kubenswrapper[4971]: I1127 08:51:28.570908 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-xscwm" Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.418647 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-2g9m5"] Nov 27 08:51:30 crc kubenswrapper[4971]: E1127 08:51:30.419403 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aa92f81-6dce-45d4-ad97-f736266bcc69" containerName="mariadb-account-create-update" Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.419418 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aa92f81-6dce-45d4-ad97-f736266bcc69" containerName="mariadb-account-create-update" Nov 27 08:51:30 crc kubenswrapper[4971]: E1127 08:51:30.419445 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="558798ff-c172-44e0-b329-79cb68adc93e" containerName="mariadb-database-create" Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.419452 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="558798ff-c172-44e0-b329-79cb68adc93e" containerName="mariadb-database-create" Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.419636 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="558798ff-c172-44e0-b329-79cb68adc93e" containerName="mariadb-database-create" Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.419655 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aa92f81-6dce-45d4-ad97-f736266bcc69" containerName="mariadb-account-create-update" Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.420377 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-2g9m5" Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.424023 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.424076 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-v576j" Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.424894 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.432515 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-2g9m5"] Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.535698 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8h5pd\" (UniqueName: \"kubernetes.io/projected/0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845-kube-api-access-8h5pd\") pod \"neutron-db-sync-2g9m5\" (UID: \"0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845\") " pod="openstack/neutron-db-sync-2g9m5" Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.535796 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845-config\") pod \"neutron-db-sync-2g9m5\" (UID: \"0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845\") " pod="openstack/neutron-db-sync-2g9m5" Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.535822 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845-combined-ca-bundle\") pod \"neutron-db-sync-2g9m5\" (UID: \"0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845\") " pod="openstack/neutron-db-sync-2g9m5" Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 
Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.637597 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8h5pd\" (UniqueName: \"kubernetes.io/projected/0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845-kube-api-access-8h5pd\") pod \"neutron-db-sync-2g9m5\" (UID: \"0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845\") " pod="openstack/neutron-db-sync-2g9m5"
Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.637723 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845-config\") pod \"neutron-db-sync-2g9m5\" (UID: \"0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845\") " pod="openstack/neutron-db-sync-2g9m5"
Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.637764 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845-combined-ca-bundle\") pod \"neutron-db-sync-2g9m5\" (UID: \"0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845\") " pod="openstack/neutron-db-sync-2g9m5"
Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.646762 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845-config\") pod \"neutron-db-sync-2g9m5\" (UID: \"0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845\") " pod="openstack/neutron-db-sync-2g9m5"
Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.660588 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845-combined-ca-bundle\") pod \"neutron-db-sync-2g9m5\" (UID: \"0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845\") " pod="openstack/neutron-db-sync-2g9m5"
Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.661379 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8h5pd\" (UniqueName: \"kubernetes.io/projected/0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845-kube-api-access-8h5pd\") pod \"neutron-db-sync-2g9m5\" (UID: \"0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845\") " pod="openstack/neutron-db-sync-2g9m5"
Nov 27 08:51:30 crc kubenswrapper[4971]: I1127 08:51:30.739509 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-2g9m5"
Nov 27 08:51:31 crc kubenswrapper[4971]: I1127 08:51:31.209775 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-2g9m5"]
Nov 27 08:51:31 crc kubenswrapper[4971]: W1127 08:51:31.213570 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b8c8f77_3f99_4f77_bbbd_bb90d3ad8845.slice/crio-6800b2786f451768fb2db33ba3e07c62945cdb14bd91a5113dfc0aee6ce91e92 WatchSource:0}: Error finding container 6800b2786f451768fb2db33ba3e07c62945cdb14bd91a5113dfc0aee6ce91e92: Status 404 returned error can't find the container with id 6800b2786f451768fb2db33ba3e07c62945cdb14bd91a5113dfc0aee6ce91e92
Nov 27 08:51:31 crc kubenswrapper[4971]: I1127 08:51:31.600433 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2g9m5" event={"ID":"0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845","Type":"ContainerStarted","Data":"9169a0b52d9e84b87d170788f364b54a63a5512693ad23c3f1d33a5fae879542"}
Nov 27 08:51:31 crc kubenswrapper[4971]: I1127 08:51:31.600497 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2g9m5" event={"ID":"0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845","Type":"ContainerStarted","Data":"6800b2786f451768fb2db33ba3e07c62945cdb14bd91a5113dfc0aee6ce91e92"}
Nov 27 08:51:31 crc kubenswrapper[4971]: I1127 08:51:31.636273 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-2g9m5" podStartSLOduration=1.636233686 podStartE2EDuration="1.636233686s" podCreationTimestamp="2025-11-27 08:51:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:51:31.617648754 +0000 UTC m=+7129.809692682" watchObservedRunningTime="2025-11-27 08:51:31.636233686 +0000 UTC m=+7129.828277644"
Nov 27 08:51:32 crc kubenswrapper[4971]: I1127 08:51:32.559713 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4"
Nov 27 08:51:32 crc kubenswrapper[4971]: E1127 08:51:32.560626 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 08:51:35 crc kubenswrapper[4971]: I1127 08:51:35.640520 4971 generic.go:334] "Generic (PLEG): container finished" podID="0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845" containerID="9169a0b52d9e84b87d170788f364b54a63a5512693ad23c3f1d33a5fae879542" exitCode=0
Nov 27 08:51:35 crc kubenswrapper[4971]: I1127 08:51:35.640668 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2g9m5" event={"ID":"0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845","Type":"ContainerDied","Data":"9169a0b52d9e84b87d170788f364b54a63a5512693ad23c3f1d33a5fae879542"}
Nov 27 08:51:36 crc kubenswrapper[4971]: I1127 08:51:36.958887 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-2g9m5"
Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.050502 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845-config\") pod \"0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845\" (UID: \"0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845\") "
Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.050633 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8h5pd\" (UniqueName: \"kubernetes.io/projected/0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845-kube-api-access-8h5pd\") pod \"0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845\" (UID: \"0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845\") "
Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.050711 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845-combined-ca-bundle\") pod \"0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845\" (UID: \"0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845\") "
Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.056340 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845-kube-api-access-8h5pd" (OuterVolumeSpecName: "kube-api-access-8h5pd") pod "0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845" (UID: "0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845"). InnerVolumeSpecName "kube-api-access-8h5pd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.073335 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845" (UID: "0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.074001 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845-config" (OuterVolumeSpecName: "config") pod "0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845" (UID: "0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.152425 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845-config\") on node \"crc\" DevicePath \"\""
Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.152473 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8h5pd\" (UniqueName: \"kubernetes.io/projected/0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845-kube-api-access-8h5pd\") on node \"crc\" DevicePath \"\""
Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.152510 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.661803 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-2g9m5" event={"ID":"0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845","Type":"ContainerDied","Data":"6800b2786f451768fb2db33ba3e07c62945cdb14bd91a5113dfc0aee6ce91e92"}
Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.661853 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6800b2786f451768fb2db33ba3e07c62945cdb14bd91a5113dfc0aee6ce91e92"
Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.661872 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-2g9m5"
Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.886832 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8bddfb579-775j9"]
Nov 27 08:51:37 crc kubenswrapper[4971]: E1127 08:51:37.887247 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845" containerName="neutron-db-sync"
Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.887272 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845" containerName="neutron-db-sync"
Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.887484 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845" containerName="neutron-db-sync"
Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.888435 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8bddfb579-775j9"
Need to start a new one" pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.927603 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8bddfb579-775j9"] Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.966663 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-ovsdbserver-nb\") pod \"dnsmasq-dns-8bddfb579-775j9\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.966766 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-dns-svc\") pod \"dnsmasq-dns-8bddfb579-775j9\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.966851 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-config\") pod \"dnsmasq-dns-8bddfb579-775j9\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.966986 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-ovsdbserver-sb\") pod \"dnsmasq-dns-8bddfb579-775j9\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:51:37 crc kubenswrapper[4971]: I1127 08:51:37.967019 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksxqv\" (UniqueName: \"kubernetes.io/projected/70bc4fa8-853f-476c-aa6a-50aa1e80391f-kube-api-access-ksxqv\") pod \"dnsmasq-dns-8bddfb579-775j9\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.069282 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksxqv\" (UniqueName: \"kubernetes.io/projected/70bc4fa8-853f-476c-aa6a-50aa1e80391f-kube-api-access-ksxqv\") pod \"dnsmasq-dns-8bddfb579-775j9\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.069711 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-ovsdbserver-nb\") pod \"dnsmasq-dns-8bddfb579-775j9\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.071285 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-ovsdbserver-nb\") pod \"dnsmasq-dns-8bddfb579-775j9\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.072328 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"dns-svc\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-dns-svc\") pod \"dnsmasq-dns-8bddfb579-775j9\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.072462 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-dns-svc\") pod \"dnsmasq-dns-8bddfb579-775j9\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.073747 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-config\") pod \"dnsmasq-dns-8bddfb579-775j9\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.072902 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-config\") pod \"dnsmasq-dns-8bddfb579-775j9\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.073900 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-ovsdbserver-sb\") pod \"dnsmasq-dns-8bddfb579-775j9\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.075126 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-ovsdbserver-sb\") pod \"dnsmasq-dns-8bddfb579-775j9\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.094033 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksxqv\" (UniqueName: \"kubernetes.io/projected/70bc4fa8-853f-476c-aa6a-50aa1e80391f-kube-api-access-ksxqv\") pod \"dnsmasq-dns-8bddfb579-775j9\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.181834 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5fd8b7cd95-lc5lp"] Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.183559 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5fd8b7cd95-lc5lp" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.186469 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-v576j" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.186815 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.186954 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.206783 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5fd8b7cd95-lc5lp"] Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.215891 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.277552 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/289e2425-5bea-432f-bb53-37fbd0e796b5-combined-ca-bundle\") pod \"neutron-5fd8b7cd95-lc5lp\" (UID: \"289e2425-5bea-432f-bb53-37fbd0e796b5\") " pod="openstack/neutron-5fd8b7cd95-lc5lp" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.277604 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8whn\" (UniqueName: \"kubernetes.io/projected/289e2425-5bea-432f-bb53-37fbd0e796b5-kube-api-access-k8whn\") pod \"neutron-5fd8b7cd95-lc5lp\" (UID: \"289e2425-5bea-432f-bb53-37fbd0e796b5\") " pod="openstack/neutron-5fd8b7cd95-lc5lp" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.277668 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/289e2425-5bea-432f-bb53-37fbd0e796b5-config\") pod \"neutron-5fd8b7cd95-lc5lp\" (UID: \"289e2425-5bea-432f-bb53-37fbd0e796b5\") " pod="openstack/neutron-5fd8b7cd95-lc5lp" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.277730 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/289e2425-5bea-432f-bb53-37fbd0e796b5-httpd-config\") pod \"neutron-5fd8b7cd95-lc5lp\" (UID: \"289e2425-5bea-432f-bb53-37fbd0e796b5\") " pod="openstack/neutron-5fd8b7cd95-lc5lp" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.379492 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/289e2425-5bea-432f-bb53-37fbd0e796b5-httpd-config\") pod \"neutron-5fd8b7cd95-lc5lp\" (UID: \"289e2425-5bea-432f-bb53-37fbd0e796b5\") " pod="openstack/neutron-5fd8b7cd95-lc5lp" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.379593 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/289e2425-5bea-432f-bb53-37fbd0e796b5-combined-ca-bundle\") pod \"neutron-5fd8b7cd95-lc5lp\" (UID: \"289e2425-5bea-432f-bb53-37fbd0e796b5\") " pod="openstack/neutron-5fd8b7cd95-lc5lp" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.379616 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8whn\" (UniqueName: \"kubernetes.io/projected/289e2425-5bea-432f-bb53-37fbd0e796b5-kube-api-access-k8whn\") pod 
\"neutron-5fd8b7cd95-lc5lp\" (UID: \"289e2425-5bea-432f-bb53-37fbd0e796b5\") " pod="openstack/neutron-5fd8b7cd95-lc5lp" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.379678 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/289e2425-5bea-432f-bb53-37fbd0e796b5-config\") pod \"neutron-5fd8b7cd95-lc5lp\" (UID: \"289e2425-5bea-432f-bb53-37fbd0e796b5\") " pod="openstack/neutron-5fd8b7cd95-lc5lp" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.387573 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/289e2425-5bea-432f-bb53-37fbd0e796b5-combined-ca-bundle\") pod \"neutron-5fd8b7cd95-lc5lp\" (UID: \"289e2425-5bea-432f-bb53-37fbd0e796b5\") " pod="openstack/neutron-5fd8b7cd95-lc5lp" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.400548 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/289e2425-5bea-432f-bb53-37fbd0e796b5-config\") pod \"neutron-5fd8b7cd95-lc5lp\" (UID: \"289e2425-5bea-432f-bb53-37fbd0e796b5\") " pod="openstack/neutron-5fd8b7cd95-lc5lp" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.404243 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/289e2425-5bea-432f-bb53-37fbd0e796b5-httpd-config\") pod \"neutron-5fd8b7cd95-lc5lp\" (UID: \"289e2425-5bea-432f-bb53-37fbd0e796b5\") " pod="openstack/neutron-5fd8b7cd95-lc5lp" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.431824 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8whn\" (UniqueName: \"kubernetes.io/projected/289e2425-5bea-432f-bb53-37fbd0e796b5-kube-api-access-k8whn\") pod \"neutron-5fd8b7cd95-lc5lp\" (UID: \"289e2425-5bea-432f-bb53-37fbd0e796b5\") " pod="openstack/neutron-5fd8b7cd95-lc5lp" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.498863 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5fd8b7cd95-lc5lp" Nov 27 08:51:38 crc kubenswrapper[4971]: I1127 08:51:38.775021 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8bddfb579-775j9"] Nov 27 08:51:39 crc kubenswrapper[4971]: I1127 08:51:39.099435 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5fd8b7cd95-lc5lp"] Nov 27 08:51:39 crc kubenswrapper[4971]: I1127 08:51:39.679146 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5fd8b7cd95-lc5lp" event={"ID":"289e2425-5bea-432f-bb53-37fbd0e796b5","Type":"ContainerStarted","Data":"b018c70df439fba523f2633aa61eb56abb65a50f41c7020ceee9f7ad3b41b48a"} Nov 27 08:51:39 crc kubenswrapper[4971]: I1127 08:51:39.679797 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5fd8b7cd95-lc5lp" event={"ID":"289e2425-5bea-432f-bb53-37fbd0e796b5","Type":"ContainerStarted","Data":"73ac9340246cfbfe9ee9a23655212714454b1f6f136293e41692a368a49ff22b"} Nov 27 08:51:39 crc kubenswrapper[4971]: I1127 08:51:39.679883 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5fd8b7cd95-lc5lp" event={"ID":"289e2425-5bea-432f-bb53-37fbd0e796b5","Type":"ContainerStarted","Data":"0b30257738b05eef52d0c24d42eb3dd34d458d55e62fa54c5ac2e93a39314dca"} Nov 27 08:51:39 crc kubenswrapper[4971]: I1127 08:51:39.680002 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5fd8b7cd95-lc5lp" Nov 27 08:51:39 crc kubenswrapper[4971]: I1127 08:51:39.681149 4971 generic.go:334] "Generic (PLEG): container finished" podID="70bc4fa8-853f-476c-aa6a-50aa1e80391f" containerID="335b391eb5641133661ad248c6db06c4ae8a7d5b1da973f85bbccfa52de515cf" exitCode=0 Nov 27 08:51:39 crc kubenswrapper[4971]: I1127 08:51:39.681249 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8bddfb579-775j9" event={"ID":"70bc4fa8-853f-476c-aa6a-50aa1e80391f","Type":"ContainerDied","Data":"335b391eb5641133661ad248c6db06c4ae8a7d5b1da973f85bbccfa52de515cf"} Nov 27 08:51:39 crc kubenswrapper[4971]: I1127 08:51:39.681313 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8bddfb579-775j9" event={"ID":"70bc4fa8-853f-476c-aa6a-50aa1e80391f","Type":"ContainerStarted","Data":"9fb908e34809f98d862e5c540d572570c7c9c460d73e45987643dcd9267e923f"} Nov 27 08:51:39 crc kubenswrapper[4971]: I1127 08:51:39.714320 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5fd8b7cd95-lc5lp" podStartSLOduration=1.7142936359999998 podStartE2EDuration="1.714293636s" podCreationTimestamp="2025-11-27 08:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:51:39.709044655 +0000 UTC m=+7137.901088573" watchObservedRunningTime="2025-11-27 08:51:39.714293636 +0000 UTC m=+7137.906337554" Nov 27 08:51:40 crc kubenswrapper[4971]: I1127 08:51:40.692936 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8bddfb579-775j9" event={"ID":"70bc4fa8-853f-476c-aa6a-50aa1e80391f","Type":"ContainerStarted","Data":"331afbf72ff0bf220438d6c691d8c98abf9046d9d106413b7716af7f23f67e5c"} Nov 27 08:51:40 crc kubenswrapper[4971]: I1127 08:51:40.718920 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8bddfb579-775j9" podStartSLOduration=3.718899582 podStartE2EDuration="3.718899582s" podCreationTimestamp="2025-11-27 08:51:37 
+0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:51:40.711399958 +0000 UTC m=+7138.903443886" watchObservedRunningTime="2025-11-27 08:51:40.718899582 +0000 UTC m=+7138.910943500" Nov 27 08:51:41 crc kubenswrapper[4971]: I1127 08:51:41.703171 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:51:44 crc kubenswrapper[4971]: I1127 08:51:44.550904 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:51:44 crc kubenswrapper[4971]: E1127 08:51:44.552872 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:51:48 crc kubenswrapper[4971]: I1127 08:51:48.218878 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:51:48 crc kubenswrapper[4971]: I1127 08:51:48.289427 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54dd4f7d4c-8t54q"] Nov 27 08:51:48 crc kubenswrapper[4971]: I1127 08:51:48.289845 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" podUID="44034162-7899-44d2-b36b-abedea905618" containerName="dnsmasq-dns" containerID="cri-o://b091ee6d2d6d57be6c7898aa901d0dc3f9573f551acb683b9762a407332f7cff" gracePeriod=10 Nov 27 08:51:48 crc kubenswrapper[4971]: I1127 08:51:48.859164 4971 generic.go:334] "Generic (PLEG): container finished" podID="44034162-7899-44d2-b36b-abedea905618" containerID="b091ee6d2d6d57be6c7898aa901d0dc3f9573f551acb683b9762a407332f7cff" exitCode=0 Nov 27 08:51:48 crc kubenswrapper[4971]: I1127 08:51:48.859619 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" event={"ID":"44034162-7899-44d2-b36b-abedea905618","Type":"ContainerDied","Data":"b091ee6d2d6d57be6c7898aa901d0dc3f9573f551acb683b9762a407332f7cff"} Nov 27 08:51:48 crc kubenswrapper[4971]: I1127 08:51:48.871614 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.018370 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-dns-svc\") pod \"44034162-7899-44d2-b36b-abedea905618\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.018462 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-ovsdbserver-sb\") pod \"44034162-7899-44d2-b36b-abedea905618\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.018525 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdlcp\" (UniqueName: \"kubernetes.io/projected/44034162-7899-44d2-b36b-abedea905618-kube-api-access-bdlcp\") pod \"44034162-7899-44d2-b36b-abedea905618\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.018706 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-ovsdbserver-nb\") pod \"44034162-7899-44d2-b36b-abedea905618\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.018734 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-config\") pod \"44034162-7899-44d2-b36b-abedea905618\" (UID: \"44034162-7899-44d2-b36b-abedea905618\") " Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.030331 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44034162-7899-44d2-b36b-abedea905618-kube-api-access-bdlcp" (OuterVolumeSpecName: "kube-api-access-bdlcp") pod "44034162-7899-44d2-b36b-abedea905618" (UID: "44034162-7899-44d2-b36b-abedea905618"). InnerVolumeSpecName "kube-api-access-bdlcp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.067979 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-config" (OuterVolumeSpecName: "config") pod "44034162-7899-44d2-b36b-abedea905618" (UID: "44034162-7899-44d2-b36b-abedea905618"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.068036 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "44034162-7899-44d2-b36b-abedea905618" (UID: "44034162-7899-44d2-b36b-abedea905618"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.072476 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "44034162-7899-44d2-b36b-abedea905618" (UID: "44034162-7899-44d2-b36b-abedea905618"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.098244 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "44034162-7899-44d2-b36b-abedea905618" (UID: "44034162-7899-44d2-b36b-abedea905618"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.121341 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.121596 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-config\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.121606 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.121613 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/44034162-7899-44d2-b36b-abedea905618-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.121625 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdlcp\" (UniqueName: \"kubernetes.io/projected/44034162-7899-44d2-b36b-abedea905618-kube-api-access-bdlcp\") on node \"crc\" DevicePath \"\"" Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.870983 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" event={"ID":"44034162-7899-44d2-b36b-abedea905618","Type":"ContainerDied","Data":"397beb53c5aa81ae3a470554c925ee0bbe353e183973d586698aa98813483fd4"} Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.871070 4971 scope.go:117] "RemoveContainer" containerID="b091ee6d2d6d57be6c7898aa901d0dc3f9573f551acb683b9762a407332f7cff" Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.871075 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54dd4f7d4c-8t54q" Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.909069 4971 scope.go:117] "RemoveContainer" containerID="13669135962cd4f22510fdc4c86b6b1a1d6d29a2f3ad8709a1f1d78d68940d63" Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.916366 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54dd4f7d4c-8t54q"] Nov 27 08:51:49 crc kubenswrapper[4971]: I1127 08:51:49.929669 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-54dd4f7d4c-8t54q"] Nov 27 08:51:50 crc kubenswrapper[4971]: I1127 08:51:50.560249 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44034162-7899-44d2-b36b-abedea905618" path="/var/lib/kubelet/pods/44034162-7899-44d2-b36b-abedea905618/volumes" Nov 27 08:51:57 crc kubenswrapper[4971]: I1127 08:51:57.550486 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:51:57 crc kubenswrapper[4971]: E1127 08:51:57.551419 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:52:08 crc kubenswrapper[4971]: I1127 08:52:08.510962 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5fd8b7cd95-lc5lp" Nov 27 08:52:11 crc kubenswrapper[4971]: I1127 08:52:11.550066 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:52:11 crc kubenswrapper[4971]: E1127 08:52:11.551207 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.663771 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-g4f6p"] Nov 27 08:52:15 crc kubenswrapper[4971]: E1127 08:52:15.666203 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44034162-7899-44d2-b36b-abedea905618" containerName="dnsmasq-dns" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.666224 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="44034162-7899-44d2-b36b-abedea905618" containerName="dnsmasq-dns" Nov 27 08:52:15 crc kubenswrapper[4971]: E1127 08:52:15.666243 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44034162-7899-44d2-b36b-abedea905618" containerName="init" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.666251 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="44034162-7899-44d2-b36b-abedea905618" containerName="init" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.666457 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="44034162-7899-44d2-b36b-abedea905618" containerName="dnsmasq-dns" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.668063 4971 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-g4f6p" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.695620 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-g4f6p"] Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.769015 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-96e4-account-create-update-h8wnp"] Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.771397 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-96e4-account-create-update-h8wnp" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.773592 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.806216 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-96e4-account-create-update-h8wnp"] Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.814604 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46a41356-c31e-4965-82a0-e836612bb0be-operator-scripts\") pod \"glance-db-create-g4f6p\" (UID: \"46a41356-c31e-4965-82a0-e836612bb0be\") " pod="openstack/glance-db-create-g4f6p" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.814942 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ttl4\" (UniqueName: \"kubernetes.io/projected/8fb11aa1-8a1f-4854-80b1-65240a37dacd-kube-api-access-6ttl4\") pod \"glance-96e4-account-create-update-h8wnp\" (UID: \"8fb11aa1-8a1f-4854-80b1-65240a37dacd\") " pod="openstack/glance-96e4-account-create-update-h8wnp" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.815157 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wk86j\" (UniqueName: \"kubernetes.io/projected/46a41356-c31e-4965-82a0-e836612bb0be-kube-api-access-wk86j\") pod \"glance-db-create-g4f6p\" (UID: \"46a41356-c31e-4965-82a0-e836612bb0be\") " pod="openstack/glance-db-create-g4f6p" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.815336 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8fb11aa1-8a1f-4854-80b1-65240a37dacd-operator-scripts\") pod \"glance-96e4-account-create-update-h8wnp\" (UID: \"8fb11aa1-8a1f-4854-80b1-65240a37dacd\") " pod="openstack/glance-96e4-account-create-update-h8wnp" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.916777 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8fb11aa1-8a1f-4854-80b1-65240a37dacd-operator-scripts\") pod \"glance-96e4-account-create-update-h8wnp\" (UID: \"8fb11aa1-8a1f-4854-80b1-65240a37dacd\") " pod="openstack/glance-96e4-account-create-update-h8wnp" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.916874 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46a41356-c31e-4965-82a0-e836612bb0be-operator-scripts\") pod \"glance-db-create-g4f6p\" (UID: \"46a41356-c31e-4965-82a0-e836612bb0be\") " pod="openstack/glance-db-create-g4f6p" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.916904 4971 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-6ttl4\" (UniqueName: \"kubernetes.io/projected/8fb11aa1-8a1f-4854-80b1-65240a37dacd-kube-api-access-6ttl4\") pod \"glance-96e4-account-create-update-h8wnp\" (UID: \"8fb11aa1-8a1f-4854-80b1-65240a37dacd\") " pod="openstack/glance-96e4-account-create-update-h8wnp" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.916934 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wk86j\" (UniqueName: \"kubernetes.io/projected/46a41356-c31e-4965-82a0-e836612bb0be-kube-api-access-wk86j\") pod \"glance-db-create-g4f6p\" (UID: \"46a41356-c31e-4965-82a0-e836612bb0be\") " pod="openstack/glance-db-create-g4f6p" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.917801 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8fb11aa1-8a1f-4854-80b1-65240a37dacd-operator-scripts\") pod \"glance-96e4-account-create-update-h8wnp\" (UID: \"8fb11aa1-8a1f-4854-80b1-65240a37dacd\") " pod="openstack/glance-96e4-account-create-update-h8wnp" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.917838 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46a41356-c31e-4965-82a0-e836612bb0be-operator-scripts\") pod \"glance-db-create-g4f6p\" (UID: \"46a41356-c31e-4965-82a0-e836612bb0be\") " pod="openstack/glance-db-create-g4f6p" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.936433 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ttl4\" (UniqueName: \"kubernetes.io/projected/8fb11aa1-8a1f-4854-80b1-65240a37dacd-kube-api-access-6ttl4\") pod \"glance-96e4-account-create-update-h8wnp\" (UID: \"8fb11aa1-8a1f-4854-80b1-65240a37dacd\") " pod="openstack/glance-96e4-account-create-update-h8wnp" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.936521 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wk86j\" (UniqueName: \"kubernetes.io/projected/46a41356-c31e-4965-82a0-e836612bb0be-kube-api-access-wk86j\") pod \"glance-db-create-g4f6p\" (UID: \"46a41356-c31e-4965-82a0-e836612bb0be\") " pod="openstack/glance-db-create-g4f6p" Nov 27 08:52:15 crc kubenswrapper[4971]: I1127 08:52:15.996486 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-g4f6p" Nov 27 08:52:16 crc kubenswrapper[4971]: I1127 08:52:16.106462 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-96e4-account-create-update-h8wnp" Nov 27 08:52:16 crc kubenswrapper[4971]: I1127 08:52:16.475370 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-g4f6p"] Nov 27 08:52:16 crc kubenswrapper[4971]: I1127 08:52:16.597686 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-96e4-account-create-update-h8wnp"] Nov 27 08:52:16 crc kubenswrapper[4971]: W1127 08:52:16.616323 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8fb11aa1_8a1f_4854_80b1_65240a37dacd.slice/crio-e4499721675537214716489a75be7578c9b998d4a06f3d78e4a9b22395f5eec7 WatchSource:0}: Error finding container e4499721675537214716489a75be7578c9b998d4a06f3d78e4a9b22395f5eec7: Status 404 returned error can't find the container with id e4499721675537214716489a75be7578c9b998d4a06f3d78e4a9b22395f5eec7 Nov 27 08:52:17 crc kubenswrapper[4971]: I1127 08:52:17.118982 4971 generic.go:334] "Generic (PLEG): container finished" podID="46a41356-c31e-4965-82a0-e836612bb0be" containerID="ee77c16458699ec5f0aa1db1aeaefe51b66693591eac7d455fd7720894485685" exitCode=0 Nov 27 08:52:17 crc kubenswrapper[4971]: I1127 08:52:17.119134 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-g4f6p" event={"ID":"46a41356-c31e-4965-82a0-e836612bb0be","Type":"ContainerDied","Data":"ee77c16458699ec5f0aa1db1aeaefe51b66693591eac7d455fd7720894485685"} Nov 27 08:52:17 crc kubenswrapper[4971]: I1127 08:52:17.119360 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-g4f6p" event={"ID":"46a41356-c31e-4965-82a0-e836612bb0be","Type":"ContainerStarted","Data":"7ddb15940ed1a7a69e6fef03ae35ac95de76972f5b780c5edbb719b6b49be113"} Nov 27 08:52:17 crc kubenswrapper[4971]: I1127 08:52:17.123295 4971 generic.go:334] "Generic (PLEG): container finished" podID="8fb11aa1-8a1f-4854-80b1-65240a37dacd" containerID="39926ee57c85d2ca278f72689d231b7b6c2326cc4084040de24294226786a3f1" exitCode=0 Nov 27 08:52:17 crc kubenswrapper[4971]: I1127 08:52:17.123328 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-96e4-account-create-update-h8wnp" event={"ID":"8fb11aa1-8a1f-4854-80b1-65240a37dacd","Type":"ContainerDied","Data":"39926ee57c85d2ca278f72689d231b7b6c2326cc4084040de24294226786a3f1"} Nov 27 08:52:17 crc kubenswrapper[4971]: I1127 08:52:17.123352 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-96e4-account-create-update-h8wnp" event={"ID":"8fb11aa1-8a1f-4854-80b1-65240a37dacd","Type":"ContainerStarted","Data":"e4499721675537214716489a75be7578c9b998d4a06f3d78e4a9b22395f5eec7"} Nov 27 08:52:18 crc kubenswrapper[4971]: I1127 08:52:18.521664 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-96e4-account-create-update-h8wnp" Nov 27 08:52:18 crc kubenswrapper[4971]: I1127 08:52:18.529330 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-g4f6p" Nov 27 08:52:18 crc kubenswrapper[4971]: I1127 08:52:18.672837 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8fb11aa1-8a1f-4854-80b1-65240a37dacd-operator-scripts\") pod \"8fb11aa1-8a1f-4854-80b1-65240a37dacd\" (UID: \"8fb11aa1-8a1f-4854-80b1-65240a37dacd\") " Nov 27 08:52:18 crc kubenswrapper[4971]: I1127 08:52:18.673046 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46a41356-c31e-4965-82a0-e836612bb0be-operator-scripts\") pod \"46a41356-c31e-4965-82a0-e836612bb0be\" (UID: \"46a41356-c31e-4965-82a0-e836612bb0be\") " Nov 27 08:52:18 crc kubenswrapper[4971]: I1127 08:52:18.673176 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wk86j\" (UniqueName: \"kubernetes.io/projected/46a41356-c31e-4965-82a0-e836612bb0be-kube-api-access-wk86j\") pod \"46a41356-c31e-4965-82a0-e836612bb0be\" (UID: \"46a41356-c31e-4965-82a0-e836612bb0be\") " Nov 27 08:52:18 crc kubenswrapper[4971]: I1127 08:52:18.673232 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ttl4\" (UniqueName: \"kubernetes.io/projected/8fb11aa1-8a1f-4854-80b1-65240a37dacd-kube-api-access-6ttl4\") pod \"8fb11aa1-8a1f-4854-80b1-65240a37dacd\" (UID: \"8fb11aa1-8a1f-4854-80b1-65240a37dacd\") " Nov 27 08:52:18 crc kubenswrapper[4971]: I1127 08:52:18.673436 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fb11aa1-8a1f-4854-80b1-65240a37dacd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8fb11aa1-8a1f-4854-80b1-65240a37dacd" (UID: "8fb11aa1-8a1f-4854-80b1-65240a37dacd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:52:18 crc kubenswrapper[4971]: I1127 08:52:18.673838 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8fb11aa1-8a1f-4854-80b1-65240a37dacd-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:18 crc kubenswrapper[4971]: I1127 08:52:18.673859 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46a41356-c31e-4965-82a0-e836612bb0be-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "46a41356-c31e-4965-82a0-e836612bb0be" (UID: "46a41356-c31e-4965-82a0-e836612bb0be"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:52:18 crc kubenswrapper[4971]: I1127 08:52:18.685826 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fb11aa1-8a1f-4854-80b1-65240a37dacd-kube-api-access-6ttl4" (OuterVolumeSpecName: "kube-api-access-6ttl4") pod "8fb11aa1-8a1f-4854-80b1-65240a37dacd" (UID: "8fb11aa1-8a1f-4854-80b1-65240a37dacd"). InnerVolumeSpecName "kube-api-access-6ttl4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:52:18 crc kubenswrapper[4971]: I1127 08:52:18.685867 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46a41356-c31e-4965-82a0-e836612bb0be-kube-api-access-wk86j" (OuterVolumeSpecName: "kube-api-access-wk86j") pod "46a41356-c31e-4965-82a0-e836612bb0be" (UID: "46a41356-c31e-4965-82a0-e836612bb0be"). InnerVolumeSpecName "kube-api-access-wk86j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:52:18 crc kubenswrapper[4971]: I1127 08:52:18.775727 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wk86j\" (UniqueName: \"kubernetes.io/projected/46a41356-c31e-4965-82a0-e836612bb0be-kube-api-access-wk86j\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:18 crc kubenswrapper[4971]: I1127 08:52:18.775772 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ttl4\" (UniqueName: \"kubernetes.io/projected/8fb11aa1-8a1f-4854-80b1-65240a37dacd-kube-api-access-6ttl4\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:18 crc kubenswrapper[4971]: I1127 08:52:18.775786 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46a41356-c31e-4965-82a0-e836612bb0be-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:19 crc kubenswrapper[4971]: I1127 08:52:19.140982 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-g4f6p" Nov 27 08:52:19 crc kubenswrapper[4971]: I1127 08:52:19.141004 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-g4f6p" event={"ID":"46a41356-c31e-4965-82a0-e836612bb0be","Type":"ContainerDied","Data":"7ddb15940ed1a7a69e6fef03ae35ac95de76972f5b780c5edbb719b6b49be113"} Nov 27 08:52:19 crc kubenswrapper[4971]: I1127 08:52:19.141049 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ddb15940ed1a7a69e6fef03ae35ac95de76972f5b780c5edbb719b6b49be113" Nov 27 08:52:19 crc kubenswrapper[4971]: I1127 08:52:19.143745 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-96e4-account-create-update-h8wnp" event={"ID":"8fb11aa1-8a1f-4854-80b1-65240a37dacd","Type":"ContainerDied","Data":"e4499721675537214716489a75be7578c9b998d4a06f3d78e4a9b22395f5eec7"} Nov 27 08:52:19 crc kubenswrapper[4971]: I1127 08:52:19.143790 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4499721675537214716489a75be7578c9b998d4a06f3d78e4a9b22395f5eec7" Nov 27 08:52:19 crc kubenswrapper[4971]: I1127 08:52:19.143860 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-96e4-account-create-update-h8wnp" Nov 27 08:52:20 crc kubenswrapper[4971]: I1127 08:52:20.944727 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-w997d"] Nov 27 08:52:20 crc kubenswrapper[4971]: E1127 08:52:20.945588 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46a41356-c31e-4965-82a0-e836612bb0be" containerName="mariadb-database-create" Nov 27 08:52:20 crc kubenswrapper[4971]: I1127 08:52:20.945604 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="46a41356-c31e-4965-82a0-e836612bb0be" containerName="mariadb-database-create" Nov 27 08:52:20 crc kubenswrapper[4971]: E1127 08:52:20.945628 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fb11aa1-8a1f-4854-80b1-65240a37dacd" containerName="mariadb-account-create-update" Nov 27 08:52:20 crc kubenswrapper[4971]: I1127 08:52:20.945634 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fb11aa1-8a1f-4854-80b1-65240a37dacd" containerName="mariadb-account-create-update" Nov 27 08:52:20 crc kubenswrapper[4971]: I1127 08:52:20.945791 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="46a41356-c31e-4965-82a0-e836612bb0be" containerName="mariadb-database-create" Nov 27 08:52:20 crc kubenswrapper[4971]: I1127 08:52:20.945816 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fb11aa1-8a1f-4854-80b1-65240a37dacd" containerName="mariadb-account-create-update" Nov 27 08:52:20 crc kubenswrapper[4971]: I1127 08:52:20.946447 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-w997d" Nov 27 08:52:20 crc kubenswrapper[4971]: I1127 08:52:20.948963 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Nov 27 08:52:20 crc kubenswrapper[4971]: I1127 08:52:20.949081 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-whfqh" Nov 27 08:52:20 crc kubenswrapper[4971]: I1127 08:52:20.956306 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-w997d"] Nov 27 08:52:21 crc kubenswrapper[4971]: I1127 08:52:21.015204 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/803c80a6-4a93-48a9-b3d6-5d7142c41863-config-data\") pod \"glance-db-sync-w997d\" (UID: \"803c80a6-4a93-48a9-b3d6-5d7142c41863\") " pod="openstack/glance-db-sync-w997d" Nov 27 08:52:21 crc kubenswrapper[4971]: I1127 08:52:21.015260 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/803c80a6-4a93-48a9-b3d6-5d7142c41863-combined-ca-bundle\") pod \"glance-db-sync-w997d\" (UID: \"803c80a6-4a93-48a9-b3d6-5d7142c41863\") " pod="openstack/glance-db-sync-w997d" Nov 27 08:52:21 crc kubenswrapper[4971]: I1127 08:52:21.015651 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvcgk\" (UniqueName: \"kubernetes.io/projected/803c80a6-4a93-48a9-b3d6-5d7142c41863-kube-api-access-rvcgk\") pod \"glance-db-sync-w997d\" (UID: \"803c80a6-4a93-48a9-b3d6-5d7142c41863\") " pod="openstack/glance-db-sync-w997d" Nov 27 08:52:21 crc kubenswrapper[4971]: I1127 08:52:21.015914 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/803c80a6-4a93-48a9-b3d6-5d7142c41863-db-sync-config-data\") pod \"glance-db-sync-w997d\" (UID: \"803c80a6-4a93-48a9-b3d6-5d7142c41863\") " pod="openstack/glance-db-sync-w997d" Nov 27 08:52:21 crc kubenswrapper[4971]: I1127 08:52:21.117815 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvcgk\" (UniqueName: \"kubernetes.io/projected/803c80a6-4a93-48a9-b3d6-5d7142c41863-kube-api-access-rvcgk\") pod \"glance-db-sync-w997d\" (UID: \"803c80a6-4a93-48a9-b3d6-5d7142c41863\") " pod="openstack/glance-db-sync-w997d" Nov 27 08:52:21 crc kubenswrapper[4971]: I1127 08:52:21.117897 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/803c80a6-4a93-48a9-b3d6-5d7142c41863-db-sync-config-data\") pod \"glance-db-sync-w997d\" (UID: \"803c80a6-4a93-48a9-b3d6-5d7142c41863\") " pod="openstack/glance-db-sync-w997d" Nov 27 08:52:21 crc kubenswrapper[4971]: I1127 08:52:21.117942 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/803c80a6-4a93-48a9-b3d6-5d7142c41863-config-data\") pod \"glance-db-sync-w997d\" (UID: \"803c80a6-4a93-48a9-b3d6-5d7142c41863\") " pod="openstack/glance-db-sync-w997d" Nov 27 08:52:21 crc kubenswrapper[4971]: I1127 08:52:21.117970 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/803c80a6-4a93-48a9-b3d6-5d7142c41863-combined-ca-bundle\") pod \"glance-db-sync-w997d\" (UID: \"803c80a6-4a93-48a9-b3d6-5d7142c41863\") " pod="openstack/glance-db-sync-w997d" Nov 27 08:52:21 crc kubenswrapper[4971]: I1127 08:52:21.126178 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/803c80a6-4a93-48a9-b3d6-5d7142c41863-combined-ca-bundle\") pod \"glance-db-sync-w997d\" (UID: \"803c80a6-4a93-48a9-b3d6-5d7142c41863\") " pod="openstack/glance-db-sync-w997d" Nov 27 08:52:21 crc kubenswrapper[4971]: I1127 08:52:21.131911 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/803c80a6-4a93-48a9-b3d6-5d7142c41863-db-sync-config-data\") pod \"glance-db-sync-w997d\" (UID: \"803c80a6-4a93-48a9-b3d6-5d7142c41863\") " pod="openstack/glance-db-sync-w997d" Nov 27 08:52:21 crc kubenswrapper[4971]: I1127 08:52:21.134811 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvcgk\" (UniqueName: \"kubernetes.io/projected/803c80a6-4a93-48a9-b3d6-5d7142c41863-kube-api-access-rvcgk\") pod \"glance-db-sync-w997d\" (UID: \"803c80a6-4a93-48a9-b3d6-5d7142c41863\") " pod="openstack/glance-db-sync-w997d" Nov 27 08:52:21 crc kubenswrapper[4971]: I1127 08:52:21.134834 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/803c80a6-4a93-48a9-b3d6-5d7142c41863-config-data\") pod \"glance-db-sync-w997d\" (UID: \"803c80a6-4a93-48a9-b3d6-5d7142c41863\") " pod="openstack/glance-db-sync-w997d" Nov 27 08:52:21 crc kubenswrapper[4971]: I1127 08:52:21.268744 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-w997d" Nov 27 08:52:21 crc kubenswrapper[4971]: I1127 08:52:21.864384 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-w997d"] Nov 27 08:52:21 crc kubenswrapper[4971]: I1127 08:52:21.873760 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 27 08:52:22 crc kubenswrapper[4971]: I1127 08:52:22.185683 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-w997d" event={"ID":"803c80a6-4a93-48a9-b3d6-5d7142c41863","Type":"ContainerStarted","Data":"8a0d1f4e905345f944bed4ce145708dad5ab6c9d2b3dcaee8d0bbdcf903b0d7d"} Nov 27 08:52:23 crc kubenswrapper[4971]: I1127 08:52:23.551941 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:52:23 crc kubenswrapper[4971]: E1127 08:52:23.552666 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:52:34 crc kubenswrapper[4971]: I1127 08:52:34.551016 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:52:34 crc kubenswrapper[4971]: E1127 08:52:34.552137 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:52:37 crc kubenswrapper[4971]: I1127 08:52:37.331007 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-w997d" event={"ID":"803c80a6-4a93-48a9-b3d6-5d7142c41863","Type":"ContainerStarted","Data":"194a0db8ab08a82ff3dcb919c604689550acf34b2e905554387735c50d3907a4"} Nov 27 08:52:37 crc kubenswrapper[4971]: I1127 08:52:37.356378 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-w997d" podStartSLOduration=2.660511881 podStartE2EDuration="17.356355196s" podCreationTimestamp="2025-11-27 08:52:20 +0000 UTC" firstStartedPulling="2025-11-27 08:52:21.873416373 +0000 UTC m=+7180.065460291" lastFinishedPulling="2025-11-27 08:52:36.569259678 +0000 UTC m=+7194.761303606" observedRunningTime="2025-11-27 08:52:37.352797394 +0000 UTC m=+7195.544841352" watchObservedRunningTime="2025-11-27 08:52:37.356355196 +0000 UTC m=+7195.548399134" Nov 27 08:52:41 crc kubenswrapper[4971]: I1127 08:52:41.393037 4971 generic.go:334] "Generic (PLEG): container finished" podID="803c80a6-4a93-48a9-b3d6-5d7142c41863" containerID="194a0db8ab08a82ff3dcb919c604689550acf34b2e905554387735c50d3907a4" exitCode=0 Nov 27 08:52:41 crc kubenswrapper[4971]: I1127 08:52:41.393243 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-w997d" event={"ID":"803c80a6-4a93-48a9-b3d6-5d7142c41863","Type":"ContainerDied","Data":"194a0db8ab08a82ff3dcb919c604689550acf34b2e905554387735c50d3907a4"} Nov 27 08:52:42 crc 
kubenswrapper[4971]: I1127 08:52:42.855760 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-w997d" Nov 27 08:52:42 crc kubenswrapper[4971]: I1127 08:52:42.995717 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/803c80a6-4a93-48a9-b3d6-5d7142c41863-combined-ca-bundle\") pod \"803c80a6-4a93-48a9-b3d6-5d7142c41863\" (UID: \"803c80a6-4a93-48a9-b3d6-5d7142c41863\") " Nov 27 08:52:42 crc kubenswrapper[4971]: I1127 08:52:42.996001 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/803c80a6-4a93-48a9-b3d6-5d7142c41863-db-sync-config-data\") pod \"803c80a6-4a93-48a9-b3d6-5d7142c41863\" (UID: \"803c80a6-4a93-48a9-b3d6-5d7142c41863\") " Nov 27 08:52:42 crc kubenswrapper[4971]: I1127 08:52:42.996033 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvcgk\" (UniqueName: \"kubernetes.io/projected/803c80a6-4a93-48a9-b3d6-5d7142c41863-kube-api-access-rvcgk\") pod \"803c80a6-4a93-48a9-b3d6-5d7142c41863\" (UID: \"803c80a6-4a93-48a9-b3d6-5d7142c41863\") " Nov 27 08:52:42 crc kubenswrapper[4971]: I1127 08:52:42.996057 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/803c80a6-4a93-48a9-b3d6-5d7142c41863-config-data\") pod \"803c80a6-4a93-48a9-b3d6-5d7142c41863\" (UID: \"803c80a6-4a93-48a9-b3d6-5d7142c41863\") " Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.002417 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/803c80a6-4a93-48a9-b3d6-5d7142c41863-kube-api-access-rvcgk" (OuterVolumeSpecName: "kube-api-access-rvcgk") pod "803c80a6-4a93-48a9-b3d6-5d7142c41863" (UID: "803c80a6-4a93-48a9-b3d6-5d7142c41863"). InnerVolumeSpecName "kube-api-access-rvcgk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.002718 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/803c80a6-4a93-48a9-b3d6-5d7142c41863-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "803c80a6-4a93-48a9-b3d6-5d7142c41863" (UID: "803c80a6-4a93-48a9-b3d6-5d7142c41863"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.029847 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/803c80a6-4a93-48a9-b3d6-5d7142c41863-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "803c80a6-4a93-48a9-b3d6-5d7142c41863" (UID: "803c80a6-4a93-48a9-b3d6-5d7142c41863"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.041236 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/803c80a6-4a93-48a9-b3d6-5d7142c41863-config-data" (OuterVolumeSpecName: "config-data") pod "803c80a6-4a93-48a9-b3d6-5d7142c41863" (UID: "803c80a6-4a93-48a9-b3d6-5d7142c41863"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.098623 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/803c80a6-4a93-48a9-b3d6-5d7142c41863-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.098682 4971 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/803c80a6-4a93-48a9-b3d6-5d7142c41863-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.098696 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvcgk\" (UniqueName: \"kubernetes.io/projected/803c80a6-4a93-48a9-b3d6-5d7142c41863-kube-api-access-rvcgk\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.098719 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/803c80a6-4a93-48a9-b3d6-5d7142c41863-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.409888 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-w997d" event={"ID":"803c80a6-4a93-48a9-b3d6-5d7142c41863","Type":"ContainerDied","Data":"8a0d1f4e905345f944bed4ce145708dad5ab6c9d2b3dcaee8d0bbdcf903b0d7d"} Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.409936 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a0d1f4e905345f944bed4ce145708dad5ab6c9d2b3dcaee8d0bbdcf903b0d7d" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.409997 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-w997d" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.737525 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 08:52:43 crc kubenswrapper[4971]: E1127 08:52:43.738478 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="803c80a6-4a93-48a9-b3d6-5d7142c41863" containerName="glance-db-sync" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.738506 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="803c80a6-4a93-48a9-b3d6-5d7142c41863" containerName="glance-db-sync" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.738787 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="803c80a6-4a93-48a9-b3d6-5d7142c41863" containerName="glance-db-sync" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.740073 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.745291 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.745667 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.745677 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-whfqh" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.745929 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.753985 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.815510 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-config-data\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.815590 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-664lt\" (UniqueName: \"kubernetes.io/projected/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-kube-api-access-664lt\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.815654 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-scripts\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.815687 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-logs\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.815716 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.815761 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.815832 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/projected/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-ceph\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.918063 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-config-data\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.918762 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-664lt\" (UniqueName: \"kubernetes.io/projected/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-kube-api-access-664lt\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.919038 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-scripts\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.919163 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-logs\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.919280 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.919443 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.919576 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-ceph\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.919692 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-logs\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.920094 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-httpd-run\") pod \"glance-default-external-api-0\" (UID: 
\"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.926969 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-scripts\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.927190 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-config-data\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.929835 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-ceph\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.931940 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.948516 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-664lt\" (UniqueName: \"kubernetes.io/projected/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-kube-api-access-664lt\") pod \"glance-default-external-api-0\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.956608 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d7bfb7d8c-drcrf"] Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.958095 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:52:43 crc kubenswrapper[4971]: I1127 08:52:43.981609 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d7bfb7d8c-drcrf"] Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.042073 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.043858 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.053828 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.072355 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.117936 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.129638 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzhk9\" (UniqueName: \"kubernetes.io/projected/c6169840-fce7-4bb4-94cb-2873386aaa85-kube-api-access-nzhk9\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.129954 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6169840-fce7-4bb4-94cb-2873386aaa85-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.130114 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48dkq\" (UniqueName: \"kubernetes.io/projected/5c490ae6-410f-4be8-a025-1c8813f27678-kube-api-access-48dkq\") pod \"dnsmasq-dns-d7bfb7d8c-drcrf\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.130247 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6169840-fce7-4bb4-94cb-2873386aaa85-logs\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.130363 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-dns-svc\") pod \"dnsmasq-dns-d7bfb7d8c-drcrf\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.130455 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6169840-fce7-4bb4-94cb-2873386aaa85-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.130699 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c6169840-fce7-4bb4-94cb-2873386aaa85-ceph\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.130833 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-ovsdbserver-nb\") pod \"dnsmasq-dns-d7bfb7d8c-drcrf\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.131936 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-ovsdbserver-sb\") pod \"dnsmasq-dns-d7bfb7d8c-drcrf\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.132047 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6169840-fce7-4bb4-94cb-2873386aaa85-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.132126 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-config\") pod \"dnsmasq-dns-d7bfb7d8c-drcrf\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.132235 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6169840-fce7-4bb4-94cb-2873386aaa85-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.241880 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-dns-svc\") pod \"dnsmasq-dns-d7bfb7d8c-drcrf\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.242345 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6169840-fce7-4bb4-94cb-2873386aaa85-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.242407 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c6169840-fce7-4bb4-94cb-2873386aaa85-ceph\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.242467 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-ovsdbserver-nb\") pod \"dnsmasq-dns-d7bfb7d8c-drcrf\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.242505 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-ovsdbserver-sb\") pod \"dnsmasq-dns-d7bfb7d8c-drcrf\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.245008 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-dns-svc\") pod 
\"dnsmasq-dns-d7bfb7d8c-drcrf\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.245956 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-ovsdbserver-nb\") pod \"dnsmasq-dns-d7bfb7d8c-drcrf\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.243523 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6169840-fce7-4bb4-94cb-2873386aaa85-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.246629 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-config\") pod \"dnsmasq-dns-d7bfb7d8c-drcrf\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.246871 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6169840-fce7-4bb4-94cb-2873386aaa85-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.247064 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzhk9\" (UniqueName: \"kubernetes.io/projected/c6169840-fce7-4bb4-94cb-2873386aaa85-kube-api-access-nzhk9\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.247295 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6169840-fce7-4bb4-94cb-2873386aaa85-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.247568 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48dkq\" (UniqueName: \"kubernetes.io/projected/5c490ae6-410f-4be8-a025-1c8813f27678-kube-api-access-48dkq\") pod \"dnsmasq-dns-d7bfb7d8c-drcrf\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.247701 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6169840-fce7-4bb4-94cb-2873386aaa85-logs\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.248244 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6169840-fce7-4bb4-94cb-2873386aaa85-logs\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " 
pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.246418 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-ovsdbserver-sb\") pod \"dnsmasq-dns-d7bfb7d8c-drcrf\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.249257 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-config\") pod \"dnsmasq-dns-d7bfb7d8c-drcrf\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.249749 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6169840-fce7-4bb4-94cb-2873386aaa85-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.251900 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6169840-fce7-4bb4-94cb-2873386aaa85-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.255877 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c6169840-fce7-4bb4-94cb-2873386aaa85-ceph\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.256663 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6169840-fce7-4bb4-94cb-2873386aaa85-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.258179 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6169840-fce7-4bb4-94cb-2873386aaa85-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.272273 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzhk9\" (UniqueName: \"kubernetes.io/projected/c6169840-fce7-4bb4-94cb-2873386aaa85-kube-api-access-nzhk9\") pod \"glance-default-internal-api-0\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.281346 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48dkq\" (UniqueName: \"kubernetes.io/projected/5c490ae6-410f-4be8-a025-1c8813f27678-kube-api-access-48dkq\") pod \"dnsmasq-dns-d7bfb7d8c-drcrf\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.325695 4971 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.378372 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 27 08:52:44 crc kubenswrapper[4971]: I1127 08:52:44.830844 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 08:52:45 crc kubenswrapper[4971]: I1127 08:52:45.057611 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d7bfb7d8c-drcrf"] Nov 27 08:52:45 crc kubenswrapper[4971]: I1127 08:52:45.067686 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 08:52:45 crc kubenswrapper[4971]: I1127 08:52:45.337921 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 08:52:45 crc kubenswrapper[4971]: I1127 08:52:45.532895 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4c3b5fbe-a258-40f8-8eb5-9cafc2614173","Type":"ContainerStarted","Data":"9730ada91b271884863c22b519eb0ef1901b462396625a83024f855e9cd12445"} Nov 27 08:52:45 crc kubenswrapper[4971]: I1127 08:52:45.537813 4971 generic.go:334] "Generic (PLEG): container finished" podID="5c490ae6-410f-4be8-a025-1c8813f27678" containerID="a6a8ba3d680c062035f287c37b86b2a0df2fc0906533f633a0cc89a242c6e0ff" exitCode=0 Nov 27 08:52:45 crc kubenswrapper[4971]: I1127 08:52:45.538665 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" event={"ID":"5c490ae6-410f-4be8-a025-1c8813f27678","Type":"ContainerDied","Data":"a6a8ba3d680c062035f287c37b86b2a0df2fc0906533f633a0cc89a242c6e0ff"} Nov 27 08:52:45 crc kubenswrapper[4971]: I1127 08:52:45.538702 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" event={"ID":"5c490ae6-410f-4be8-a025-1c8813f27678","Type":"ContainerStarted","Data":"5e0963611632e7bb24a05671ebe824436a2af63e54f0847ebb2904bc69369536"} Nov 27 08:52:45 crc kubenswrapper[4971]: I1127 08:52:45.541809 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c6169840-fce7-4bb4-94cb-2873386aaa85","Type":"ContainerStarted","Data":"e4f38fe710974f5279d59f1ac9eefc2638f5112f3a58938e628087ce1521c489"} Nov 27 08:52:46 crc kubenswrapper[4971]: I1127 08:52:46.553944 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:52:46 crc kubenswrapper[4971]: E1127 08:52:46.554836 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:52:46 crc kubenswrapper[4971]: I1127 08:52:46.560150 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4c3b5fbe-a258-40f8-8eb5-9cafc2614173" containerName="glance-httpd" containerID="cri-o://a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099" gracePeriod=30 Nov 27 08:52:46 crc kubenswrapper[4971]: I1127 08:52:46.560124 4971 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4c3b5fbe-a258-40f8-8eb5-9cafc2614173" containerName="glance-log" containerID="cri-o://345a8bb00c68b349f69ff1c4ec6d067bc38112678f0ae453ddd33f75739d15c5" gracePeriod=30 Nov 27 08:52:46 crc kubenswrapper[4971]: I1127 08:52:46.564334 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:52:46 crc kubenswrapper[4971]: I1127 08:52:46.564379 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4c3b5fbe-a258-40f8-8eb5-9cafc2614173","Type":"ContainerStarted","Data":"a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099"} Nov 27 08:52:46 crc kubenswrapper[4971]: I1127 08:52:46.564406 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4c3b5fbe-a258-40f8-8eb5-9cafc2614173","Type":"ContainerStarted","Data":"345a8bb00c68b349f69ff1c4ec6d067bc38112678f0ae453ddd33f75739d15c5"} Nov 27 08:52:46 crc kubenswrapper[4971]: I1127 08:52:46.564419 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" event={"ID":"5c490ae6-410f-4be8-a025-1c8813f27678","Type":"ContainerStarted","Data":"c5f4e4928894bc260a0740c500ef7cba3a8628447c67b31fe9c572ebdca23bfb"} Nov 27 08:52:46 crc kubenswrapper[4971]: I1127 08:52:46.571766 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c6169840-fce7-4bb4-94cb-2873386aaa85","Type":"ContainerStarted","Data":"dfb4fd7eb2fd9dbc8dcef5d31d106dee60c077d78b654e38808a289913870a13"} Nov 27 08:52:46 crc kubenswrapper[4971]: I1127 08:52:46.571838 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c6169840-fce7-4bb4-94cb-2873386aaa85","Type":"ContainerStarted","Data":"dd258d55bc5338eae2b3b0049f75a64b385eec00776089d0eb2fb5d1bec8e474"} Nov 27 08:52:46 crc kubenswrapper[4971]: I1127 08:52:46.583765 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.583739175 podStartE2EDuration="3.583739175s" podCreationTimestamp="2025-11-27 08:52:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:52:46.583061595 +0000 UTC m=+7204.775105513" watchObservedRunningTime="2025-11-27 08:52:46.583739175 +0000 UTC m=+7204.775783103" Nov 27 08:52:46 crc kubenswrapper[4971]: I1127 08:52:46.607286 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.607268729 podStartE2EDuration="2.607268729s" podCreationTimestamp="2025-11-27 08:52:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:52:46.60312829 +0000 UTC m=+7204.795172208" watchObservedRunningTime="2025-11-27 08:52:46.607268729 +0000 UTC m=+7204.799312657" Nov 27 08:52:46 crc kubenswrapper[4971]: I1127 08:52:46.634960 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" podStartSLOduration=3.634942391 podStartE2EDuration="3.634942391s" podCreationTimestamp="2025-11-27 08:52:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:52:46.629442943 +0000 UTC m=+7204.821486871" watchObservedRunningTime="2025-11-27 08:52:46.634942391 +0000 UTC m=+7204.826986309" Nov 27 08:52:46 crc kubenswrapper[4971]: E1127 08:52:46.983454 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4c3b5fbe_a258_40f8_8eb5_9cafc2614173.slice/crio-a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4c3b5fbe_a258_40f8_8eb5_9cafc2614173.slice/crio-conmon-a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099.scope\": RecentStats: unable to find data in memory cache]" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.305390 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.376222 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.529841 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-httpd-run\") pod \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.529888 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-combined-ca-bundle\") pod \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.529934 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-logs\") pod \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.530023 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-ceph\") pod \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.530160 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-scripts\") pod \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.530229 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-664lt\" (UniqueName: \"kubernetes.io/projected/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-kube-api-access-664lt\") pod \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.530291 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-config-data\") pod \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\" (UID: \"4c3b5fbe-a258-40f8-8eb5-9cafc2614173\") " Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.530423 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-logs" (OuterVolumeSpecName: "logs") pod "4c3b5fbe-a258-40f8-8eb5-9cafc2614173" (UID: "4c3b5fbe-a258-40f8-8eb5-9cafc2614173"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.530703 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-logs\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.530984 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4c3b5fbe-a258-40f8-8eb5-9cafc2614173" (UID: "4c3b5fbe-a258-40f8-8eb5-9cafc2614173"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.536823 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-scripts" (OuterVolumeSpecName: "scripts") pod "4c3b5fbe-a258-40f8-8eb5-9cafc2614173" (UID: "4c3b5fbe-a258-40f8-8eb5-9cafc2614173"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.536907 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-ceph" (OuterVolumeSpecName: "ceph") pod "4c3b5fbe-a258-40f8-8eb5-9cafc2614173" (UID: "4c3b5fbe-a258-40f8-8eb5-9cafc2614173"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.537760 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-kube-api-access-664lt" (OuterVolumeSpecName: "kube-api-access-664lt") pod "4c3b5fbe-a258-40f8-8eb5-9cafc2614173" (UID: "4c3b5fbe-a258-40f8-8eb5-9cafc2614173"). InnerVolumeSpecName "kube-api-access-664lt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.561129 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4c3b5fbe-a258-40f8-8eb5-9cafc2614173" (UID: "4c3b5fbe-a258-40f8-8eb5-9cafc2614173"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.584287 4971 generic.go:334] "Generic (PLEG): container finished" podID="4c3b5fbe-a258-40f8-8eb5-9cafc2614173" containerID="a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099" exitCode=0 Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.584323 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-config-data" (OuterVolumeSpecName: "config-data") pod "4c3b5fbe-a258-40f8-8eb5-9cafc2614173" (UID: "4c3b5fbe-a258-40f8-8eb5-9cafc2614173"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.584352 4971 generic.go:334] "Generic (PLEG): container finished" podID="4c3b5fbe-a258-40f8-8eb5-9cafc2614173" containerID="345a8bb00c68b349f69ff1c4ec6d067bc38112678f0ae453ddd33f75739d15c5" exitCode=143 Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.584386 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.584364 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4c3b5fbe-a258-40f8-8eb5-9cafc2614173","Type":"ContainerDied","Data":"a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099"} Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.584894 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4c3b5fbe-a258-40f8-8eb5-9cafc2614173","Type":"ContainerDied","Data":"345a8bb00c68b349f69ff1c4ec6d067bc38112678f0ae453ddd33f75739d15c5"} Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.585007 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4c3b5fbe-a258-40f8-8eb5-9cafc2614173","Type":"ContainerDied","Data":"9730ada91b271884863c22b519eb0ef1901b462396625a83024f855e9cd12445"} Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.584983 4971 scope.go:117] "RemoveContainer" containerID="a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.633042 4971 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.633088 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.633106 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-ceph\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.633116 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.633128 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-664lt\" (UniqueName: 
\"kubernetes.io/projected/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-kube-api-access-664lt\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.633139 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c3b5fbe-a258-40f8-8eb5-9cafc2614173-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.673151 4971 scope.go:117] "RemoveContainer" containerID="345a8bb00c68b349f69ff1c4ec6d067bc38112678f0ae453ddd33f75739d15c5" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.679680 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.689670 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.704902 4971 scope.go:117] "RemoveContainer" containerID="a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099" Nov 27 08:52:47 crc kubenswrapper[4971]: E1127 08:52:47.714922 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099\": container with ID starting with a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099 not found: ID does not exist" containerID="a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.714986 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099"} err="failed to get container status \"a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099\": rpc error: code = NotFound desc = could not find container \"a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099\": container with ID starting with a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099 not found: ID does not exist" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.715026 4971 scope.go:117] "RemoveContainer" containerID="345a8bb00c68b349f69ff1c4ec6d067bc38112678f0ae453ddd33f75739d15c5" Nov 27 08:52:47 crc kubenswrapper[4971]: E1127 08:52:47.715674 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"345a8bb00c68b349f69ff1c4ec6d067bc38112678f0ae453ddd33f75739d15c5\": container with ID starting with 345a8bb00c68b349f69ff1c4ec6d067bc38112678f0ae453ddd33f75739d15c5 not found: ID does not exist" containerID="345a8bb00c68b349f69ff1c4ec6d067bc38112678f0ae453ddd33f75739d15c5" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.715732 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"345a8bb00c68b349f69ff1c4ec6d067bc38112678f0ae453ddd33f75739d15c5"} err="failed to get container status \"345a8bb00c68b349f69ff1c4ec6d067bc38112678f0ae453ddd33f75739d15c5\": rpc error: code = NotFound desc = could not find container \"345a8bb00c68b349f69ff1c4ec6d067bc38112678f0ae453ddd33f75739d15c5\": container with ID starting with 345a8bb00c68b349f69ff1c4ec6d067bc38112678f0ae453ddd33f75739d15c5 not found: ID does not exist" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.715770 4971 scope.go:117] "RemoveContainer" containerID="a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099" Nov 27 
08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.716250 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099"} err="failed to get container status \"a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099\": rpc error: code = NotFound desc = could not find container \"a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099\": container with ID starting with a9dfb6da4108e03d3e5471093fa3f4436f8d65a5f3e8cc1cfe26cf26e0a80099 not found: ID does not exist" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.716278 4971 scope.go:117] "RemoveContainer" containerID="345a8bb00c68b349f69ff1c4ec6d067bc38112678f0ae453ddd33f75739d15c5" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.716736 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"345a8bb00c68b349f69ff1c4ec6d067bc38112678f0ae453ddd33f75739d15c5"} err="failed to get container status \"345a8bb00c68b349f69ff1c4ec6d067bc38112678f0ae453ddd33f75739d15c5\": rpc error: code = NotFound desc = could not find container \"345a8bb00c68b349f69ff1c4ec6d067bc38112678f0ae453ddd33f75739d15c5\": container with ID starting with 345a8bb00c68b349f69ff1c4ec6d067bc38112678f0ae453ddd33f75739d15c5 not found: ID does not exist" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.730007 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 08:52:47 crc kubenswrapper[4971]: E1127 08:52:47.730711 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c3b5fbe-a258-40f8-8eb5-9cafc2614173" containerName="glance-httpd" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.730740 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c3b5fbe-a258-40f8-8eb5-9cafc2614173" containerName="glance-httpd" Nov 27 08:52:47 crc kubenswrapper[4971]: E1127 08:52:47.730778 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c3b5fbe-a258-40f8-8eb5-9cafc2614173" containerName="glance-log" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.730796 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c3b5fbe-a258-40f8-8eb5-9cafc2614173" containerName="glance-log" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.731052 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c3b5fbe-a258-40f8-8eb5-9cafc2614173" containerName="glance-log" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.731088 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c3b5fbe-a258-40f8-8eb5-9cafc2614173" containerName="glance-httpd" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.732555 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.739972 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.754835 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.837947 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.838082 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-logs\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.838162 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-scripts\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.838233 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lqp8\" (UniqueName: \"kubernetes.io/projected/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-kube-api-access-5lqp8\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.838389 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.838591 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-config-data\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.838649 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-ceph\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.940699 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-logs\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " 
pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.940837 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-scripts\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.940936 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lqp8\" (UniqueName: \"kubernetes.io/projected/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-kube-api-access-5lqp8\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.940985 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.941069 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-config-data\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.941111 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-ceph\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.941180 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.941583 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-logs\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.942028 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.945588 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-ceph\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.945739 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-scripts\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.946555 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-config-data\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.948283 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:47 crc kubenswrapper[4971]: I1127 08:52:47.962704 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lqp8\" (UniqueName: \"kubernetes.io/projected/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-kube-api-access-5lqp8\") pod \"glance-default-external-api-0\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") " pod="openstack/glance-default-external-api-0" Nov 27 08:52:48 crc kubenswrapper[4971]: I1127 08:52:48.057126 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 27 08:52:48 crc kubenswrapper[4971]: I1127 08:52:48.569075 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c3b5fbe-a258-40f8-8eb5-9cafc2614173" path="/var/lib/kubelet/pods/4c3b5fbe-a258-40f8-8eb5-9cafc2614173/volumes" Nov 27 08:52:48 crc kubenswrapper[4971]: I1127 08:52:48.599201 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c6169840-fce7-4bb4-94cb-2873386aaa85" containerName="glance-log" containerID="cri-o://dd258d55bc5338eae2b3b0049f75a64b385eec00776089d0eb2fb5d1bec8e474" gracePeriod=30 Nov 27 08:52:48 crc kubenswrapper[4971]: I1127 08:52:48.599244 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c6169840-fce7-4bb4-94cb-2873386aaa85" containerName="glance-httpd" containerID="cri-o://dfb4fd7eb2fd9dbc8dcef5d31d106dee60c077d78b654e38808a289913870a13" gracePeriod=30 Nov 27 08:52:48 crc kubenswrapper[4971]: I1127 08:52:48.645569 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 08:52:48 crc kubenswrapper[4971]: W1127 08:52:48.654256 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4e3a631_76b8_4ab7_9836_b73e8ade5bac.slice/crio-62d4f3b69fa57d19b28989ab022404002f2ef7b5f1970c532a8df2d1eba1871f WatchSource:0}: Error finding container 62d4f3b69fa57d19b28989ab022404002f2ef7b5f1970c532a8df2d1eba1871f: Status 404 returned error can't find the container with id 62d4f3b69fa57d19b28989ab022404002f2ef7b5f1970c532a8df2d1eba1871f Nov 27 08:52:49 crc kubenswrapper[4971]: I1127 08:52:49.619224 4971 generic.go:334] "Generic (PLEG): container finished" podID="c6169840-fce7-4bb4-94cb-2873386aaa85" containerID="dfb4fd7eb2fd9dbc8dcef5d31d106dee60c077d78b654e38808a289913870a13" exitCode=0 Nov 27 
08:52:49 crc kubenswrapper[4971]: I1127 08:52:49.619734 4971 generic.go:334] "Generic (PLEG): container finished" podID="c6169840-fce7-4bb4-94cb-2873386aaa85" containerID="dd258d55bc5338eae2b3b0049f75a64b385eec00776089d0eb2fb5d1bec8e474" exitCode=143 Nov 27 08:52:49 crc kubenswrapper[4971]: I1127 08:52:49.619312 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c6169840-fce7-4bb4-94cb-2873386aaa85","Type":"ContainerDied","Data":"dfb4fd7eb2fd9dbc8dcef5d31d106dee60c077d78b654e38808a289913870a13"} Nov 27 08:52:49 crc kubenswrapper[4971]: I1127 08:52:49.619792 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c6169840-fce7-4bb4-94cb-2873386aaa85","Type":"ContainerDied","Data":"dd258d55bc5338eae2b3b0049f75a64b385eec00776089d0eb2fb5d1bec8e474"} Nov 27 08:52:49 crc kubenswrapper[4971]: I1127 08:52:49.622016 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f4e3a631-76b8-4ab7-9836-b73e8ade5bac","Type":"ContainerStarted","Data":"cd2fea27a85791a8405b4cc7497d181643216527344449c18ae97b29f58abd8c"} Nov 27 08:52:49 crc kubenswrapper[4971]: I1127 08:52:49.622037 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f4e3a631-76b8-4ab7-9836-b73e8ade5bac","Type":"ContainerStarted","Data":"62d4f3b69fa57d19b28989ab022404002f2ef7b5f1970c532a8df2d1eba1871f"} Nov 27 08:52:49 crc kubenswrapper[4971]: I1127 08:52:49.952439 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.089729 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6169840-fce7-4bb4-94cb-2873386aaa85-combined-ca-bundle\") pod \"c6169840-fce7-4bb4-94cb-2873386aaa85\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.089833 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6169840-fce7-4bb4-94cb-2873386aaa85-logs\") pod \"c6169840-fce7-4bb4-94cb-2873386aaa85\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.089907 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6169840-fce7-4bb4-94cb-2873386aaa85-httpd-run\") pod \"c6169840-fce7-4bb4-94cb-2873386aaa85\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.089946 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c6169840-fce7-4bb4-94cb-2873386aaa85-ceph\") pod \"c6169840-fce7-4bb4-94cb-2873386aaa85\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.090004 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6169840-fce7-4bb4-94cb-2873386aaa85-config-data\") pod \"c6169840-fce7-4bb4-94cb-2873386aaa85\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.090065 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6169840-fce7-4bb4-94cb-2873386aaa85-scripts\") pod \"c6169840-fce7-4bb4-94cb-2873386aaa85\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.090088 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzhk9\" (UniqueName: \"kubernetes.io/projected/c6169840-fce7-4bb4-94cb-2873386aaa85-kube-api-access-nzhk9\") pod \"c6169840-fce7-4bb4-94cb-2873386aaa85\" (UID: \"c6169840-fce7-4bb4-94cb-2873386aaa85\") " Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.090666 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6169840-fce7-4bb4-94cb-2873386aaa85-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c6169840-fce7-4bb4-94cb-2873386aaa85" (UID: "c6169840-fce7-4bb4-94cb-2873386aaa85"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.090735 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6169840-fce7-4bb4-94cb-2873386aaa85-logs" (OuterVolumeSpecName: "logs") pod "c6169840-fce7-4bb4-94cb-2873386aaa85" (UID: "c6169840-fce7-4bb4-94cb-2873386aaa85"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.096424 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6169840-fce7-4bb4-94cb-2873386aaa85-ceph" (OuterVolumeSpecName: "ceph") pod "c6169840-fce7-4bb4-94cb-2873386aaa85" (UID: "c6169840-fce7-4bb4-94cb-2873386aaa85"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.097916 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6169840-fce7-4bb4-94cb-2873386aaa85-kube-api-access-nzhk9" (OuterVolumeSpecName: "kube-api-access-nzhk9") pod "c6169840-fce7-4bb4-94cb-2873386aaa85" (UID: "c6169840-fce7-4bb4-94cb-2873386aaa85"). InnerVolumeSpecName "kube-api-access-nzhk9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.104190 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6169840-fce7-4bb4-94cb-2873386aaa85-scripts" (OuterVolumeSpecName: "scripts") pod "c6169840-fce7-4bb4-94cb-2873386aaa85" (UID: "c6169840-fce7-4bb4-94cb-2873386aaa85"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.123399 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6169840-fce7-4bb4-94cb-2873386aaa85-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c6169840-fce7-4bb4-94cb-2873386aaa85" (UID: "c6169840-fce7-4bb4-94cb-2873386aaa85"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.143689 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6169840-fce7-4bb4-94cb-2873386aaa85-config-data" (OuterVolumeSpecName: "config-data") pod "c6169840-fce7-4bb4-94cb-2873386aaa85" (UID: "c6169840-fce7-4bb4-94cb-2873386aaa85"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.192522 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6169840-fce7-4bb4-94cb-2873386aaa85-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.192596 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6169840-fce7-4bb4-94cb-2873386aaa85-logs\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.192608 4971 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6169840-fce7-4bb4-94cb-2873386aaa85-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.192619 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c6169840-fce7-4bb4-94cb-2873386aaa85-ceph\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.192628 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6169840-fce7-4bb4-94cb-2873386aaa85-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.192639 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6169840-fce7-4bb4-94cb-2873386aaa85-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.192650 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzhk9\" (UniqueName: \"kubernetes.io/projected/c6169840-fce7-4bb4-94cb-2873386aaa85-kube-api-access-nzhk9\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.634652 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c6169840-fce7-4bb4-94cb-2873386aaa85","Type":"ContainerDied","Data":"e4f38fe710974f5279d59f1ac9eefc2638f5112f3a58938e628087ce1521c489"} Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.634713 4971 scope.go:117] "RemoveContainer" containerID="dfb4fd7eb2fd9dbc8dcef5d31d106dee60c077d78b654e38808a289913870a13" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.634706 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.637954 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f4e3a631-76b8-4ab7-9836-b73e8ade5bac","Type":"ContainerStarted","Data":"8874bc68655bb4edb830936197a59ecf4a7b58cc0437cdc96485d9007a4d500e"} Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.662801 4971 scope.go:117] "RemoveContainer" containerID="dd258d55bc5338eae2b3b0049f75a64b385eec00776089d0eb2fb5d1bec8e474" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.684625 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.684599831 podStartE2EDuration="3.684599831s" podCreationTimestamp="2025-11-27 08:52:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:52:50.67514396 +0000 UTC m=+7208.867187888" watchObservedRunningTime="2025-11-27 08:52:50.684599831 +0000 UTC m=+7208.876643759" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.747108 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.756640 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.764814 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 08:52:50 crc kubenswrapper[4971]: E1127 08:52:50.765472 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6169840-fce7-4bb4-94cb-2873386aaa85" containerName="glance-log" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.765496 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6169840-fce7-4bb4-94cb-2873386aaa85" containerName="glance-log" Nov 27 08:52:50 crc kubenswrapper[4971]: E1127 08:52:50.765551 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6169840-fce7-4bb4-94cb-2873386aaa85" containerName="glance-httpd" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.765559 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6169840-fce7-4bb4-94cb-2873386aaa85" containerName="glance-httpd" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.765786 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6169840-fce7-4bb4-94cb-2873386aaa85" containerName="glance-log" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.765825 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6169840-fce7-4bb4-94cb-2873386aaa85" containerName="glance-httpd" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.767248 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.772153 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.774782 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.913246 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21546bf5-5b71-49ae-ba26-865ec87cda59-logs\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.913345 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/21546bf5-5b71-49ae-ba26-865ec87cda59-ceph\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.913416 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21546bf5-5b71-49ae-ba26-865ec87cda59-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.913440 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkgsq\" (UniqueName: \"kubernetes.io/projected/21546bf5-5b71-49ae-ba26-865ec87cda59-kube-api-access-hkgsq\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.913483 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21546bf5-5b71-49ae-ba26-865ec87cda59-scripts\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.913638 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21546bf5-5b71-49ae-ba26-865ec87cda59-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:50 crc kubenswrapper[4971]: I1127 08:52:50.913681 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21546bf5-5b71-49ae-ba26-865ec87cda59-config-data\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:51 crc kubenswrapper[4971]: I1127 08:52:51.015347 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/21546bf5-5b71-49ae-ba26-865ec87cda59-ceph\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " 
pod="openstack/glance-default-internal-api-0" Nov 27 08:52:51 crc kubenswrapper[4971]: I1127 08:52:51.015422 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21546bf5-5b71-49ae-ba26-865ec87cda59-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:51 crc kubenswrapper[4971]: I1127 08:52:51.015449 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkgsq\" (UniqueName: \"kubernetes.io/projected/21546bf5-5b71-49ae-ba26-865ec87cda59-kube-api-access-hkgsq\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:51 crc kubenswrapper[4971]: I1127 08:52:51.015486 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21546bf5-5b71-49ae-ba26-865ec87cda59-scripts\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:51 crc kubenswrapper[4971]: I1127 08:52:51.015519 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21546bf5-5b71-49ae-ba26-865ec87cda59-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:51 crc kubenswrapper[4971]: I1127 08:52:51.015561 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21546bf5-5b71-49ae-ba26-865ec87cda59-config-data\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:51 crc kubenswrapper[4971]: I1127 08:52:51.015601 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21546bf5-5b71-49ae-ba26-865ec87cda59-logs\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:51 crc kubenswrapper[4971]: I1127 08:52:51.016202 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21546bf5-5b71-49ae-ba26-865ec87cda59-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:51 crc kubenswrapper[4971]: I1127 08:52:51.016308 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21546bf5-5b71-49ae-ba26-865ec87cda59-logs\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:51 crc kubenswrapper[4971]: I1127 08:52:51.022389 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21546bf5-5b71-49ae-ba26-865ec87cda59-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:51 crc kubenswrapper[4971]: I1127 08:52:51.022948 4971 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/21546bf5-5b71-49ae-ba26-865ec87cda59-ceph\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:51 crc kubenswrapper[4971]: I1127 08:52:51.022952 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21546bf5-5b71-49ae-ba26-865ec87cda59-config-data\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:51 crc kubenswrapper[4971]: I1127 08:52:51.024624 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21546bf5-5b71-49ae-ba26-865ec87cda59-scripts\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:51 crc kubenswrapper[4971]: I1127 08:52:51.038181 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkgsq\" (UniqueName: \"kubernetes.io/projected/21546bf5-5b71-49ae-ba26-865ec87cda59-kube-api-access-hkgsq\") pod \"glance-default-internal-api-0\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:52:51 crc kubenswrapper[4971]: I1127 08:52:51.091011 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 27 08:52:51 crc kubenswrapper[4971]: I1127 08:52:51.638234 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 08:52:52 crc kubenswrapper[4971]: I1127 08:52:52.563685 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6169840-fce7-4bb4-94cb-2873386aaa85" path="/var/lib/kubelet/pods/c6169840-fce7-4bb4-94cb-2873386aaa85/volumes" Nov 27 08:52:52 crc kubenswrapper[4971]: I1127 08:52:52.662724 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"21546bf5-5b71-49ae-ba26-865ec87cda59","Type":"ContainerStarted","Data":"b9b5c435cc8b1b7f346bd8d7784742969317d3ad45f77756a85745dd3e261be8"} Nov 27 08:52:52 crc kubenswrapper[4971]: I1127 08:52:52.662793 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"21546bf5-5b71-49ae-ba26-865ec87cda59","Type":"ContainerStarted","Data":"09746a7fb951bfe924f654241dc5d324a76d2079fdd80935541f3b88a27e9878"} Nov 27 08:52:53 crc kubenswrapper[4971]: I1127 08:52:53.536901 4971 scope.go:117] "RemoveContainer" containerID="13a6d74dc8bdec9813e38ff6986663b014177a32bd3209549f994c2bea534c9d" Nov 27 08:52:53 crc kubenswrapper[4971]: I1127 08:52:53.674631 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"21546bf5-5b71-49ae-ba26-865ec87cda59","Type":"ContainerStarted","Data":"ccfde958dcaf43405b3c3ea5cf14dde53978c3a46b16171a3e8ce688894c0bb4"} Nov 27 08:52:53 crc kubenswrapper[4971]: I1127 08:52:53.700136 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.700105827 podStartE2EDuration="3.700105827s" podCreationTimestamp="2025-11-27 08:52:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2025-11-27 08:52:53.693292972 +0000 UTC m=+7211.885336910" watchObservedRunningTime="2025-11-27 08:52:53.700105827 +0000 UTC m=+7211.892149775" Nov 27 08:52:54 crc kubenswrapper[4971]: I1127 08:52:54.327781 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:52:54 crc kubenswrapper[4971]: I1127 08:52:54.400255 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8bddfb579-775j9"] Nov 27 08:52:54 crc kubenswrapper[4971]: I1127 08:52:54.401097 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8bddfb579-775j9" podUID="70bc4fa8-853f-476c-aa6a-50aa1e80391f" containerName="dnsmasq-dns" containerID="cri-o://331afbf72ff0bf220438d6c691d8c98abf9046d9d106413b7716af7f23f67e5c" gracePeriod=10 Nov 27 08:52:54 crc kubenswrapper[4971]: I1127 08:52:54.709427 4971 generic.go:334] "Generic (PLEG): container finished" podID="70bc4fa8-853f-476c-aa6a-50aa1e80391f" containerID="331afbf72ff0bf220438d6c691d8c98abf9046d9d106413b7716af7f23f67e5c" exitCode=0 Nov 27 08:52:54 crc kubenswrapper[4971]: I1127 08:52:54.709472 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8bddfb579-775j9" event={"ID":"70bc4fa8-853f-476c-aa6a-50aa1e80391f","Type":"ContainerDied","Data":"331afbf72ff0bf220438d6c691d8c98abf9046d9d106413b7716af7f23f67e5c"} Nov 27 08:52:54 crc kubenswrapper[4971]: I1127 08:52:54.898799 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.009613 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-ovsdbserver-nb\") pod \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.009660 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-ovsdbserver-sb\") pod \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.009739 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-dns-svc\") pod \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.009767 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ksxqv\" (UniqueName: \"kubernetes.io/projected/70bc4fa8-853f-476c-aa6a-50aa1e80391f-kube-api-access-ksxqv\") pod \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.009942 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-config\") pod \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\" (UID: \"70bc4fa8-853f-476c-aa6a-50aa1e80391f\") " Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.017118 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/70bc4fa8-853f-476c-aa6a-50aa1e80391f-kube-api-access-ksxqv" (OuterVolumeSpecName: "kube-api-access-ksxqv") pod "70bc4fa8-853f-476c-aa6a-50aa1e80391f" (UID: "70bc4fa8-853f-476c-aa6a-50aa1e80391f"). InnerVolumeSpecName "kube-api-access-ksxqv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.062013 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "70bc4fa8-853f-476c-aa6a-50aa1e80391f" (UID: "70bc4fa8-853f-476c-aa6a-50aa1e80391f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.063605 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "70bc4fa8-853f-476c-aa6a-50aa1e80391f" (UID: "70bc4fa8-853f-476c-aa6a-50aa1e80391f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.065345 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-config" (OuterVolumeSpecName: "config") pod "70bc4fa8-853f-476c-aa6a-50aa1e80391f" (UID: "70bc4fa8-853f-476c-aa6a-50aa1e80391f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.076010 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "70bc4fa8-853f-476c-aa6a-50aa1e80391f" (UID: "70bc4fa8-853f-476c-aa6a-50aa1e80391f"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.115054 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.115117 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.115136 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ksxqv\" (UniqueName: \"kubernetes.io/projected/70bc4fa8-853f-476c-aa6a-50aa1e80391f-kube-api-access-ksxqv\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.115153 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.115168 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70bc4fa8-853f-476c-aa6a-50aa1e80391f-config\") on node \"crc\" DevicePath \"\"" Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.720746 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8bddfb579-775j9" event={"ID":"70bc4fa8-853f-476c-aa6a-50aa1e80391f","Type":"ContainerDied","Data":"9fb908e34809f98d862e5c540d572570c7c9c460d73e45987643dcd9267e923f"} Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.720823 4971 scope.go:117] "RemoveContainer" containerID="331afbf72ff0bf220438d6c691d8c98abf9046d9d106413b7716af7f23f67e5c" Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.720917 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8bddfb579-775j9" Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.763705 4971 scope.go:117] "RemoveContainer" containerID="335b391eb5641133661ad248c6db06c4ae8a7d5b1da973f85bbccfa52de515cf" Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.783577 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8bddfb579-775j9"] Nov 27 08:52:55 crc kubenswrapper[4971]: I1127 08:52:55.791426 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8bddfb579-775j9"] Nov 27 08:52:56 crc kubenswrapper[4971]: I1127 08:52:56.560741 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70bc4fa8-853f-476c-aa6a-50aa1e80391f" path="/var/lib/kubelet/pods/70bc4fa8-853f-476c-aa6a-50aa1e80391f/volumes" Nov 27 08:52:58 crc kubenswrapper[4971]: I1127 08:52:58.058358 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 27 08:52:58 crc kubenswrapper[4971]: I1127 08:52:58.058872 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 27 08:52:58 crc kubenswrapper[4971]: I1127 08:52:58.093586 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 27 08:52:58 crc kubenswrapper[4971]: I1127 08:52:58.110951 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 27 08:52:58 crc kubenswrapper[4971]: I1127 08:52:58.769196 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 27 08:52:58 crc kubenswrapper[4971]: I1127 08:52:58.769269 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 27 08:52:59 crc kubenswrapper[4971]: I1127 08:52:59.551562 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:52:59 crc kubenswrapper[4971]: E1127 08:52:59.553453 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:53:00 crc kubenswrapper[4971]: I1127 08:53:00.743175 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 27 08:53:00 crc kubenswrapper[4971]: I1127 08:53:00.786715 4971 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 27 08:53:01 crc kubenswrapper[4971]: I1127 08:53:01.009263 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 27 08:53:01 crc kubenswrapper[4971]: I1127 08:53:01.091892 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 27 08:53:01 crc kubenswrapper[4971]: I1127 08:53:01.091965 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 27 08:53:01 crc kubenswrapper[4971]: I1127 08:53:01.131047 4971 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 27 08:53:01 crc kubenswrapper[4971]: I1127 08:53:01.150486 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 27 08:53:01 crc kubenswrapper[4971]: I1127 08:53:01.794508 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 27 08:53:01 crc kubenswrapper[4971]: I1127 08:53:01.794919 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 27 08:53:03 crc kubenswrapper[4971]: I1127 08:53:03.809792 4971 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 27 08:53:03 crc kubenswrapper[4971]: I1127 08:53:03.810357 4971 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 27 08:53:03 crc kubenswrapper[4971]: I1127 08:53:03.827320 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 27 08:53:03 crc kubenswrapper[4971]: I1127 08:53:03.843265 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 27 08:53:09 crc kubenswrapper[4971]: I1127 08:53:09.867146 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-2vn4z"] Nov 27 08:53:09 crc kubenswrapper[4971]: E1127 08:53:09.868010 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70bc4fa8-853f-476c-aa6a-50aa1e80391f" containerName="init" Nov 27 08:53:09 crc kubenswrapper[4971]: I1127 08:53:09.868029 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="70bc4fa8-853f-476c-aa6a-50aa1e80391f" containerName="init" Nov 27 08:53:09 crc kubenswrapper[4971]: E1127 08:53:09.868070 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70bc4fa8-853f-476c-aa6a-50aa1e80391f" containerName="dnsmasq-dns" Nov 27 08:53:09 crc kubenswrapper[4971]: I1127 08:53:09.868078 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="70bc4fa8-853f-476c-aa6a-50aa1e80391f" containerName="dnsmasq-dns" Nov 27 08:53:09 crc kubenswrapper[4971]: I1127 08:53:09.868284 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="70bc4fa8-853f-476c-aa6a-50aa1e80391f" containerName="dnsmasq-dns" Nov 27 08:53:09 crc kubenswrapper[4971]: I1127 08:53:09.869551 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-2vn4z" Nov 27 08:53:09 crc kubenswrapper[4971]: I1127 08:53:09.888521 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-2vn4z"] Nov 27 08:53:09 crc kubenswrapper[4971]: I1127 08:53:09.930086 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ts44\" (UniqueName: \"kubernetes.io/projected/90d61a54-6dcb-4eec-971c-f9a93fda7d12-kube-api-access-8ts44\") pod \"placement-db-create-2vn4z\" (UID: \"90d61a54-6dcb-4eec-971c-f9a93fda7d12\") " pod="openstack/placement-db-create-2vn4z" Nov 27 08:53:09 crc kubenswrapper[4971]: I1127 08:53:09.930367 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90d61a54-6dcb-4eec-971c-f9a93fda7d12-operator-scripts\") pod \"placement-db-create-2vn4z\" (UID: \"90d61a54-6dcb-4eec-971c-f9a93fda7d12\") " pod="openstack/placement-db-create-2vn4z" Nov 27 08:53:09 crc kubenswrapper[4971]: I1127 08:53:09.970507 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-2bea-account-create-update-n5z98"] Nov 27 08:53:09 crc kubenswrapper[4971]: I1127 08:53:09.973130 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-2bea-account-create-update-n5z98" Nov 27 08:53:09 crc kubenswrapper[4971]: I1127 08:53:09.975824 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Nov 27 08:53:09 crc kubenswrapper[4971]: I1127 08:53:09.983110 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-2bea-account-create-update-n5z98"] Nov 27 08:53:10 crc kubenswrapper[4971]: I1127 08:53:10.032692 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90d61a54-6dcb-4eec-971c-f9a93fda7d12-operator-scripts\") pod \"placement-db-create-2vn4z\" (UID: \"90d61a54-6dcb-4eec-971c-f9a93fda7d12\") " pod="openstack/placement-db-create-2vn4z" Nov 27 08:53:10 crc kubenswrapper[4971]: I1127 08:53:10.032793 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgfck\" (UniqueName: \"kubernetes.io/projected/76fa5095-c64b-4ed1-b5ed-221c8f85db60-kube-api-access-kgfck\") pod \"placement-2bea-account-create-update-n5z98\" (UID: \"76fa5095-c64b-4ed1-b5ed-221c8f85db60\") " pod="openstack/placement-2bea-account-create-update-n5z98" Nov 27 08:53:10 crc kubenswrapper[4971]: I1127 08:53:10.032822 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76fa5095-c64b-4ed1-b5ed-221c8f85db60-operator-scripts\") pod \"placement-2bea-account-create-update-n5z98\" (UID: \"76fa5095-c64b-4ed1-b5ed-221c8f85db60\") " pod="openstack/placement-2bea-account-create-update-n5z98" Nov 27 08:53:10 crc kubenswrapper[4971]: I1127 08:53:10.032858 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ts44\" (UniqueName: \"kubernetes.io/projected/90d61a54-6dcb-4eec-971c-f9a93fda7d12-kube-api-access-8ts44\") pod \"placement-db-create-2vn4z\" (UID: \"90d61a54-6dcb-4eec-971c-f9a93fda7d12\") " pod="openstack/placement-db-create-2vn4z" Nov 27 08:53:10 crc kubenswrapper[4971]: I1127 08:53:10.035875 4971 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90d61a54-6dcb-4eec-971c-f9a93fda7d12-operator-scripts\") pod \"placement-db-create-2vn4z\" (UID: \"90d61a54-6dcb-4eec-971c-f9a93fda7d12\") " pod="openstack/placement-db-create-2vn4z" Nov 27 08:53:10 crc kubenswrapper[4971]: I1127 08:53:10.061363 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ts44\" (UniqueName: \"kubernetes.io/projected/90d61a54-6dcb-4eec-971c-f9a93fda7d12-kube-api-access-8ts44\") pod \"placement-db-create-2vn4z\" (UID: \"90d61a54-6dcb-4eec-971c-f9a93fda7d12\") " pod="openstack/placement-db-create-2vn4z" Nov 27 08:53:10 crc kubenswrapper[4971]: I1127 08:53:10.135764 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgfck\" (UniqueName: \"kubernetes.io/projected/76fa5095-c64b-4ed1-b5ed-221c8f85db60-kube-api-access-kgfck\") pod \"placement-2bea-account-create-update-n5z98\" (UID: \"76fa5095-c64b-4ed1-b5ed-221c8f85db60\") " pod="openstack/placement-2bea-account-create-update-n5z98" Nov 27 08:53:10 crc kubenswrapper[4971]: I1127 08:53:10.135849 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76fa5095-c64b-4ed1-b5ed-221c8f85db60-operator-scripts\") pod \"placement-2bea-account-create-update-n5z98\" (UID: \"76fa5095-c64b-4ed1-b5ed-221c8f85db60\") " pod="openstack/placement-2bea-account-create-update-n5z98" Nov 27 08:53:10 crc kubenswrapper[4971]: I1127 08:53:10.137307 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76fa5095-c64b-4ed1-b5ed-221c8f85db60-operator-scripts\") pod \"placement-2bea-account-create-update-n5z98\" (UID: \"76fa5095-c64b-4ed1-b5ed-221c8f85db60\") " pod="openstack/placement-2bea-account-create-update-n5z98" Nov 27 08:53:10 crc kubenswrapper[4971]: I1127 08:53:10.162233 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgfck\" (UniqueName: \"kubernetes.io/projected/76fa5095-c64b-4ed1-b5ed-221c8f85db60-kube-api-access-kgfck\") pod \"placement-2bea-account-create-update-n5z98\" (UID: \"76fa5095-c64b-4ed1-b5ed-221c8f85db60\") " pod="openstack/placement-2bea-account-create-update-n5z98" Nov 27 08:53:10 crc kubenswrapper[4971]: I1127 08:53:10.191847 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-2vn4z" Nov 27 08:53:10 crc kubenswrapper[4971]: I1127 08:53:10.292839 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-2bea-account-create-update-n5z98" Nov 27 08:53:10 crc kubenswrapper[4971]: I1127 08:53:10.493368 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-2vn4z"] Nov 27 08:53:10 crc kubenswrapper[4971]: W1127 08:53:10.812464 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76fa5095_c64b_4ed1_b5ed_221c8f85db60.slice/crio-2d48a402808ad1f26c962a7e228f22a9cf9ed5b463992058fc504f3e7483ae0e WatchSource:0}: Error finding container 2d48a402808ad1f26c962a7e228f22a9cf9ed5b463992058fc504f3e7483ae0e: Status 404 returned error can't find the container with id 2d48a402808ad1f26c962a7e228f22a9cf9ed5b463992058fc504f3e7483ae0e Nov 27 08:53:10 crc kubenswrapper[4971]: I1127 08:53:10.820746 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-2bea-account-create-update-n5z98"] Nov 27 08:53:10 crc kubenswrapper[4971]: I1127 08:53:10.894936 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-2vn4z" event={"ID":"90d61a54-6dcb-4eec-971c-f9a93fda7d12","Type":"ContainerStarted","Data":"e75750b3e68dab7e4b1472f97a570c72b6a25b4a1fb0daae229530905fbed035"} Nov 27 08:53:10 crc kubenswrapper[4971]: I1127 08:53:10.895022 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-2vn4z" event={"ID":"90d61a54-6dcb-4eec-971c-f9a93fda7d12","Type":"ContainerStarted","Data":"a317dcf33a8cb7e3046362462a7d0ec6ab3178d4ead28f0963874595fcd537c1"} Nov 27 08:53:10 crc kubenswrapper[4971]: I1127 08:53:10.897999 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-2bea-account-create-update-n5z98" event={"ID":"76fa5095-c64b-4ed1-b5ed-221c8f85db60","Type":"ContainerStarted","Data":"2d48a402808ad1f26c962a7e228f22a9cf9ed5b463992058fc504f3e7483ae0e"} Nov 27 08:53:10 crc kubenswrapper[4971]: I1127 08:53:10.921638 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-2vn4z" podStartSLOduration=1.921491541 podStartE2EDuration="1.921491541s" podCreationTimestamp="2025-11-27 08:53:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:53:10.914893302 +0000 UTC m=+7229.106937220" watchObservedRunningTime="2025-11-27 08:53:10.921491541 +0000 UTC m=+7229.113535459" Nov 27 08:53:11 crc kubenswrapper[4971]: I1127 08:53:11.908184 4971 generic.go:334] "Generic (PLEG): container finished" podID="90d61a54-6dcb-4eec-971c-f9a93fda7d12" containerID="e75750b3e68dab7e4b1472f97a570c72b6a25b4a1fb0daae229530905fbed035" exitCode=0 Nov 27 08:53:11 crc kubenswrapper[4971]: I1127 08:53:11.908358 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-2vn4z" event={"ID":"90d61a54-6dcb-4eec-971c-f9a93fda7d12","Type":"ContainerDied","Data":"e75750b3e68dab7e4b1472f97a570c72b6a25b4a1fb0daae229530905fbed035"} Nov 27 08:53:11 crc kubenswrapper[4971]: I1127 08:53:11.911513 4971 generic.go:334] "Generic (PLEG): container finished" podID="76fa5095-c64b-4ed1-b5ed-221c8f85db60" containerID="35620458da3e080f33f43cc93fb770b2cc1cd6250a32765da4d35255073653d4" exitCode=0 Nov 27 08:53:11 crc kubenswrapper[4971]: I1127 08:53:11.911555 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-2bea-account-create-update-n5z98" 
event={"ID":"76fa5095-c64b-4ed1-b5ed-221c8f85db60","Type":"ContainerDied","Data":"35620458da3e080f33f43cc93fb770b2cc1cd6250a32765da4d35255073653d4"} Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.421337 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-2vn4z" Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.428670 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-2bea-account-create-update-n5z98" Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.506129 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgfck\" (UniqueName: \"kubernetes.io/projected/76fa5095-c64b-4ed1-b5ed-221c8f85db60-kube-api-access-kgfck\") pod \"76fa5095-c64b-4ed1-b5ed-221c8f85db60\" (UID: \"76fa5095-c64b-4ed1-b5ed-221c8f85db60\") " Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.506824 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90d61a54-6dcb-4eec-971c-f9a93fda7d12-operator-scripts\") pod \"90d61a54-6dcb-4eec-971c-f9a93fda7d12\" (UID: \"90d61a54-6dcb-4eec-971c-f9a93fda7d12\") " Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.506904 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76fa5095-c64b-4ed1-b5ed-221c8f85db60-operator-scripts\") pod \"76fa5095-c64b-4ed1-b5ed-221c8f85db60\" (UID: \"76fa5095-c64b-4ed1-b5ed-221c8f85db60\") " Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.506956 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ts44\" (UniqueName: \"kubernetes.io/projected/90d61a54-6dcb-4eec-971c-f9a93fda7d12-kube-api-access-8ts44\") pod \"90d61a54-6dcb-4eec-971c-f9a93fda7d12\" (UID: \"90d61a54-6dcb-4eec-971c-f9a93fda7d12\") " Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.507933 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76fa5095-c64b-4ed1-b5ed-221c8f85db60-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "76fa5095-c64b-4ed1-b5ed-221c8f85db60" (UID: "76fa5095-c64b-4ed1-b5ed-221c8f85db60"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.510250 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90d61a54-6dcb-4eec-971c-f9a93fda7d12-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "90d61a54-6dcb-4eec-971c-f9a93fda7d12" (UID: "90d61a54-6dcb-4eec-971c-f9a93fda7d12"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.514297 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76fa5095-c64b-4ed1-b5ed-221c8f85db60-kube-api-access-kgfck" (OuterVolumeSpecName: "kube-api-access-kgfck") pod "76fa5095-c64b-4ed1-b5ed-221c8f85db60" (UID: "76fa5095-c64b-4ed1-b5ed-221c8f85db60"). InnerVolumeSpecName "kube-api-access-kgfck". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.514793 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90d61a54-6dcb-4eec-971c-f9a93fda7d12-kube-api-access-8ts44" (OuterVolumeSpecName: "kube-api-access-8ts44") pod "90d61a54-6dcb-4eec-971c-f9a93fda7d12" (UID: "90d61a54-6dcb-4eec-971c-f9a93fda7d12"). InnerVolumeSpecName "kube-api-access-8ts44". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.552054 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:53:13 crc kubenswrapper[4971]: E1127 08:53:13.552316 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.613214 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgfck\" (UniqueName: \"kubernetes.io/projected/76fa5095-c64b-4ed1-b5ed-221c8f85db60-kube-api-access-kgfck\") on node \"crc\" DevicePath \"\"" Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.613246 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90d61a54-6dcb-4eec-971c-f9a93fda7d12-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.613258 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76fa5095-c64b-4ed1-b5ed-221c8f85db60-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.613270 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ts44\" (UniqueName: \"kubernetes.io/projected/90d61a54-6dcb-4eec-971c-f9a93fda7d12-kube-api-access-8ts44\") on node \"crc\" DevicePath \"\"" Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.941404 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-2vn4z" event={"ID":"90d61a54-6dcb-4eec-971c-f9a93fda7d12","Type":"ContainerDied","Data":"a317dcf33a8cb7e3046362462a7d0ec6ab3178d4ead28f0963874595fcd537c1"} Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.941463 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a317dcf33a8cb7e3046362462a7d0ec6ab3178d4ead28f0963874595fcd537c1" Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.941569 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-2vn4z" Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.947410 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-2bea-account-create-update-n5z98" event={"ID":"76fa5095-c64b-4ed1-b5ed-221c8f85db60","Type":"ContainerDied","Data":"2d48a402808ad1f26c962a7e228f22a9cf9ed5b463992058fc504f3e7483ae0e"} Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.947465 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d48a402808ad1f26c962a7e228f22a9cf9ed5b463992058fc504f3e7483ae0e" Nov 27 08:53:13 crc kubenswrapper[4971]: I1127 08:53:13.947556 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-2bea-account-create-update-n5z98" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.229421 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-5949w"] Nov 27 08:53:15 crc kubenswrapper[4971]: E1127 08:53:15.231002 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76fa5095-c64b-4ed1-b5ed-221c8f85db60" containerName="mariadb-account-create-update" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.231024 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="76fa5095-c64b-4ed1-b5ed-221c8f85db60" containerName="mariadb-account-create-update" Nov 27 08:53:15 crc kubenswrapper[4971]: E1127 08:53:15.231058 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90d61a54-6dcb-4eec-971c-f9a93fda7d12" containerName="mariadb-database-create" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.231064 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="90d61a54-6dcb-4eec-971c-f9a93fda7d12" containerName="mariadb-database-create" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.231576 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="90d61a54-6dcb-4eec-971c-f9a93fda7d12" containerName="mariadb-database-create" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.231631 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="76fa5095-c64b-4ed1-b5ed-221c8f85db60" containerName="mariadb-account-create-update" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.253042 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-5949w" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.258056 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-474xz" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.258355 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.262850 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.266586 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-5949w"] Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.288418 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-559c7c46ff-gddgr"] Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.290557 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.301313 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-559c7c46ff-gddgr"] Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.355416 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75d5055d-85b2-4925-b62c-1cfa9acaf383-logs\") pod \"placement-db-sync-5949w\" (UID: \"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " pod="openstack/placement-db-sync-5949w" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.355484 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75d5055d-85b2-4925-b62c-1cfa9acaf383-config-data\") pod \"placement-db-sync-5949w\" (UID: \"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " pod="openstack/placement-db-sync-5949w" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.355507 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75d5055d-85b2-4925-b62c-1cfa9acaf383-combined-ca-bundle\") pod \"placement-db-sync-5949w\" (UID: \"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " pod="openstack/placement-db-sync-5949w" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.355604 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75d5055d-85b2-4925-b62c-1cfa9acaf383-scripts\") pod \"placement-db-sync-5949w\" (UID: \"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " pod="openstack/placement-db-sync-5949w" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.355648 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9wj4\" (UniqueName: \"kubernetes.io/projected/75d5055d-85b2-4925-b62c-1cfa9acaf383-kube-api-access-z9wj4\") pod \"placement-db-sync-5949w\" (UID: \"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " pod="openstack/placement-db-sync-5949w" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.457070 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-ovsdbserver-nb\") pod \"dnsmasq-dns-559c7c46ff-gddgr\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.457148 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4j4w\" (UniqueName: \"kubernetes.io/projected/caf8c7f8-182f-471e-b6ba-cc257b879d1f-kube-api-access-f4j4w\") pod \"dnsmasq-dns-559c7c46ff-gddgr\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.457173 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-config\") pod \"dnsmasq-dns-559c7c46ff-gddgr\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.457209 4971 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75d5055d-85b2-4925-b62c-1cfa9acaf383-scripts\") pod \"placement-db-sync-5949w\" (UID: \"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " pod="openstack/placement-db-sync-5949w" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.457249 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9wj4\" (UniqueName: \"kubernetes.io/projected/75d5055d-85b2-4925-b62c-1cfa9acaf383-kube-api-access-z9wj4\") pod \"placement-db-sync-5949w\" (UID: \"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " pod="openstack/placement-db-sync-5949w" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.457290 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75d5055d-85b2-4925-b62c-1cfa9acaf383-logs\") pod \"placement-db-sync-5949w\" (UID: \"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " pod="openstack/placement-db-sync-5949w" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.457307 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75d5055d-85b2-4925-b62c-1cfa9acaf383-config-data\") pod \"placement-db-sync-5949w\" (UID: \"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " pod="openstack/placement-db-sync-5949w" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.457329 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75d5055d-85b2-4925-b62c-1cfa9acaf383-combined-ca-bundle\") pod \"placement-db-sync-5949w\" (UID: \"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " pod="openstack/placement-db-sync-5949w" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.457368 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-ovsdbserver-sb\") pod \"dnsmasq-dns-559c7c46ff-gddgr\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.457397 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-dns-svc\") pod \"dnsmasq-dns-559c7c46ff-gddgr\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.458517 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75d5055d-85b2-4925-b62c-1cfa9acaf383-logs\") pod \"placement-db-sync-5949w\" (UID: \"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " pod="openstack/placement-db-sync-5949w" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.462131 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75d5055d-85b2-4925-b62c-1cfa9acaf383-scripts\") pod \"placement-db-sync-5949w\" (UID: \"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " pod="openstack/placement-db-sync-5949w" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.463967 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75d5055d-85b2-4925-b62c-1cfa9acaf383-combined-ca-bundle\") pod \"placement-db-sync-5949w\" (UID: 
\"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " pod="openstack/placement-db-sync-5949w" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.469309 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75d5055d-85b2-4925-b62c-1cfa9acaf383-config-data\") pod \"placement-db-sync-5949w\" (UID: \"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " pod="openstack/placement-db-sync-5949w" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.488780 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9wj4\" (UniqueName: \"kubernetes.io/projected/75d5055d-85b2-4925-b62c-1cfa9acaf383-kube-api-access-z9wj4\") pod \"placement-db-sync-5949w\" (UID: \"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " pod="openstack/placement-db-sync-5949w" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.559176 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-ovsdbserver-sb\") pod \"dnsmasq-dns-559c7c46ff-gddgr\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.559275 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-dns-svc\") pod \"dnsmasq-dns-559c7c46ff-gddgr\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.559309 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-ovsdbserver-nb\") pod \"dnsmasq-dns-559c7c46ff-gddgr\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.559349 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4j4w\" (UniqueName: \"kubernetes.io/projected/caf8c7f8-182f-471e-b6ba-cc257b879d1f-kube-api-access-f4j4w\") pod \"dnsmasq-dns-559c7c46ff-gddgr\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.559398 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-config\") pod \"dnsmasq-dns-559c7c46ff-gddgr\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.560440 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-ovsdbserver-nb\") pod \"dnsmasq-dns-559c7c46ff-gddgr\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.560498 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-dns-svc\") pod \"dnsmasq-dns-559c7c46ff-gddgr\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 
08:53:15.560692 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-config\") pod \"dnsmasq-dns-559c7c46ff-gddgr\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.561269 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-ovsdbserver-sb\") pod \"dnsmasq-dns-559c7c46ff-gddgr\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.579868 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4j4w\" (UniqueName: \"kubernetes.io/projected/caf8c7f8-182f-471e-b6ba-cc257b879d1f-kube-api-access-f4j4w\") pod \"dnsmasq-dns-559c7c46ff-gddgr\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.588938 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-5949w" Nov 27 08:53:15 crc kubenswrapper[4971]: I1127 08:53:15.626065 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:53:16 crc kubenswrapper[4971]: I1127 08:53:16.078259 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-5949w"] Nov 27 08:53:16 crc kubenswrapper[4971]: W1127 08:53:16.085992 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75d5055d_85b2_4925_b62c_1cfa9acaf383.slice/crio-adac5a8bcf2dc0152b624f70ecdfda504b6dbcff50ccc53d55c5c4504b11cf89 WatchSource:0}: Error finding container adac5a8bcf2dc0152b624f70ecdfda504b6dbcff50ccc53d55c5c4504b11cf89: Status 404 returned error can't find the container with id adac5a8bcf2dc0152b624f70ecdfda504b6dbcff50ccc53d55c5c4504b11cf89 Nov 27 08:53:16 crc kubenswrapper[4971]: I1127 08:53:16.256746 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-559c7c46ff-gddgr"] Nov 27 08:53:16 crc kubenswrapper[4971]: I1127 08:53:16.981608 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-5949w" event={"ID":"75d5055d-85b2-4925-b62c-1cfa9acaf383","Type":"ContainerStarted","Data":"adac5a8bcf2dc0152b624f70ecdfda504b6dbcff50ccc53d55c5c4504b11cf89"} Nov 27 08:53:16 crc kubenswrapper[4971]: I1127 08:53:16.985099 4971 generic.go:334] "Generic (PLEG): container finished" podID="caf8c7f8-182f-471e-b6ba-cc257b879d1f" containerID="523049562fb23f9b50de806f0a7a72fa89d4551ab816205eaf5ab0f94f4b2793" exitCode=0 Nov 27 08:53:16 crc kubenswrapper[4971]: I1127 08:53:16.985170 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" event={"ID":"caf8c7f8-182f-471e-b6ba-cc257b879d1f","Type":"ContainerDied","Data":"523049562fb23f9b50de806f0a7a72fa89d4551ab816205eaf5ab0f94f4b2793"} Nov 27 08:53:16 crc kubenswrapper[4971]: I1127 08:53:16.985203 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" event={"ID":"caf8c7f8-182f-471e-b6ba-cc257b879d1f","Type":"ContainerStarted","Data":"0f2731dfdb324d36a63e5cf1458a32f978fb25ddb30e5d252a5e5388d5901fe2"} Nov 27 08:53:17 crc kubenswrapper[4971]: 
I1127 08:53:17.996619 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" event={"ID":"caf8c7f8-182f-471e-b6ba-cc257b879d1f","Type":"ContainerStarted","Data":"90f2fbf20f00cacb129f9ffb34bd45dececf911add9a0de6ce800f667230729f"} Nov 27 08:53:17 crc kubenswrapper[4971]: I1127 08:53:17.997429 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:53:18 crc kubenswrapper[4971]: I1127 08:53:18.016084 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" podStartSLOduration=3.016059569 podStartE2EDuration="3.016059569s" podCreationTimestamp="2025-11-27 08:53:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:53:18.014143394 +0000 UTC m=+7236.206187322" watchObservedRunningTime="2025-11-27 08:53:18.016059569 +0000 UTC m=+7236.208103497" Nov 27 08:53:20 crc kubenswrapper[4971]: I1127 08:53:20.021722 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-5949w" event={"ID":"75d5055d-85b2-4925-b62c-1cfa9acaf383","Type":"ContainerStarted","Data":"8c17a9b039b351f6c479fd011283c99457ea2826ecad03822e7480c24a053563"} Nov 27 08:53:20 crc kubenswrapper[4971]: I1127 08:53:20.048462 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-5949w" podStartSLOduration=1.6314217 podStartE2EDuration="5.048440834s" podCreationTimestamp="2025-11-27 08:53:15 +0000 UTC" firstStartedPulling="2025-11-27 08:53:16.088453823 +0000 UTC m=+7234.280497741" lastFinishedPulling="2025-11-27 08:53:19.505472957 +0000 UTC m=+7237.697516875" observedRunningTime="2025-11-27 08:53:20.038477619 +0000 UTC m=+7238.230521547" watchObservedRunningTime="2025-11-27 08:53:20.048440834 +0000 UTC m=+7238.240484752" Nov 27 08:53:21 crc kubenswrapper[4971]: I1127 08:53:21.032309 4971 generic.go:334] "Generic (PLEG): container finished" podID="75d5055d-85b2-4925-b62c-1cfa9acaf383" containerID="8c17a9b039b351f6c479fd011283c99457ea2826ecad03822e7480c24a053563" exitCode=0 Nov 27 08:53:21 crc kubenswrapper[4971]: I1127 08:53:21.032900 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-5949w" event={"ID":"75d5055d-85b2-4925-b62c-1cfa9acaf383","Type":"ContainerDied","Data":"8c17a9b039b351f6c479fd011283c99457ea2826ecad03822e7480c24a053563"} Nov 27 08:53:22 crc kubenswrapper[4971]: I1127 08:53:22.408587 4971 util.go:48] "No ready sandbox for pod can be found. 
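[Annotation] The pod_startup_latency_tracker entries above carry two figures: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration additionally subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling). Re-deriving both from the placement-db-sync-5949w timestamps reproduces the logged values exactly; a small check, where mustParse is a helper written for this sketch:

package main

import (
	"fmt"
	"time"
)

func mustParse(s string) time.Time {
	// Layout matches Go's default time.Time formatting used in the log.
	t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	// Timestamps transcribed from the placement-db-sync-5949w tracker entry.
	created := mustParse("2025-11-27 08:53:15 +0000 UTC")
	running := mustParse("2025-11-27 08:53:20.048440834 +0000 UTC")
	pullStart := mustParse("2025-11-27 08:53:16.088453823 +0000 UTC")
	pullEnd := mustParse("2025-11-27 08:53:19.505472957 +0000 UTC")

	e2e := running.Sub(created)         // podStartE2EDuration
	slo := e2e - pullEnd.Sub(pullStart) // podStartSLOduration: E2E minus image pull
	fmt.Println(e2e, slo)               // prints 5.048440834s 1.6314217s
}

For dnsmasq-dns-559c7c46ff-gddgr the pull timestamps are the zero value ("0001-01-01 00:00:00"), meaning no pull happened, so SLO and E2E durations coincide at 3.016059569s.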
Need to start a new one" pod="openstack/placement-db-sync-5949w" Nov 27 08:53:22 crc kubenswrapper[4971]: I1127 08:53:22.519471 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75d5055d-85b2-4925-b62c-1cfa9acaf383-config-data\") pod \"75d5055d-85b2-4925-b62c-1cfa9acaf383\" (UID: \"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " Nov 27 08:53:22 crc kubenswrapper[4971]: I1127 08:53:22.519611 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75d5055d-85b2-4925-b62c-1cfa9acaf383-scripts\") pod \"75d5055d-85b2-4925-b62c-1cfa9acaf383\" (UID: \"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " Nov 27 08:53:22 crc kubenswrapper[4971]: I1127 08:53:22.519677 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9wj4\" (UniqueName: \"kubernetes.io/projected/75d5055d-85b2-4925-b62c-1cfa9acaf383-kube-api-access-z9wj4\") pod \"75d5055d-85b2-4925-b62c-1cfa9acaf383\" (UID: \"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " Nov 27 08:53:22 crc kubenswrapper[4971]: I1127 08:53:22.519732 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75d5055d-85b2-4925-b62c-1cfa9acaf383-logs\") pod \"75d5055d-85b2-4925-b62c-1cfa9acaf383\" (UID: \"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " Nov 27 08:53:22 crc kubenswrapper[4971]: I1127 08:53:22.519773 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75d5055d-85b2-4925-b62c-1cfa9acaf383-combined-ca-bundle\") pod \"75d5055d-85b2-4925-b62c-1cfa9acaf383\" (UID: \"75d5055d-85b2-4925-b62c-1cfa9acaf383\") " Nov 27 08:53:22 crc kubenswrapper[4971]: I1127 08:53:22.520926 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/75d5055d-85b2-4925-b62c-1cfa9acaf383-logs" (OuterVolumeSpecName: "logs") pod "75d5055d-85b2-4925-b62c-1cfa9acaf383" (UID: "75d5055d-85b2-4925-b62c-1cfa9acaf383"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:53:22 crc kubenswrapper[4971]: I1127 08:53:22.526723 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75d5055d-85b2-4925-b62c-1cfa9acaf383-scripts" (OuterVolumeSpecName: "scripts") pod "75d5055d-85b2-4925-b62c-1cfa9acaf383" (UID: "75d5055d-85b2-4925-b62c-1cfa9acaf383"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:53:22 crc kubenswrapper[4971]: I1127 08:53:22.531226 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75d5055d-85b2-4925-b62c-1cfa9acaf383-kube-api-access-z9wj4" (OuterVolumeSpecName: "kube-api-access-z9wj4") pod "75d5055d-85b2-4925-b62c-1cfa9acaf383" (UID: "75d5055d-85b2-4925-b62c-1cfa9acaf383"). InnerVolumeSpecName "kube-api-access-z9wj4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:53:22 crc kubenswrapper[4971]: I1127 08:53:22.546609 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75d5055d-85b2-4925-b62c-1cfa9acaf383-config-data" (OuterVolumeSpecName: "config-data") pod "75d5055d-85b2-4925-b62c-1cfa9acaf383" (UID: "75d5055d-85b2-4925-b62c-1cfa9acaf383"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:53:22 crc kubenswrapper[4971]: I1127 08:53:22.566500 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75d5055d-85b2-4925-b62c-1cfa9acaf383-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "75d5055d-85b2-4925-b62c-1cfa9acaf383" (UID: "75d5055d-85b2-4925-b62c-1cfa9acaf383"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:53:22 crc kubenswrapper[4971]: I1127 08:53:22.622363 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9wj4\" (UniqueName: \"kubernetes.io/projected/75d5055d-85b2-4925-b62c-1cfa9acaf383-kube-api-access-z9wj4\") on node \"crc\" DevicePath \"\"" Nov 27 08:53:22 crc kubenswrapper[4971]: I1127 08:53:22.622395 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75d5055d-85b2-4925-b62c-1cfa9acaf383-logs\") on node \"crc\" DevicePath \"\"" Nov 27 08:53:22 crc kubenswrapper[4971]: I1127 08:53:22.622406 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75d5055d-85b2-4925-b62c-1cfa9acaf383-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:53:22 crc kubenswrapper[4971]: I1127 08:53:22.622418 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75d5055d-85b2-4925-b62c-1cfa9acaf383-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:53:22 crc kubenswrapper[4971]: I1127 08:53:22.622460 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75d5055d-85b2-4925-b62c-1cfa9acaf383-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.051228 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-5949w" event={"ID":"75d5055d-85b2-4925-b62c-1cfa9acaf383","Type":"ContainerDied","Data":"adac5a8bcf2dc0152b624f70ecdfda504b6dbcff50ccc53d55c5c4504b11cf89"} Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.051276 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="adac5a8bcf2dc0152b624f70ecdfda504b6dbcff50ccc53d55c5c4504b11cf89" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.051281 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-5949w" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.140431 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-74fbdbb586-65qb7"] Nov 27 08:53:23 crc kubenswrapper[4971]: E1127 08:53:23.141030 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75d5055d-85b2-4925-b62c-1cfa9acaf383" containerName="placement-db-sync" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.141057 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="75d5055d-85b2-4925-b62c-1cfa9acaf383" containerName="placement-db-sync" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.141298 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="75d5055d-85b2-4925-b62c-1cfa9acaf383" containerName="placement-db-sync" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.143914 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.146965 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-474xz" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.147368 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.150671 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.153398 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-74fbdbb586-65qb7"] Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.232821 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa5026c6-2f7f-4590-a883-d2599ec5a57a-logs\") pod \"placement-74fbdbb586-65qb7\" (UID: \"aa5026c6-2f7f-4590-a883-d2599ec5a57a\") " pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.232881 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa5026c6-2f7f-4590-a883-d2599ec5a57a-combined-ca-bundle\") pod \"placement-74fbdbb586-65qb7\" (UID: \"aa5026c6-2f7f-4590-a883-d2599ec5a57a\") " pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.233155 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa5026c6-2f7f-4590-a883-d2599ec5a57a-config-data\") pod \"placement-74fbdbb586-65qb7\" (UID: \"aa5026c6-2f7f-4590-a883-d2599ec5a57a\") " pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.233265 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtq2h\" (UniqueName: \"kubernetes.io/projected/aa5026c6-2f7f-4590-a883-d2599ec5a57a-kube-api-access-gtq2h\") pod \"placement-74fbdbb586-65qb7\" (UID: \"aa5026c6-2f7f-4590-a883-d2599ec5a57a\") " pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.233432 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa5026c6-2f7f-4590-a883-d2599ec5a57a-scripts\") pod \"placement-74fbdbb586-65qb7\" (UID: \"aa5026c6-2f7f-4590-a883-d2599ec5a57a\") " pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.335950 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa5026c6-2f7f-4590-a883-d2599ec5a57a-logs\") pod \"placement-74fbdbb586-65qb7\" (UID: \"aa5026c6-2f7f-4590-a883-d2599ec5a57a\") " pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.336006 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa5026c6-2f7f-4590-a883-d2599ec5a57a-combined-ca-bundle\") pod \"placement-74fbdbb586-65qb7\" (UID: \"aa5026c6-2f7f-4590-a883-d2599ec5a57a\") " pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.336095 4971 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa5026c6-2f7f-4590-a883-d2599ec5a57a-config-data\") pod \"placement-74fbdbb586-65qb7\" (UID: \"aa5026c6-2f7f-4590-a883-d2599ec5a57a\") " pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.336127 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtq2h\" (UniqueName: \"kubernetes.io/projected/aa5026c6-2f7f-4590-a883-d2599ec5a57a-kube-api-access-gtq2h\") pod \"placement-74fbdbb586-65qb7\" (UID: \"aa5026c6-2f7f-4590-a883-d2599ec5a57a\") " pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.336191 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa5026c6-2f7f-4590-a883-d2599ec5a57a-scripts\") pod \"placement-74fbdbb586-65qb7\" (UID: \"aa5026c6-2f7f-4590-a883-d2599ec5a57a\") " pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.336587 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa5026c6-2f7f-4590-a883-d2599ec5a57a-logs\") pod \"placement-74fbdbb586-65qb7\" (UID: \"aa5026c6-2f7f-4590-a883-d2599ec5a57a\") " pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.349220 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa5026c6-2f7f-4590-a883-d2599ec5a57a-scripts\") pod \"placement-74fbdbb586-65qb7\" (UID: \"aa5026c6-2f7f-4590-a883-d2599ec5a57a\") " pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.350763 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa5026c6-2f7f-4590-a883-d2599ec5a57a-config-data\") pod \"placement-74fbdbb586-65qb7\" (UID: \"aa5026c6-2f7f-4590-a883-d2599ec5a57a\") " pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.353357 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa5026c6-2f7f-4590-a883-d2599ec5a57a-combined-ca-bundle\") pod \"placement-74fbdbb586-65qb7\" (UID: \"aa5026c6-2f7f-4590-a883-d2599ec5a57a\") " pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.355681 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtq2h\" (UniqueName: \"kubernetes.io/projected/aa5026c6-2f7f-4590-a883-d2599ec5a57a-kube-api-access-gtq2h\") pod \"placement-74fbdbb586-65qb7\" (UID: \"aa5026c6-2f7f-4590-a883-d2599ec5a57a\") " pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.470204 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:23 crc kubenswrapper[4971]: I1127 08:53:23.983804 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-74fbdbb586-65qb7"] Nov 27 08:53:24 crc kubenswrapper[4971]: I1127 08:53:24.063576 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-74fbdbb586-65qb7" event={"ID":"aa5026c6-2f7f-4590-a883-d2599ec5a57a","Type":"ContainerStarted","Data":"735cfd5fb974ef70c34095f25881cdc7ff69c6f6bd3fafdab632589b7ff167f7"} Nov 27 08:53:25 crc kubenswrapper[4971]: I1127 08:53:25.077581 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-74fbdbb586-65qb7" event={"ID":"aa5026c6-2f7f-4590-a883-d2599ec5a57a","Type":"ContainerStarted","Data":"4c45ceee25a352808130014a959afb3ac9500e50f8aabae23b75c2d56e125220"} Nov 27 08:53:25 crc kubenswrapper[4971]: I1127 08:53:25.078019 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-74fbdbb586-65qb7" event={"ID":"aa5026c6-2f7f-4590-a883-d2599ec5a57a","Type":"ContainerStarted","Data":"2b7eb4cf732838b9fe45f915cfcc99f63470ab9455e3a4c4b3bcbc165b211fc8"} Nov 27 08:53:25 crc kubenswrapper[4971]: I1127 08:53:25.078047 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:25 crc kubenswrapper[4971]: I1127 08:53:25.112653 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-74fbdbb586-65qb7" podStartSLOduration=2.112627604 podStartE2EDuration="2.112627604s" podCreationTimestamp="2025-11-27 08:53:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:53:25.102959868 +0000 UTC m=+7243.295003786" watchObservedRunningTime="2025-11-27 08:53:25.112627604 +0000 UTC m=+7243.304671542" Nov 27 08:53:25 crc kubenswrapper[4971]: I1127 08:53:25.628939 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:53:25 crc kubenswrapper[4971]: I1127 08:53:25.716546 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d7bfb7d8c-drcrf"] Nov 27 08:53:25 crc kubenswrapper[4971]: I1127 08:53:25.717577 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" podUID="5c490ae6-410f-4be8-a025-1c8813f27678" containerName="dnsmasq-dns" containerID="cri-o://c5f4e4928894bc260a0740c500ef7cba3a8628447c67b31fe9c572ebdca23bfb" gracePeriod=10 Nov 27 08:53:26 crc kubenswrapper[4971]: I1127 08:53:26.091944 4971 generic.go:334] "Generic (PLEG): container finished" podID="5c490ae6-410f-4be8-a025-1c8813f27678" containerID="c5f4e4928894bc260a0740c500ef7cba3a8628447c67b31fe9c572ebdca23bfb" exitCode=0 Nov 27 08:53:26 crc kubenswrapper[4971]: I1127 08:53:26.093345 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" event={"ID":"5c490ae6-410f-4be8-a025-1c8813f27678","Type":"ContainerDied","Data":"c5f4e4928894bc260a0740c500ef7cba3a8628447c67b31fe9c572ebdca23bfb"} Nov 27 08:53:26 crc kubenswrapper[4971]: I1127 08:53:26.094797 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:26 crc kubenswrapper[4971]: I1127 08:53:26.180411 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:53:26 crc kubenswrapper[4971]: I1127 08:53:26.309796 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-ovsdbserver-nb\") pod \"5c490ae6-410f-4be8-a025-1c8813f27678\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " Nov 27 08:53:26 crc kubenswrapper[4971]: I1127 08:53:26.310249 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-dns-svc\") pod \"5c490ae6-410f-4be8-a025-1c8813f27678\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " Nov 27 08:53:26 crc kubenswrapper[4971]: I1127 08:53:26.310409 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-48dkq\" (UniqueName: \"kubernetes.io/projected/5c490ae6-410f-4be8-a025-1c8813f27678-kube-api-access-48dkq\") pod \"5c490ae6-410f-4be8-a025-1c8813f27678\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " Nov 27 08:53:26 crc kubenswrapper[4971]: I1127 08:53:26.310513 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-ovsdbserver-sb\") pod \"5c490ae6-410f-4be8-a025-1c8813f27678\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " Nov 27 08:53:26 crc kubenswrapper[4971]: I1127 08:53:26.310667 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-config\") pod \"5c490ae6-410f-4be8-a025-1c8813f27678\" (UID: \"5c490ae6-410f-4be8-a025-1c8813f27678\") " Nov 27 08:53:26 crc kubenswrapper[4971]: I1127 08:53:26.325393 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c490ae6-410f-4be8-a025-1c8813f27678-kube-api-access-48dkq" (OuterVolumeSpecName: "kube-api-access-48dkq") pod "5c490ae6-410f-4be8-a025-1c8813f27678" (UID: "5c490ae6-410f-4be8-a025-1c8813f27678"). InnerVolumeSpecName "kube-api-access-48dkq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:53:26 crc kubenswrapper[4971]: I1127 08:53:26.360461 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-config" (OuterVolumeSpecName: "config") pod "5c490ae6-410f-4be8-a025-1c8813f27678" (UID: "5c490ae6-410f-4be8-a025-1c8813f27678"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:53:26 crc kubenswrapper[4971]: I1127 08:53:26.361358 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5c490ae6-410f-4be8-a025-1c8813f27678" (UID: "5c490ae6-410f-4be8-a025-1c8813f27678"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:53:26 crc kubenswrapper[4971]: I1127 08:53:26.365383 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5c490ae6-410f-4be8-a025-1c8813f27678" (UID: "5c490ae6-410f-4be8-a025-1c8813f27678"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:53:26 crc kubenswrapper[4971]: I1127 08:53:26.372232 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5c490ae6-410f-4be8-a025-1c8813f27678" (UID: "5c490ae6-410f-4be8-a025-1c8813f27678"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:53:26 crc kubenswrapper[4971]: I1127 08:53:26.413118 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-48dkq\" (UniqueName: \"kubernetes.io/projected/5c490ae6-410f-4be8-a025-1c8813f27678-kube-api-access-48dkq\") on node \"crc\" DevicePath \"\"" Nov 27 08:53:26 crc kubenswrapper[4971]: I1127 08:53:26.413152 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 08:53:26 crc kubenswrapper[4971]: I1127 08:53:26.413164 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-config\") on node \"crc\" DevicePath \"\"" Nov 27 08:53:26 crc kubenswrapper[4971]: I1127 08:53:26.413172 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 08:53:26 crc kubenswrapper[4971]: I1127 08:53:26.413181 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c490ae6-410f-4be8-a025-1c8813f27678-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 08:53:27 crc kubenswrapper[4971]: I1127 08:53:27.103364 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" event={"ID":"5c490ae6-410f-4be8-a025-1c8813f27678","Type":"ContainerDied","Data":"5e0963611632e7bb24a05671ebe824436a2af63e54f0847ebb2904bc69369536"} Nov 27 08:53:27 crc kubenswrapper[4971]: I1127 08:53:27.103392 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d7bfb7d8c-drcrf" Nov 27 08:53:27 crc kubenswrapper[4971]: I1127 08:53:27.103456 4971 scope.go:117] "RemoveContainer" containerID="c5f4e4928894bc260a0740c500ef7cba3a8628447c67b31fe9c572ebdca23bfb" Nov 27 08:53:27 crc kubenswrapper[4971]: I1127 08:53:27.132503 4971 scope.go:117] "RemoveContainer" containerID="a6a8ba3d680c062035f287c37b86b2a0df2fc0906533f633a0cc89a242c6e0ff" Nov 27 08:53:27 crc kubenswrapper[4971]: I1127 08:53:27.133188 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d7bfb7d8c-drcrf"] Nov 27 08:53:27 crc kubenswrapper[4971]: I1127 08:53:27.142205 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d7bfb7d8c-drcrf"] Nov 27 08:53:28 crc kubenswrapper[4971]: I1127 08:53:28.553075 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:53:28 crc kubenswrapper[4971]: E1127 08:53:28.553816 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:53:28 crc kubenswrapper[4971]: I1127 08:53:28.563693 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c490ae6-410f-4be8-a025-1c8813f27678" path="/var/lib/kubelet/pods/5c490ae6-410f-4be8-a025-1c8813f27678/volumes" Nov 27 08:53:41 crc kubenswrapper[4971]: I1127 08:53:41.551385 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:53:41 crc kubenswrapper[4971]: E1127 08:53:41.552740 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:53:54 crc kubenswrapper[4971]: I1127 08:53:54.564186 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:54 crc kubenswrapper[4971]: I1127 08:53:54.564854 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-74fbdbb586-65qb7" Nov 27 08:53:56 crc kubenswrapper[4971]: I1127 08:53:56.550710 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:53:56 crc kubenswrapper[4971]: E1127 08:53:56.551418 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:54:10 crc kubenswrapper[4971]: I1127 08:54:10.551617 4971 scope.go:117] "RemoveContainer" 
containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:54:10 crc kubenswrapper[4971]: E1127 08:54:10.553044 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:54:19 crc kubenswrapper[4971]: I1127 08:54:19.824302 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-r4rk7"] Nov 27 08:54:19 crc kubenswrapper[4971]: E1127 08:54:19.825350 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c490ae6-410f-4be8-a025-1c8813f27678" containerName="init" Nov 27 08:54:19 crc kubenswrapper[4971]: I1127 08:54:19.825365 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c490ae6-410f-4be8-a025-1c8813f27678" containerName="init" Nov 27 08:54:19 crc kubenswrapper[4971]: E1127 08:54:19.825396 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c490ae6-410f-4be8-a025-1c8813f27678" containerName="dnsmasq-dns" Nov 27 08:54:19 crc kubenswrapper[4971]: I1127 08:54:19.825403 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c490ae6-410f-4be8-a025-1c8813f27678" containerName="dnsmasq-dns" Nov 27 08:54:19 crc kubenswrapper[4971]: I1127 08:54:19.825624 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c490ae6-410f-4be8-a025-1c8813f27678" containerName="dnsmasq-dns" Nov 27 08:54:19 crc kubenswrapper[4971]: I1127 08:54:19.826393 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-r4rk7" Nov 27 08:54:19 crc kubenswrapper[4971]: I1127 08:54:19.847300 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-r4rk7"] Nov 27 08:54:19 crc kubenswrapper[4971]: I1127 08:54:19.859252 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85bff7b4-875a-4b89-b961-06b4177b91cf-operator-scripts\") pod \"nova-api-db-create-r4rk7\" (UID: \"85bff7b4-875a-4b89-b961-06b4177b91cf\") " pod="openstack/nova-api-db-create-r4rk7" Nov 27 08:54:19 crc kubenswrapper[4971]: I1127 08:54:19.859356 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkj8l\" (UniqueName: \"kubernetes.io/projected/85bff7b4-875a-4b89-b961-06b4177b91cf-kube-api-access-jkj8l\") pod \"nova-api-db-create-r4rk7\" (UID: \"85bff7b4-875a-4b89-b961-06b4177b91cf\") " pod="openstack/nova-api-db-create-r4rk7" Nov 27 08:54:19 crc kubenswrapper[4971]: I1127 08:54:19.937417 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-66dtj"] Nov 27 08:54:19 crc kubenswrapper[4971]: I1127 08:54:19.939187 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-66dtj" Nov 27 08:54:19 crc kubenswrapper[4971]: I1127 08:54:19.948919 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-66dtj"] Nov 27 08:54:19 crc kubenswrapper[4971]: I1127 08:54:19.964940 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85bff7b4-875a-4b89-b961-06b4177b91cf-operator-scripts\") pod \"nova-api-db-create-r4rk7\" (UID: \"85bff7b4-875a-4b89-b961-06b4177b91cf\") " pod="openstack/nova-api-db-create-r4rk7" Nov 27 08:54:19 crc kubenswrapper[4971]: I1127 08:54:19.965030 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkj8l\" (UniqueName: \"kubernetes.io/projected/85bff7b4-875a-4b89-b961-06b4177b91cf-kube-api-access-jkj8l\") pod \"nova-api-db-create-r4rk7\" (UID: \"85bff7b4-875a-4b89-b961-06b4177b91cf\") " pod="openstack/nova-api-db-create-r4rk7" Nov 27 08:54:19 crc kubenswrapper[4971]: I1127 08:54:19.965127 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27d1bc48-7132-44a9-aa8b-cd0e45e6d106-operator-scripts\") pod \"nova-cell0-db-create-66dtj\" (UID: \"27d1bc48-7132-44a9-aa8b-cd0e45e6d106\") " pod="openstack/nova-cell0-db-create-66dtj" Nov 27 08:54:19 crc kubenswrapper[4971]: I1127 08:54:19.965197 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2fr4\" (UniqueName: \"kubernetes.io/projected/27d1bc48-7132-44a9-aa8b-cd0e45e6d106-kube-api-access-t2fr4\") pod \"nova-cell0-db-create-66dtj\" (UID: \"27d1bc48-7132-44a9-aa8b-cd0e45e6d106\") " pod="openstack/nova-cell0-db-create-66dtj" Nov 27 08:54:19 crc kubenswrapper[4971]: I1127 08:54:19.971178 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85bff7b4-875a-4b89-b961-06b4177b91cf-operator-scripts\") pod \"nova-api-db-create-r4rk7\" (UID: \"85bff7b4-875a-4b89-b961-06b4177b91cf\") " pod="openstack/nova-api-db-create-r4rk7" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.012774 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkj8l\" (UniqueName: \"kubernetes.io/projected/85bff7b4-875a-4b89-b961-06b4177b91cf-kube-api-access-jkj8l\") pod \"nova-api-db-create-r4rk7\" (UID: \"85bff7b4-875a-4b89-b961-06b4177b91cf\") " pod="openstack/nova-api-db-create-r4rk7" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.036966 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-4814-account-create-update-5lnrc"] Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.038568 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-4814-account-create-update-5lnrc" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.042395 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.050336 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-bh6jw"] Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.052096 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-bh6jw" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.061883 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-4814-account-create-update-5lnrc"] Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.075429 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2fr4\" (UniqueName: \"kubernetes.io/projected/27d1bc48-7132-44a9-aa8b-cd0e45e6d106-kube-api-access-t2fr4\") pod \"nova-cell0-db-create-66dtj\" (UID: \"27d1bc48-7132-44a9-aa8b-cd0e45e6d106\") " pod="openstack/nova-cell0-db-create-66dtj" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.075774 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrgkd\" (UniqueName: \"kubernetes.io/projected/82e0248c-1ccd-489a-8eb8-253782084ef2-kube-api-access-lrgkd\") pod \"nova-api-4814-account-create-update-5lnrc\" (UID: \"82e0248c-1ccd-489a-8eb8-253782084ef2\") " pod="openstack/nova-api-4814-account-create-update-5lnrc" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.075885 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82e0248c-1ccd-489a-8eb8-253782084ef2-operator-scripts\") pod \"nova-api-4814-account-create-update-5lnrc\" (UID: \"82e0248c-1ccd-489a-8eb8-253782084ef2\") " pod="openstack/nova-api-4814-account-create-update-5lnrc" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.076053 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27d1bc48-7132-44a9-aa8b-cd0e45e6d106-operator-scripts\") pod \"nova-cell0-db-create-66dtj\" (UID: \"27d1bc48-7132-44a9-aa8b-cd0e45e6d106\") " pod="openstack/nova-cell0-db-create-66dtj" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.076939 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27d1bc48-7132-44a9-aa8b-cd0e45e6d106-operator-scripts\") pod \"nova-cell0-db-create-66dtj\" (UID: \"27d1bc48-7132-44a9-aa8b-cd0e45e6d106\") " pod="openstack/nova-cell0-db-create-66dtj" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.091924 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-bh6jw"] Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.106636 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2fr4\" (UniqueName: \"kubernetes.io/projected/27d1bc48-7132-44a9-aa8b-cd0e45e6d106-kube-api-access-t2fr4\") pod \"nova-cell0-db-create-66dtj\" (UID: \"27d1bc48-7132-44a9-aa8b-cd0e45e6d106\") " pod="openstack/nova-cell0-db-create-66dtj" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.153990 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-r4rk7" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.178394 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnl2z\" (UniqueName: \"kubernetes.io/projected/25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2-kube-api-access-lnl2z\") pod \"nova-cell1-db-create-bh6jw\" (UID: \"25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2\") " pod="openstack/nova-cell1-db-create-bh6jw" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.178493 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2-operator-scripts\") pod \"nova-cell1-db-create-bh6jw\" (UID: \"25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2\") " pod="openstack/nova-cell1-db-create-bh6jw" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.178648 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrgkd\" (UniqueName: \"kubernetes.io/projected/82e0248c-1ccd-489a-8eb8-253782084ef2-kube-api-access-lrgkd\") pod \"nova-api-4814-account-create-update-5lnrc\" (UID: \"82e0248c-1ccd-489a-8eb8-253782084ef2\") " pod="openstack/nova-api-4814-account-create-update-5lnrc" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.178706 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82e0248c-1ccd-489a-8eb8-253782084ef2-operator-scripts\") pod \"nova-api-4814-account-create-update-5lnrc\" (UID: \"82e0248c-1ccd-489a-8eb8-253782084ef2\") " pod="openstack/nova-api-4814-account-create-update-5lnrc" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.179564 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82e0248c-1ccd-489a-8eb8-253782084ef2-operator-scripts\") pod \"nova-api-4814-account-create-update-5lnrc\" (UID: \"82e0248c-1ccd-489a-8eb8-253782084ef2\") " pod="openstack/nova-api-4814-account-create-update-5lnrc" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.199652 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrgkd\" (UniqueName: \"kubernetes.io/projected/82e0248c-1ccd-489a-8eb8-253782084ef2-kube-api-access-lrgkd\") pod \"nova-api-4814-account-create-update-5lnrc\" (UID: \"82e0248c-1ccd-489a-8eb8-253782084ef2\") " pod="openstack/nova-api-4814-account-create-update-5lnrc" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.237297 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-15ab-account-create-update-d2vpj"] Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.239150 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-15ab-account-create-update-d2vpj" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.243004 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.249588 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-15ab-account-create-update-d2vpj"] Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.265755 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-66dtj" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.280703 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p28ns\" (UniqueName: \"kubernetes.io/projected/6f97874d-2c1b-44b0-b1f3-e5a337a4b209-kube-api-access-p28ns\") pod \"nova-cell0-15ab-account-create-update-d2vpj\" (UID: \"6f97874d-2c1b-44b0-b1f3-e5a337a4b209\") " pod="openstack/nova-cell0-15ab-account-create-update-d2vpj" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.280837 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f97874d-2c1b-44b0-b1f3-e5a337a4b209-operator-scripts\") pod \"nova-cell0-15ab-account-create-update-d2vpj\" (UID: \"6f97874d-2c1b-44b0-b1f3-e5a337a4b209\") " pod="openstack/nova-cell0-15ab-account-create-update-d2vpj" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.280879 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnl2z\" (UniqueName: \"kubernetes.io/projected/25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2-kube-api-access-lnl2z\") pod \"nova-cell1-db-create-bh6jw\" (UID: \"25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2\") " pod="openstack/nova-cell1-db-create-bh6jw" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.280926 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2-operator-scripts\") pod \"nova-cell1-db-create-bh6jw\" (UID: \"25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2\") " pod="openstack/nova-cell1-db-create-bh6jw" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.281620 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2-operator-scripts\") pod \"nova-cell1-db-create-bh6jw\" (UID: \"25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2\") " pod="openstack/nova-cell1-db-create-bh6jw" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.304356 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnl2z\" (UniqueName: \"kubernetes.io/projected/25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2-kube-api-access-lnl2z\") pod \"nova-cell1-db-create-bh6jw\" (UID: \"25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2\") " pod="openstack/nova-cell1-db-create-bh6jw" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.349628 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-efd3-account-create-update-v8hnf"] Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.354504 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-efd3-account-create-update-v8hnf" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.356731 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-efd3-account-create-update-v8hnf"] Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.357780 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.365158 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-4814-account-create-update-5lnrc" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.392815 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-bh6jw" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.393857 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p28ns\" (UniqueName: \"kubernetes.io/projected/6f97874d-2c1b-44b0-b1f3-e5a337a4b209-kube-api-access-p28ns\") pod \"nova-cell0-15ab-account-create-update-d2vpj\" (UID: \"6f97874d-2c1b-44b0-b1f3-e5a337a4b209\") " pod="openstack/nova-cell0-15ab-account-create-update-d2vpj" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.393999 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f97874d-2c1b-44b0-b1f3-e5a337a4b209-operator-scripts\") pod \"nova-cell0-15ab-account-create-update-d2vpj\" (UID: \"6f97874d-2c1b-44b0-b1f3-e5a337a4b209\") " pod="openstack/nova-cell0-15ab-account-create-update-d2vpj" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.394825 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f97874d-2c1b-44b0-b1f3-e5a337a4b209-operator-scripts\") pod \"nova-cell0-15ab-account-create-update-d2vpj\" (UID: \"6f97874d-2c1b-44b0-b1f3-e5a337a4b209\") " pod="openstack/nova-cell0-15ab-account-create-update-d2vpj" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.427015 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p28ns\" (UniqueName: \"kubernetes.io/projected/6f97874d-2c1b-44b0-b1f3-e5a337a4b209-kube-api-access-p28ns\") pod \"nova-cell0-15ab-account-create-update-d2vpj\" (UID: \"6f97874d-2c1b-44b0-b1f3-e5a337a4b209\") " pod="openstack/nova-cell0-15ab-account-create-update-d2vpj" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.497633 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtr99\" (UniqueName: \"kubernetes.io/projected/96677690-6dcc-4fe6-a2ec-418a02ca414b-kube-api-access-jtr99\") pod \"nova-cell1-efd3-account-create-update-v8hnf\" (UID: \"96677690-6dcc-4fe6-a2ec-418a02ca414b\") " pod="openstack/nova-cell1-efd3-account-create-update-v8hnf" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.497885 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96677690-6dcc-4fe6-a2ec-418a02ca414b-operator-scripts\") pod \"nova-cell1-efd3-account-create-update-v8hnf\" (UID: \"96677690-6dcc-4fe6-a2ec-418a02ca414b\") " pod="openstack/nova-cell1-efd3-account-create-update-v8hnf" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.578257 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-15ab-account-create-update-d2vpj" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.599547 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtr99\" (UniqueName: \"kubernetes.io/projected/96677690-6dcc-4fe6-a2ec-418a02ca414b-kube-api-access-jtr99\") pod \"nova-cell1-efd3-account-create-update-v8hnf\" (UID: \"96677690-6dcc-4fe6-a2ec-418a02ca414b\") " pod="openstack/nova-cell1-efd3-account-create-update-v8hnf" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.600158 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96677690-6dcc-4fe6-a2ec-418a02ca414b-operator-scripts\") pod \"nova-cell1-efd3-account-create-update-v8hnf\" (UID: \"96677690-6dcc-4fe6-a2ec-418a02ca414b\") " pod="openstack/nova-cell1-efd3-account-create-update-v8hnf" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.602496 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96677690-6dcc-4fe6-a2ec-418a02ca414b-operator-scripts\") pod \"nova-cell1-efd3-account-create-update-v8hnf\" (UID: \"96677690-6dcc-4fe6-a2ec-418a02ca414b\") " pod="openstack/nova-cell1-efd3-account-create-update-v8hnf" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.633175 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtr99\" (UniqueName: \"kubernetes.io/projected/96677690-6dcc-4fe6-a2ec-418a02ca414b-kube-api-access-jtr99\") pod \"nova-cell1-efd3-account-create-update-v8hnf\" (UID: \"96677690-6dcc-4fe6-a2ec-418a02ca414b\") " pod="openstack/nova-cell1-efd3-account-create-update-v8hnf" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.681244 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-efd3-account-create-update-v8hnf" Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.726083 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-66dtj"] Nov 27 08:54:20 crc kubenswrapper[4971]: I1127 08:54:20.810724 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-r4rk7"] Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.051715 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-4814-account-create-update-5lnrc"] Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.092956 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-bh6jw"] Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.132213 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-efd3-account-create-update-v8hnf"] Nov 27 08:54:21 crc kubenswrapper[4971]: W1127 08:54:21.154873 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod25227c00_9f73_4bfa_9cfe_90bc9aa2c0b2.slice/crio-b5d92effaba53ffd81c3d14b493d914c33cb98c3ebec0670ea9fd5118e5ac7b9 WatchSource:0}: Error finding container b5d92effaba53ffd81c3d14b493d914c33cb98c3ebec0670ea9fd5118e5ac7b9: Status 404 returned error can't find the container with id b5d92effaba53ffd81c3d14b493d914c33cb98c3ebec0670ea9fd5118e5ac7b9 Nov 27 08:54:21 crc kubenswrapper[4971]: W1127 08:54:21.168819 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96677690_6dcc_4fe6_a2ec_418a02ca414b.slice/crio-ababa7c7a173ba62b4c58b0baea571a5bd7f1e3f6daefbf996b5e7676bb67881 WatchSource:0}: Error finding container ababa7c7a173ba62b4c58b0baea571a5bd7f1e3f6daefbf996b5e7676bb67881: Status 404 returned error can't find the container with id ababa7c7a173ba62b4c58b0baea571a5bd7f1e3f6daefbf996b5e7676bb67881 Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.227026 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-15ab-account-create-update-d2vpj"] Nov 27 08:54:21 crc kubenswrapper[4971]: W1127 08:54:21.243418 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6f97874d_2c1b_44b0_b1f3_e5a337a4b209.slice/crio-c0d04b63e179a496ca997a1453bd6918edbee7a045562d622a71a498e51554a8 WatchSource:0}: Error finding container c0d04b63e179a496ca997a1453bd6918edbee7a045562d622a71a498e51554a8: Status 404 returned error can't find the container with id c0d04b63e179a496ca997a1453bd6918edbee7a045562d622a71a498e51554a8 Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.550616 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:54:21 crc kubenswrapper[4971]: E1127 08:54:21.551253 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.680219 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-api-4814-account-create-update-5lnrc" event={"ID":"82e0248c-1ccd-489a-8eb8-253782084ef2","Type":"ContainerStarted","Data":"cc757a4e86a535473ae72e0025a96aa9a3c01ef23b0bd305016b8230903dfaeb"} Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.680273 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4814-account-create-update-5lnrc" event={"ID":"82e0248c-1ccd-489a-8eb8-253782084ef2","Type":"ContainerStarted","Data":"bfa5d5b8487651d7548f4131741db5021378e2b81dcf5135365551e10991f2c2"} Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.683244 4971 generic.go:334] "Generic (PLEG): container finished" podID="27d1bc48-7132-44a9-aa8b-cd0e45e6d106" containerID="f0e915b9be57e48e3b5aee1002fae8d3fc94328f723583ceb5e063155efd5cea" exitCode=0 Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.683271 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-66dtj" event={"ID":"27d1bc48-7132-44a9-aa8b-cd0e45e6d106","Type":"ContainerDied","Data":"f0e915b9be57e48e3b5aee1002fae8d3fc94328f723583ceb5e063155efd5cea"} Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.683303 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-66dtj" event={"ID":"27d1bc48-7132-44a9-aa8b-cd0e45e6d106","Type":"ContainerStarted","Data":"5a90256c3af2dd7c77851c291b59dc91f59dba8451432d18c9b0cf694df9d494"} Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.685010 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-bh6jw" event={"ID":"25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2","Type":"ContainerStarted","Data":"60e0950e080fd373e5c08cf73708afaa541cacd867fe60c4cdcb8a8650af61d1"} Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.685040 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-bh6jw" event={"ID":"25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2","Type":"ContainerStarted","Data":"b5d92effaba53ffd81c3d14b493d914c33cb98c3ebec0670ea9fd5118e5ac7b9"} Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.690107 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-efd3-account-create-update-v8hnf" event={"ID":"96677690-6dcc-4fe6-a2ec-418a02ca414b","Type":"ContainerStarted","Data":"cc2916b73b97ece859f8abc68826e4215470d807b9ae4e2c8ceaa6a7a4790447"} Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.690159 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-efd3-account-create-update-v8hnf" event={"ID":"96677690-6dcc-4fe6-a2ec-418a02ca414b","Type":"ContainerStarted","Data":"ababa7c7a173ba62b4c58b0baea571a5bd7f1e3f6daefbf996b5e7676bb67881"} Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.694502 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-15ab-account-create-update-d2vpj" event={"ID":"6f97874d-2c1b-44b0-b1f3-e5a337a4b209","Type":"ContainerStarted","Data":"217b88d2bffc8b52d0946665d723fe0f2c3589500d8debe45c7d281749be052a"} Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.694570 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-15ab-account-create-update-d2vpj" event={"ID":"6f97874d-2c1b-44b0-b1f3-e5a337a4b209","Type":"ContainerStarted","Data":"c0d04b63e179a496ca997a1453bd6918edbee7a045562d622a71a498e51554a8"} Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.704263 4971 generic.go:334] "Generic (PLEG): container finished" podID="85bff7b4-875a-4b89-b961-06b4177b91cf" 
containerID="06b8e541e148417e34aa29d66e1a98adc537fb510ea0fa8574487380a2988ada" exitCode=0 Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.704336 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-r4rk7" event={"ID":"85bff7b4-875a-4b89-b961-06b4177b91cf","Type":"ContainerDied","Data":"06b8e541e148417e34aa29d66e1a98adc537fb510ea0fa8574487380a2988ada"} Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.704507 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-r4rk7" event={"ID":"85bff7b4-875a-4b89-b961-06b4177b91cf","Type":"ContainerStarted","Data":"94f0947eb8f4b7b92f1b7ecf141cd42163d56ea74352a2eeadfcde104674a281"} Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.709714 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-4814-account-create-update-5lnrc" podStartSLOduration=1.709691705 podStartE2EDuration="1.709691705s" podCreationTimestamp="2025-11-27 08:54:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:54:21.701185451 +0000 UTC m=+7299.893229379" watchObservedRunningTime="2025-11-27 08:54:21.709691705 +0000 UTC m=+7299.901735623" Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.727555 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-efd3-account-create-update-v8hnf" podStartSLOduration=1.727510265 podStartE2EDuration="1.727510265s" podCreationTimestamp="2025-11-27 08:54:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:54:21.722289076 +0000 UTC m=+7299.914332994" watchObservedRunningTime="2025-11-27 08:54:21.727510265 +0000 UTC m=+7299.919554183" Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.748518 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-bh6jw" podStartSLOduration=1.748483696 podStartE2EDuration="1.748483696s" podCreationTimestamp="2025-11-27 08:54:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:54:21.738184101 +0000 UTC m=+7299.930228029" watchObservedRunningTime="2025-11-27 08:54:21.748483696 +0000 UTC m=+7299.940527614" Nov 27 08:54:21 crc kubenswrapper[4971]: I1127 08:54:21.765632 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-15ab-account-create-update-d2vpj" podStartSLOduration=1.7656098660000001 podStartE2EDuration="1.765609866s" podCreationTimestamp="2025-11-27 08:54:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:54:21.75736703 +0000 UTC m=+7299.949410948" watchObservedRunningTime="2025-11-27 08:54:21.765609866 +0000 UTC m=+7299.957653774" Nov 27 08:54:22 crc kubenswrapper[4971]: I1127 08:54:22.718058 4971 generic.go:334] "Generic (PLEG): container finished" podID="82e0248c-1ccd-489a-8eb8-253782084ef2" containerID="cc757a4e86a535473ae72e0025a96aa9a3c01ef23b0bd305016b8230903dfaeb" exitCode=0 Nov 27 08:54:22 crc kubenswrapper[4971]: I1127 08:54:22.718160 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4814-account-create-update-5lnrc" 
event={"ID":"82e0248c-1ccd-489a-8eb8-253782084ef2","Type":"ContainerDied","Data":"cc757a4e86a535473ae72e0025a96aa9a3c01ef23b0bd305016b8230903dfaeb"} Nov 27 08:54:22 crc kubenswrapper[4971]: I1127 08:54:22.720361 4971 generic.go:334] "Generic (PLEG): container finished" podID="25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2" containerID="60e0950e080fd373e5c08cf73708afaa541cacd867fe60c4cdcb8a8650af61d1" exitCode=0 Nov 27 08:54:22 crc kubenswrapper[4971]: I1127 08:54:22.720489 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-bh6jw" event={"ID":"25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2","Type":"ContainerDied","Data":"60e0950e080fd373e5c08cf73708afaa541cacd867fe60c4cdcb8a8650af61d1"} Nov 27 08:54:22 crc kubenswrapper[4971]: I1127 08:54:22.722034 4971 generic.go:334] "Generic (PLEG): container finished" podID="96677690-6dcc-4fe6-a2ec-418a02ca414b" containerID="cc2916b73b97ece859f8abc68826e4215470d807b9ae4e2c8ceaa6a7a4790447" exitCode=0 Nov 27 08:54:22 crc kubenswrapper[4971]: I1127 08:54:22.722070 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-efd3-account-create-update-v8hnf" event={"ID":"96677690-6dcc-4fe6-a2ec-418a02ca414b","Type":"ContainerDied","Data":"cc2916b73b97ece859f8abc68826e4215470d807b9ae4e2c8ceaa6a7a4790447"} Nov 27 08:54:22 crc kubenswrapper[4971]: I1127 08:54:22.724682 4971 generic.go:334] "Generic (PLEG): container finished" podID="6f97874d-2c1b-44b0-b1f3-e5a337a4b209" containerID="217b88d2bffc8b52d0946665d723fe0f2c3589500d8debe45c7d281749be052a" exitCode=0 Nov 27 08:54:22 crc kubenswrapper[4971]: I1127 08:54:22.724908 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-15ab-account-create-update-d2vpj" event={"ID":"6f97874d-2c1b-44b0-b1f3-e5a337a4b209","Type":"ContainerDied","Data":"217b88d2bffc8b52d0946665d723fe0f2c3589500d8debe45c7d281749be052a"} Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.162622 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-66dtj" Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.169379 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-r4rk7" Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.272497 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27d1bc48-7132-44a9-aa8b-cd0e45e6d106-operator-scripts\") pod \"27d1bc48-7132-44a9-aa8b-cd0e45e6d106\" (UID: \"27d1bc48-7132-44a9-aa8b-cd0e45e6d106\") " Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.272612 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkj8l\" (UniqueName: \"kubernetes.io/projected/85bff7b4-875a-4b89-b961-06b4177b91cf-kube-api-access-jkj8l\") pod \"85bff7b4-875a-4b89-b961-06b4177b91cf\" (UID: \"85bff7b4-875a-4b89-b961-06b4177b91cf\") " Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.272667 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85bff7b4-875a-4b89-b961-06b4177b91cf-operator-scripts\") pod \"85bff7b4-875a-4b89-b961-06b4177b91cf\" (UID: \"85bff7b4-875a-4b89-b961-06b4177b91cf\") " Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.272704 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t2fr4\" (UniqueName: \"kubernetes.io/projected/27d1bc48-7132-44a9-aa8b-cd0e45e6d106-kube-api-access-t2fr4\") pod \"27d1bc48-7132-44a9-aa8b-cd0e45e6d106\" (UID: \"27d1bc48-7132-44a9-aa8b-cd0e45e6d106\") " Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.273473 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85bff7b4-875a-4b89-b961-06b4177b91cf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "85bff7b4-875a-4b89-b961-06b4177b91cf" (UID: "85bff7b4-875a-4b89-b961-06b4177b91cf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.273473 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27d1bc48-7132-44a9-aa8b-cd0e45e6d106-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "27d1bc48-7132-44a9-aa8b-cd0e45e6d106" (UID: "27d1bc48-7132-44a9-aa8b-cd0e45e6d106"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.279355 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85bff7b4-875a-4b89-b961-06b4177b91cf-kube-api-access-jkj8l" (OuterVolumeSpecName: "kube-api-access-jkj8l") pod "85bff7b4-875a-4b89-b961-06b4177b91cf" (UID: "85bff7b4-875a-4b89-b961-06b4177b91cf"). InnerVolumeSpecName "kube-api-access-jkj8l". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.279475 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27d1bc48-7132-44a9-aa8b-cd0e45e6d106-kube-api-access-t2fr4" (OuterVolumeSpecName: "kube-api-access-t2fr4") pod "27d1bc48-7132-44a9-aa8b-cd0e45e6d106" (UID: "27d1bc48-7132-44a9-aa8b-cd0e45e6d106"). InnerVolumeSpecName "kube-api-access-t2fr4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.375095 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27d1bc48-7132-44a9-aa8b-cd0e45e6d106-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.375319 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkj8l\" (UniqueName: \"kubernetes.io/projected/85bff7b4-875a-4b89-b961-06b4177b91cf-kube-api-access-jkj8l\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.375419 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85bff7b4-875a-4b89-b961-06b4177b91cf-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.375471 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t2fr4\" (UniqueName: \"kubernetes.io/projected/27d1bc48-7132-44a9-aa8b-cd0e45e6d106-kube-api-access-t2fr4\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.736022 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-r4rk7" Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.736042 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-r4rk7" event={"ID":"85bff7b4-875a-4b89-b961-06b4177b91cf","Type":"ContainerDied","Data":"94f0947eb8f4b7b92f1b7ecf141cd42163d56ea74352a2eeadfcde104674a281"} Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.736115 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94f0947eb8f4b7b92f1b7ecf141cd42163d56ea74352a2eeadfcde104674a281" Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.743498 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-66dtj" event={"ID":"27d1bc48-7132-44a9-aa8b-cd0e45e6d106","Type":"ContainerDied","Data":"5a90256c3af2dd7c77851c291b59dc91f59dba8451432d18c9b0cf694df9d494"} Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.743645 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a90256c3af2dd7c77851c291b59dc91f59dba8451432d18c9b0cf694df9d494" Nov 27 08:54:23 crc kubenswrapper[4971]: I1127 08:54:23.743915 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-66dtj" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.172970 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-4814-account-create-update-5lnrc" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.321552 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82e0248c-1ccd-489a-8eb8-253782084ef2-operator-scripts\") pod \"82e0248c-1ccd-489a-8eb8-253782084ef2\" (UID: \"82e0248c-1ccd-489a-8eb8-253782084ef2\") " Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.321728 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrgkd\" (UniqueName: \"kubernetes.io/projected/82e0248c-1ccd-489a-8eb8-253782084ef2-kube-api-access-lrgkd\") pod \"82e0248c-1ccd-489a-8eb8-253782084ef2\" (UID: \"82e0248c-1ccd-489a-8eb8-253782084ef2\") " Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.322079 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82e0248c-1ccd-489a-8eb8-253782084ef2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "82e0248c-1ccd-489a-8eb8-253782084ef2" (UID: "82e0248c-1ccd-489a-8eb8-253782084ef2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.322358 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82e0248c-1ccd-489a-8eb8-253782084ef2-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.325352 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82e0248c-1ccd-489a-8eb8-253782084ef2-kube-api-access-lrgkd" (OuterVolumeSpecName: "kube-api-access-lrgkd") pod "82e0248c-1ccd-489a-8eb8-253782084ef2" (UID: "82e0248c-1ccd-489a-8eb8-253782084ef2"). InnerVolumeSpecName "kube-api-access-lrgkd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.389713 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-15ab-account-create-update-d2vpj" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.396310 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-efd3-account-create-update-v8hnf" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.401913 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-bh6jw" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.424146 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrgkd\" (UniqueName: \"kubernetes.io/projected/82e0248c-1ccd-489a-8eb8-253782084ef2-kube-api-access-lrgkd\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.525180 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnl2z\" (UniqueName: \"kubernetes.io/projected/25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2-kube-api-access-lnl2z\") pod \"25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2\" (UID: \"25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2\") " Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.525649 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2-operator-scripts\") pod \"25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2\" (UID: \"25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2\") " Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.525699 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtr99\" (UniqueName: \"kubernetes.io/projected/96677690-6dcc-4fe6-a2ec-418a02ca414b-kube-api-access-jtr99\") pod \"96677690-6dcc-4fe6-a2ec-418a02ca414b\" (UID: \"96677690-6dcc-4fe6-a2ec-418a02ca414b\") " Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.525756 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96677690-6dcc-4fe6-a2ec-418a02ca414b-operator-scripts\") pod \"96677690-6dcc-4fe6-a2ec-418a02ca414b\" (UID: \"96677690-6dcc-4fe6-a2ec-418a02ca414b\") " Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.525884 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f97874d-2c1b-44b0-b1f3-e5a337a4b209-operator-scripts\") pod \"6f97874d-2c1b-44b0-b1f3-e5a337a4b209\" (UID: \"6f97874d-2c1b-44b0-b1f3-e5a337a4b209\") " Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.525958 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p28ns\" (UniqueName: \"kubernetes.io/projected/6f97874d-2c1b-44b0-b1f3-e5a337a4b209-kube-api-access-p28ns\") pod \"6f97874d-2c1b-44b0-b1f3-e5a337a4b209\" (UID: \"6f97874d-2c1b-44b0-b1f3-e5a337a4b209\") " Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.526284 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2" (UID: "25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.526476 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f97874d-2c1b-44b0-b1f3-e5a337a4b209-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6f97874d-2c1b-44b0-b1f3-e5a337a4b209" (UID: "6f97874d-2c1b-44b0-b1f3-e5a337a4b209"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.527173 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96677690-6dcc-4fe6-a2ec-418a02ca414b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "96677690-6dcc-4fe6-a2ec-418a02ca414b" (UID: "96677690-6dcc-4fe6-a2ec-418a02ca414b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.528239 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2-kube-api-access-lnl2z" (OuterVolumeSpecName: "kube-api-access-lnl2z") pod "25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2" (UID: "25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2"). InnerVolumeSpecName "kube-api-access-lnl2z". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.529518 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f97874d-2c1b-44b0-b1f3-e5a337a4b209-kube-api-access-p28ns" (OuterVolumeSpecName: "kube-api-access-p28ns") pod "6f97874d-2c1b-44b0-b1f3-e5a337a4b209" (UID: "6f97874d-2c1b-44b0-b1f3-e5a337a4b209"). InnerVolumeSpecName "kube-api-access-p28ns". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.529755 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96677690-6dcc-4fe6-a2ec-418a02ca414b-kube-api-access-jtr99" (OuterVolumeSpecName: "kube-api-access-jtr99") pod "96677690-6dcc-4fe6-a2ec-418a02ca414b" (UID: "96677690-6dcc-4fe6-a2ec-418a02ca414b"). InnerVolumeSpecName "kube-api-access-jtr99". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.628141 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f97874d-2c1b-44b0-b1f3-e5a337a4b209-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.628177 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p28ns\" (UniqueName: \"kubernetes.io/projected/6f97874d-2c1b-44b0-b1f3-e5a337a4b209-kube-api-access-p28ns\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.628188 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnl2z\" (UniqueName: \"kubernetes.io/projected/25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2-kube-api-access-lnl2z\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.628197 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.628208 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtr99\" (UniqueName: \"kubernetes.io/projected/96677690-6dcc-4fe6-a2ec-418a02ca414b-kube-api-access-jtr99\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.628217 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96677690-6dcc-4fe6-a2ec-418a02ca414b-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.759013 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-15ab-account-create-update-d2vpj" event={"ID":"6f97874d-2c1b-44b0-b1f3-e5a337a4b209","Type":"ContainerDied","Data":"c0d04b63e179a496ca997a1453bd6918edbee7a045562d622a71a498e51554a8"} Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.759061 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c0d04b63e179a496ca997a1453bd6918edbee7a045562d622a71a498e51554a8" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.759120 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-15ab-account-create-update-d2vpj" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.768519 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-4814-account-create-update-5lnrc" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.768766 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4814-account-create-update-5lnrc" event={"ID":"82e0248c-1ccd-489a-8eb8-253782084ef2","Type":"ContainerDied","Data":"bfa5d5b8487651d7548f4131741db5021378e2b81dcf5135365551e10991f2c2"} Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.768818 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bfa5d5b8487651d7548f4131741db5021378e2b81dcf5135365551e10991f2c2" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.771911 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-bh6jw" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.772823 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-bh6jw" event={"ID":"25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2","Type":"ContainerDied","Data":"b5d92effaba53ffd81c3d14b493d914c33cb98c3ebec0670ea9fd5118e5ac7b9"} Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.772873 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5d92effaba53ffd81c3d14b493d914c33cb98c3ebec0670ea9fd5118e5ac7b9" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.776837 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-efd3-account-create-update-v8hnf" event={"ID":"96677690-6dcc-4fe6-a2ec-418a02ca414b","Type":"ContainerDied","Data":"ababa7c7a173ba62b4c58b0baea571a5bd7f1e3f6daefbf996b5e7676bb67881"} Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.776868 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ababa7c7a173ba62b4c58b0baea571a5bd7f1e3f6daefbf996b5e7676bb67881" Nov 27 08:54:24 crc kubenswrapper[4971]: I1127 08:54:24.776928 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-efd3-account-create-update-v8hnf" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.553874 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-d4wtj"] Nov 27 08:54:25 crc kubenswrapper[4971]: E1127 08:54:25.554521 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f97874d-2c1b-44b0-b1f3-e5a337a4b209" containerName="mariadb-account-create-update" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.554577 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f97874d-2c1b-44b0-b1f3-e5a337a4b209" containerName="mariadb-account-create-update" Nov 27 08:54:25 crc kubenswrapper[4971]: E1127 08:54:25.554603 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2" containerName="mariadb-database-create" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.554617 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2" containerName="mariadb-database-create" Nov 27 08:54:25 crc kubenswrapper[4971]: E1127 08:54:25.554659 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96677690-6dcc-4fe6-a2ec-418a02ca414b" containerName="mariadb-account-create-update" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.554673 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="96677690-6dcc-4fe6-a2ec-418a02ca414b" containerName="mariadb-account-create-update" Nov 27 08:54:25 crc kubenswrapper[4971]: E1127 08:54:25.554703 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27d1bc48-7132-44a9-aa8b-cd0e45e6d106" containerName="mariadb-database-create" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.554717 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="27d1bc48-7132-44a9-aa8b-cd0e45e6d106" containerName="mariadb-database-create" Nov 27 08:54:25 crc kubenswrapper[4971]: E1127 08:54:25.554750 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82e0248c-1ccd-489a-8eb8-253782084ef2" containerName="mariadb-account-create-update" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.554763 4971 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="82e0248c-1ccd-489a-8eb8-253782084ef2" containerName="mariadb-account-create-update" Nov 27 08:54:25 crc kubenswrapper[4971]: E1127 08:54:25.554779 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85bff7b4-875a-4b89-b961-06b4177b91cf" containerName="mariadb-database-create" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.554791 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="85bff7b4-875a-4b89-b961-06b4177b91cf" containerName="mariadb-database-create" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.555146 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2" containerName="mariadb-database-create" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.555174 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="82e0248c-1ccd-489a-8eb8-253782084ef2" containerName="mariadb-account-create-update" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.555204 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="96677690-6dcc-4fe6-a2ec-418a02ca414b" containerName="mariadb-account-create-update" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.555221 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="85bff7b4-875a-4b89-b961-06b4177b91cf" containerName="mariadb-database-create" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.555251 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f97874d-2c1b-44b0-b1f3-e5a337a4b209" containerName="mariadb-account-create-update" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.555268 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="27d1bc48-7132-44a9-aa8b-cd0e45e6d106" containerName="mariadb-database-create" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.556427 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-d4wtj" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.559848 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-qzmjr" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.560112 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.565130 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.569054 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-d4wtj"] Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.646944 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-config-data\") pod \"nova-cell0-conductor-db-sync-d4wtj\" (UID: \"356faab6-ab83-48bd-96e6-9c241d711bc7\") " pod="openstack/nova-cell0-conductor-db-sync-d4wtj" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.647147 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-scripts\") pod \"nova-cell0-conductor-db-sync-d4wtj\" (UID: \"356faab6-ab83-48bd-96e6-9c241d711bc7\") " pod="openstack/nova-cell0-conductor-db-sync-d4wtj" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.647170 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-d4wtj\" (UID: \"356faab6-ab83-48bd-96e6-9c241d711bc7\") " pod="openstack/nova-cell0-conductor-db-sync-d4wtj" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.647228 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr5m9\" (UniqueName: \"kubernetes.io/projected/356faab6-ab83-48bd-96e6-9c241d711bc7-kube-api-access-fr5m9\") pod \"nova-cell0-conductor-db-sync-d4wtj\" (UID: \"356faab6-ab83-48bd-96e6-9c241d711bc7\") " pod="openstack/nova-cell0-conductor-db-sync-d4wtj" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.749757 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-scripts\") pod \"nova-cell0-conductor-db-sync-d4wtj\" (UID: \"356faab6-ab83-48bd-96e6-9c241d711bc7\") " pod="openstack/nova-cell0-conductor-db-sync-d4wtj" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.749816 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-d4wtj\" (UID: \"356faab6-ab83-48bd-96e6-9c241d711bc7\") " pod="openstack/nova-cell0-conductor-db-sync-d4wtj" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.749864 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr5m9\" (UniqueName: \"kubernetes.io/projected/356faab6-ab83-48bd-96e6-9c241d711bc7-kube-api-access-fr5m9\") pod \"nova-cell0-conductor-db-sync-d4wtj\" (UID: 
\"356faab6-ab83-48bd-96e6-9c241d711bc7\") " pod="openstack/nova-cell0-conductor-db-sync-d4wtj" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.749935 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-config-data\") pod \"nova-cell0-conductor-db-sync-d4wtj\" (UID: \"356faab6-ab83-48bd-96e6-9c241d711bc7\") " pod="openstack/nova-cell0-conductor-db-sync-d4wtj" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.757087 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-config-data\") pod \"nova-cell0-conductor-db-sync-d4wtj\" (UID: \"356faab6-ab83-48bd-96e6-9c241d711bc7\") " pod="openstack/nova-cell0-conductor-db-sync-d4wtj" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.773338 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-d4wtj\" (UID: \"356faab6-ab83-48bd-96e6-9c241d711bc7\") " pod="openstack/nova-cell0-conductor-db-sync-d4wtj" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.773368 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-scripts\") pod \"nova-cell0-conductor-db-sync-d4wtj\" (UID: \"356faab6-ab83-48bd-96e6-9c241d711bc7\") " pod="openstack/nova-cell0-conductor-db-sync-d4wtj" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.777236 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fr5m9\" (UniqueName: \"kubernetes.io/projected/356faab6-ab83-48bd-96e6-9c241d711bc7-kube-api-access-fr5m9\") pod \"nova-cell0-conductor-db-sync-d4wtj\" (UID: \"356faab6-ab83-48bd-96e6-9c241d711bc7\") " pod="openstack/nova-cell0-conductor-db-sync-d4wtj" Nov 27 08:54:25 crc kubenswrapper[4971]: I1127 08:54:25.893622 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-d4wtj" Nov 27 08:54:26 crc kubenswrapper[4971]: I1127 08:54:26.380247 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-d4wtj"] Nov 27 08:54:26 crc kubenswrapper[4971]: I1127 08:54:26.796451 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-d4wtj" event={"ID":"356faab6-ab83-48bd-96e6-9c241d711bc7","Type":"ContainerStarted","Data":"670c46e06a334a0138a89c599ef3b4a2c1546af57fc65a8971da410286feceb8"} Nov 27 08:54:35 crc kubenswrapper[4971]: I1127 08:54:35.550852 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:54:35 crc kubenswrapper[4971]: E1127 08:54:35.551771 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:54:35 crc kubenswrapper[4971]: I1127 08:54:35.900465 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-d4wtj" event={"ID":"356faab6-ab83-48bd-96e6-9c241d711bc7","Type":"ContainerStarted","Data":"5dbfca667d556d21fb7bd2060f1eb017e32c820a35886d5269913cf3d92fa360"} Nov 27 08:54:41 crc kubenswrapper[4971]: I1127 08:54:41.952340 4971 generic.go:334] "Generic (PLEG): container finished" podID="356faab6-ab83-48bd-96e6-9c241d711bc7" containerID="5dbfca667d556d21fb7bd2060f1eb017e32c820a35886d5269913cf3d92fa360" exitCode=0 Nov 27 08:54:41 crc kubenswrapper[4971]: I1127 08:54:41.952433 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-d4wtj" event={"ID":"356faab6-ab83-48bd-96e6-9c241d711bc7","Type":"ContainerDied","Data":"5dbfca667d556d21fb7bd2060f1eb017e32c820a35886d5269913cf3d92fa360"} Nov 27 08:54:43 crc kubenswrapper[4971]: I1127 08:54:43.273289 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-d4wtj" Nov 27 08:54:43 crc kubenswrapper[4971]: I1127 08:54:43.321309 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fr5m9\" (UniqueName: \"kubernetes.io/projected/356faab6-ab83-48bd-96e6-9c241d711bc7-kube-api-access-fr5m9\") pod \"356faab6-ab83-48bd-96e6-9c241d711bc7\" (UID: \"356faab6-ab83-48bd-96e6-9c241d711bc7\") " Nov 27 08:54:43 crc kubenswrapper[4971]: I1127 08:54:43.321442 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-combined-ca-bundle\") pod \"356faab6-ab83-48bd-96e6-9c241d711bc7\" (UID: \"356faab6-ab83-48bd-96e6-9c241d711bc7\") " Nov 27 08:54:43 crc kubenswrapper[4971]: I1127 08:54:43.321474 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-scripts\") pod \"356faab6-ab83-48bd-96e6-9c241d711bc7\" (UID: \"356faab6-ab83-48bd-96e6-9c241d711bc7\") " Nov 27 08:54:43 crc kubenswrapper[4971]: I1127 08:54:43.321592 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-config-data\") pod \"356faab6-ab83-48bd-96e6-9c241d711bc7\" (UID: \"356faab6-ab83-48bd-96e6-9c241d711bc7\") " Nov 27 08:54:43 crc kubenswrapper[4971]: I1127 08:54:43.327848 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-scripts" (OuterVolumeSpecName: "scripts") pod "356faab6-ab83-48bd-96e6-9c241d711bc7" (UID: "356faab6-ab83-48bd-96e6-9c241d711bc7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:54:43 crc kubenswrapper[4971]: I1127 08:54:43.328704 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/356faab6-ab83-48bd-96e6-9c241d711bc7-kube-api-access-fr5m9" (OuterVolumeSpecName: "kube-api-access-fr5m9") pod "356faab6-ab83-48bd-96e6-9c241d711bc7" (UID: "356faab6-ab83-48bd-96e6-9c241d711bc7"). InnerVolumeSpecName "kube-api-access-fr5m9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:54:43 crc kubenswrapper[4971]: E1127 08:54:43.349849 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-combined-ca-bundle podName:356faab6-ab83-48bd-96e6-9c241d711bc7 nodeName:}" failed. No retries permitted until 2025-11-27 08:54:43.849806916 +0000 UTC m=+7322.041850834 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-combined-ca-bundle") pod "356faab6-ab83-48bd-96e6-9c241d711bc7" (UID: "356faab6-ab83-48bd-96e6-9c241d711bc7") : error deleting /var/lib/kubelet/pods/356faab6-ab83-48bd-96e6-9c241d711bc7/volume-subpaths: remove /var/lib/kubelet/pods/356faab6-ab83-48bd-96e6-9c241d711bc7/volume-subpaths: no such file or directory Nov 27 08:54:43 crc kubenswrapper[4971]: I1127 08:54:43.352426 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-config-data" (OuterVolumeSpecName: "config-data") pod "356faab6-ab83-48bd-96e6-9c241d711bc7" (UID: "356faab6-ab83-48bd-96e6-9c241d711bc7"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:54:43 crc kubenswrapper[4971]: I1127 08:54:43.423435 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:43 crc kubenswrapper[4971]: I1127 08:54:43.423464 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:43 crc kubenswrapper[4971]: I1127 08:54:43.423474 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fr5m9\" (UniqueName: \"kubernetes.io/projected/356faab6-ab83-48bd-96e6-9c241d711bc7-kube-api-access-fr5m9\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:43 crc kubenswrapper[4971]: I1127 08:54:43.934452 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-combined-ca-bundle\") pod \"356faab6-ab83-48bd-96e6-9c241d711bc7\" (UID: \"356faab6-ab83-48bd-96e6-9c241d711bc7\") " Nov 27 08:54:43 crc kubenswrapper[4971]: I1127 08:54:43.939675 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "356faab6-ab83-48bd-96e6-9c241d711bc7" (UID: "356faab6-ab83-48bd-96e6-9c241d711bc7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:54:43 crc kubenswrapper[4971]: I1127 08:54:43.977170 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-d4wtj" event={"ID":"356faab6-ab83-48bd-96e6-9c241d711bc7","Type":"ContainerDied","Data":"670c46e06a334a0138a89c599ef3b4a2c1546af57fc65a8971da410286feceb8"} Nov 27 08:54:43 crc kubenswrapper[4971]: I1127 08:54:43.977530 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="670c46e06a334a0138a89c599ef3b4a2c1546af57fc65a8971da410286feceb8" Nov 27 08:54:43 crc kubenswrapper[4971]: I1127 08:54:43.977286 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-d4wtj" Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.037936 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/356faab6-ab83-48bd-96e6-9c241d711bc7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.061960 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 27 08:54:44 crc kubenswrapper[4971]: E1127 08:54:44.062464 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="356faab6-ab83-48bd-96e6-9c241d711bc7" containerName="nova-cell0-conductor-db-sync" Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.062487 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="356faab6-ab83-48bd-96e6-9c241d711bc7" containerName="nova-cell0-conductor-db-sync" Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.062798 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="356faab6-ab83-48bd-96e6-9c241d711bc7" containerName="nova-cell0-conductor-db-sync" Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.063693 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.069166 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.069196 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-qzmjr" Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.071603 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.140309 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7b90c66-3028-4daa-8df5-27c0ce1e10e6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"e7b90c66-3028-4daa-8df5-27c0ce1e10e6\") " pod="openstack/nova-cell0-conductor-0" Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.140482 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7b90c66-3028-4daa-8df5-27c0ce1e10e6-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"e7b90c66-3028-4daa-8df5-27c0ce1e10e6\") " pod="openstack/nova-cell0-conductor-0" Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.140522 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4skxh\" (UniqueName: \"kubernetes.io/projected/e7b90c66-3028-4daa-8df5-27c0ce1e10e6-kube-api-access-4skxh\") pod \"nova-cell0-conductor-0\" (UID: \"e7b90c66-3028-4daa-8df5-27c0ce1e10e6\") " pod="openstack/nova-cell0-conductor-0" Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.242795 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7b90c66-3028-4daa-8df5-27c0ce1e10e6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"e7b90c66-3028-4daa-8df5-27c0ce1e10e6\") " pod="openstack/nova-cell0-conductor-0" Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.242892 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/e7b90c66-3028-4daa-8df5-27c0ce1e10e6-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"e7b90c66-3028-4daa-8df5-27c0ce1e10e6\") " pod="openstack/nova-cell0-conductor-0" Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.242919 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4skxh\" (UniqueName: \"kubernetes.io/projected/e7b90c66-3028-4daa-8df5-27c0ce1e10e6-kube-api-access-4skxh\") pod \"nova-cell0-conductor-0\" (UID: \"e7b90c66-3028-4daa-8df5-27c0ce1e10e6\") " pod="openstack/nova-cell0-conductor-0" Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.250405 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7b90c66-3028-4daa-8df5-27c0ce1e10e6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"e7b90c66-3028-4daa-8df5-27c0ce1e10e6\") " pod="openstack/nova-cell0-conductor-0" Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.251084 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7b90c66-3028-4daa-8df5-27c0ce1e10e6-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"e7b90c66-3028-4daa-8df5-27c0ce1e10e6\") " pod="openstack/nova-cell0-conductor-0" Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.274186 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4skxh\" (UniqueName: \"kubernetes.io/projected/e7b90c66-3028-4daa-8df5-27c0ce1e10e6-kube-api-access-4skxh\") pod \"nova-cell0-conductor-0\" (UID: \"e7b90c66-3028-4daa-8df5-27c0ce1e10e6\") " pod="openstack/nova-cell0-conductor-0" Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.379975 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.813955 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.987749 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"e7b90c66-3028-4daa-8df5-27c0ce1e10e6","Type":"ContainerStarted","Data":"e86c45d44a5760a15fe885ab091934392be1791d755e86910b677a716db7449b"} Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.987795 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"e7b90c66-3028-4daa-8df5-27c0ce1e10e6","Type":"ContainerStarted","Data":"3b04dda16fba15a0fe67cf18cabdc7c209c54872b4b0d9a90cfb8297fa07d6af"} Nov 27 08:54:44 crc kubenswrapper[4971]: I1127 08:54:44.987929 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Nov 27 08:54:45 crc kubenswrapper[4971]: I1127 08:54:45.012969 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.012945699 podStartE2EDuration="1.012945699s" podCreationTimestamp="2025-11-27 08:54:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:54:45.006251157 +0000 UTC m=+7323.198295075" watchObservedRunningTime="2025-11-27 08:54:45.012945699 +0000 UTC m=+7323.204989617" Nov 27 08:54:49 crc kubenswrapper[4971]: I1127 08:54:49.410505 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Nov 27 08:54:49 crc kubenswrapper[4971]: I1127 08:54:49.884572 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-f6m7q"] Nov 27 08:54:49 crc kubenswrapper[4971]: I1127 08:54:49.885868 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-f6m7q" Nov 27 08:54:49 crc kubenswrapper[4971]: I1127 08:54:49.890498 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Nov 27 08:54:49 crc kubenswrapper[4971]: I1127 08:54:49.890560 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Nov 27 08:54:49 crc kubenswrapper[4971]: I1127 08:54:49.901835 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-f6m7q"] Nov 27 08:54:49 crc kubenswrapper[4971]: I1127 08:54:49.973913 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cb90f92-2547-47f8-adc6-7c7033567f90-config-data\") pod \"nova-cell0-cell-mapping-f6m7q\" (UID: \"4cb90f92-2547-47f8-adc6-7c7033567f90\") " pod="openstack/nova-cell0-cell-mapping-f6m7q" Nov 27 08:54:49 crc kubenswrapper[4971]: I1127 08:54:49.974155 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cb90f92-2547-47f8-adc6-7c7033567f90-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-f6m7q\" (UID: \"4cb90f92-2547-47f8-adc6-7c7033567f90\") " pod="openstack/nova-cell0-cell-mapping-f6m7q" Nov 27 08:54:49 crc kubenswrapper[4971]: I1127 08:54:49.974380 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8948l\" (UniqueName: \"kubernetes.io/projected/4cb90f92-2547-47f8-adc6-7c7033567f90-kube-api-access-8948l\") pod \"nova-cell0-cell-mapping-f6m7q\" (UID: \"4cb90f92-2547-47f8-adc6-7c7033567f90\") " pod="openstack/nova-cell0-cell-mapping-f6m7q" Nov 27 08:54:49 crc kubenswrapper[4971]: I1127 08:54:49.974462 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4cb90f92-2547-47f8-adc6-7c7033567f90-scripts\") pod \"nova-cell0-cell-mapping-f6m7q\" (UID: \"4cb90f92-2547-47f8-adc6-7c7033567f90\") " pod="openstack/nova-cell0-cell-mapping-f6m7q" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.021491 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.023581 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.030627 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.093748 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.096627 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cb90f92-2547-47f8-adc6-7c7033567f90-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-f6m7q\" (UID: \"4cb90f92-2547-47f8-adc6-7c7033567f90\") " pod="openstack/nova-cell0-cell-mapping-f6m7q" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.096934 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8948l\" (UniqueName: \"kubernetes.io/projected/4cb90f92-2547-47f8-adc6-7c7033567f90-kube-api-access-8948l\") pod \"nova-cell0-cell-mapping-f6m7q\" (UID: \"4cb90f92-2547-47f8-adc6-7c7033567f90\") " pod="openstack/nova-cell0-cell-mapping-f6m7q" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.097037 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4cb90f92-2547-47f8-adc6-7c7033567f90-scripts\") pod \"nova-cell0-cell-mapping-f6m7q\" (UID: \"4cb90f92-2547-47f8-adc6-7c7033567f90\") " pod="openstack/nova-cell0-cell-mapping-f6m7q" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.097152 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cb90f92-2547-47f8-adc6-7c7033567f90-config-data\") pod \"nova-cell0-cell-mapping-f6m7q\" (UID: \"4cb90f92-2547-47f8-adc6-7c7033567f90\") " pod="openstack/nova-cell0-cell-mapping-f6m7q" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.105138 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.169094 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4cb90f92-2547-47f8-adc6-7c7033567f90-scripts\") pod \"nova-cell0-cell-mapping-f6m7q\" (UID: \"4cb90f92-2547-47f8-adc6-7c7033567f90\") " pod="openstack/nova-cell0-cell-mapping-f6m7q" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.171198 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cb90f92-2547-47f8-adc6-7c7033567f90-config-data\") pod \"nova-cell0-cell-mapping-f6m7q\" (UID: \"4cb90f92-2547-47f8-adc6-7c7033567f90\") " pod="openstack/nova-cell0-cell-mapping-f6m7q" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.172187 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cb90f92-2547-47f8-adc6-7c7033567f90-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-f6m7q\" (UID: \"4cb90f92-2547-47f8-adc6-7c7033567f90\") " pod="openstack/nova-cell0-cell-mapping-f6m7q" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.176297 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.198815 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8948l\" (UniqueName: \"kubernetes.io/projected/4cb90f92-2547-47f8-adc6-7c7033567f90-kube-api-access-8948l\") pod \"nova-cell0-cell-mapping-f6m7q\" (UID: \"4cb90f92-2547-47f8-adc6-7c7033567f90\") " pod="openstack/nova-cell0-cell-mapping-f6m7q" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.200283 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74663fef-43f4-478d-9702-eb41e85cca23-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"74663fef-43f4-478d-9702-eb41e85cca23\") " pod="openstack/nova-api-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.200467 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74663fef-43f4-478d-9702-eb41e85cca23-config-data\") pod \"nova-api-0\" (UID: \"74663fef-43f4-478d-9702-eb41e85cca23\") " pod="openstack/nova-api-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.200624 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kn6zc\" (UniqueName: \"kubernetes.io/projected/74663fef-43f4-478d-9702-eb41e85cca23-kube-api-access-kn6zc\") pod \"nova-api-0\" (UID: \"74663fef-43f4-478d-9702-eb41e85cca23\") " pod="openstack/nova-api-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.200738 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.200760 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3009b067-05e5-435b-af96-a3fd67beb32e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3009b067-05e5-435b-af96-a3fd67beb32e\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.200931 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3009b067-05e5-435b-af96-a3fd67beb32e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3009b067-05e5-435b-af96-a3fd67beb32e\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.201026 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pcdh\" (UniqueName: \"kubernetes.io/projected/3009b067-05e5-435b-af96-a3fd67beb32e-kube-api-access-5pcdh\") pod \"nova-cell1-novncproxy-0\" (UID: \"3009b067-05e5-435b-af96-a3fd67beb32e\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.201248 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74663fef-43f4-478d-9702-eb41e85cca23-logs\") pod \"nova-api-0\" (UID: \"74663fef-43f4-478d-9702-eb41e85cca23\") " pod="openstack/nova-api-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.213418 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-f6m7q" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.231354 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.265170 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.273079 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.276852 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.294685 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.303137 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba692abd-4b39-486b-9586-0619e79aa774-logs\") pod \"nova-metadata-0\" (UID: \"ba692abd-4b39-486b-9586-0619e79aa774\") " pod="openstack/nova-metadata-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.303222 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2s2z\" (UniqueName: \"kubernetes.io/projected/ba692abd-4b39-486b-9586-0619e79aa774-kube-api-access-d2s2z\") pod \"nova-metadata-0\" (UID: \"ba692abd-4b39-486b-9586-0619e79aa774\") " pod="openstack/nova-metadata-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.303267 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba692abd-4b39-486b-9586-0619e79aa774-config-data\") pod \"nova-metadata-0\" (UID: \"ba692abd-4b39-486b-9586-0619e79aa774\") " pod="openstack/nova-metadata-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.303298 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74663fef-43f4-478d-9702-eb41e85cca23-logs\") pod \"nova-api-0\" (UID: \"74663fef-43f4-478d-9702-eb41e85cca23\") " pod="openstack/nova-api-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 
08:54:50.303331 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74663fef-43f4-478d-9702-eb41e85cca23-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"74663fef-43f4-478d-9702-eb41e85cca23\") " pod="openstack/nova-api-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.303402 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74663fef-43f4-478d-9702-eb41e85cca23-config-data\") pod \"nova-api-0\" (UID: \"74663fef-43f4-478d-9702-eb41e85cca23\") " pod="openstack/nova-api-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.303425 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kn6zc\" (UniqueName: \"kubernetes.io/projected/74663fef-43f4-478d-9702-eb41e85cca23-kube-api-access-kn6zc\") pod \"nova-api-0\" (UID: \"74663fef-43f4-478d-9702-eb41e85cca23\") " pod="openstack/nova-api-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.303465 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3009b067-05e5-435b-af96-a3fd67beb32e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3009b067-05e5-435b-af96-a3fd67beb32e\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.303500 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3009b067-05e5-435b-af96-a3fd67beb32e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3009b067-05e5-435b-af96-a3fd67beb32e\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.303544 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pcdh\" (UniqueName: \"kubernetes.io/projected/3009b067-05e5-435b-af96-a3fd67beb32e-kube-api-access-5pcdh\") pod \"nova-cell1-novncproxy-0\" (UID: \"3009b067-05e5-435b-af96-a3fd67beb32e\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.303592 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba692abd-4b39-486b-9586-0619e79aa774-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ba692abd-4b39-486b-9586-0619e79aa774\") " pod="openstack/nova-metadata-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.304205 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74663fef-43f4-478d-9702-eb41e85cca23-logs\") pod \"nova-api-0\" (UID: \"74663fef-43f4-478d-9702-eb41e85cca23\") " pod="openstack/nova-api-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.309183 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74663fef-43f4-478d-9702-eb41e85cca23-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"74663fef-43f4-478d-9702-eb41e85cca23\") " pod="openstack/nova-api-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.312571 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74663fef-43f4-478d-9702-eb41e85cca23-config-data\") pod \"nova-api-0\" (UID: \"74663fef-43f4-478d-9702-eb41e85cca23\") " 
pod="openstack/nova-api-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.318818 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.319502 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3009b067-05e5-435b-af96-a3fd67beb32e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3009b067-05e5-435b-af96-a3fd67beb32e\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.320222 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.321058 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3009b067-05e5-435b-af96-a3fd67beb32e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3009b067-05e5-435b-af96-a3fd67beb32e\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.323089 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.333678 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pcdh\" (UniqueName: \"kubernetes.io/projected/3009b067-05e5-435b-af96-a3fd67beb32e-kube-api-access-5pcdh\") pod \"nova-cell1-novncproxy-0\" (UID: \"3009b067-05e5-435b-af96-a3fd67beb32e\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.343317 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kn6zc\" (UniqueName: \"kubernetes.io/projected/74663fef-43f4-478d-9702-eb41e85cca23-kube-api-access-kn6zc\") pod \"nova-api-0\" (UID: \"74663fef-43f4-478d-9702-eb41e85cca23\") " pod="openstack/nova-api-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.349466 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.374326 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d8fdb9c79-qxcdk"] Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.375413 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.382442 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.400459 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d8fdb9c79-qxcdk"] Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.407307 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hmzh\" (UniqueName: \"kubernetes.io/projected/3c50701f-bf73-43e1-9306-969eb07fbf5a-kube-api-access-6hmzh\") pod \"nova-scheduler-0\" (UID: \"3c50701f-bf73-43e1-9306-969eb07fbf5a\") " pod="openstack/nova-scheduler-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.407368 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c50701f-bf73-43e1-9306-969eb07fbf5a-config-data\") pod \"nova-scheduler-0\" (UID: \"3c50701f-bf73-43e1-9306-969eb07fbf5a\") " pod="openstack/nova-scheduler-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.407411 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba692abd-4b39-486b-9586-0619e79aa774-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ba692abd-4b39-486b-9586-0619e79aa774\") " pod="openstack/nova-metadata-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.407436 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c50701f-bf73-43e1-9306-969eb07fbf5a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3c50701f-bf73-43e1-9306-969eb07fbf5a\") " pod="openstack/nova-scheduler-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.407458 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba692abd-4b39-486b-9586-0619e79aa774-logs\") pod \"nova-metadata-0\" (UID: \"ba692abd-4b39-486b-9586-0619e79aa774\") " pod="openstack/nova-metadata-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.411305 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba692abd-4b39-486b-9586-0619e79aa774-logs\") pod \"nova-metadata-0\" (UID: \"ba692abd-4b39-486b-9586-0619e79aa774\") " pod="openstack/nova-metadata-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.411934 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2s2z\" (UniqueName: \"kubernetes.io/projected/ba692abd-4b39-486b-9586-0619e79aa774-kube-api-access-d2s2z\") pod \"nova-metadata-0\" (UID: \"ba692abd-4b39-486b-9586-0619e79aa774\") " pod="openstack/nova-metadata-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.412004 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba692abd-4b39-486b-9586-0619e79aa774-config-data\") pod \"nova-metadata-0\" (UID: \"ba692abd-4b39-486b-9586-0619e79aa774\") " pod="openstack/nova-metadata-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.414887 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba692abd-4b39-486b-9586-0619e79aa774-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ba692abd-4b39-486b-9586-0619e79aa774\") " pod="openstack/nova-metadata-0" Nov 27 08:54:50 crc 
kubenswrapper[4971]: I1127 08:54:50.416822 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba692abd-4b39-486b-9586-0619e79aa774-config-data\") pod \"nova-metadata-0\" (UID: \"ba692abd-4b39-486b-9586-0619e79aa774\") " pod="openstack/nova-metadata-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.430079 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2s2z\" (UniqueName: \"kubernetes.io/projected/ba692abd-4b39-486b-9586-0619e79aa774-kube-api-access-d2s2z\") pod \"nova-metadata-0\" (UID: \"ba692abd-4b39-486b-9586-0619e79aa774\") " pod="openstack/nova-metadata-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.513964 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-ovsdbserver-nb\") pod \"dnsmasq-dns-5d8fdb9c79-qxcdk\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.514074 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-config\") pod \"dnsmasq-dns-5d8fdb9c79-qxcdk\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.514099 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kwxc\" (UniqueName: \"kubernetes.io/projected/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-kube-api-access-4kwxc\") pod \"dnsmasq-dns-5d8fdb9c79-qxcdk\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.514155 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hmzh\" (UniqueName: \"kubernetes.io/projected/3c50701f-bf73-43e1-9306-969eb07fbf5a-kube-api-access-6hmzh\") pod \"nova-scheduler-0\" (UID: \"3c50701f-bf73-43e1-9306-969eb07fbf5a\") " pod="openstack/nova-scheduler-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.514228 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c50701f-bf73-43e1-9306-969eb07fbf5a-config-data\") pod \"nova-scheduler-0\" (UID: \"3c50701f-bf73-43e1-9306-969eb07fbf5a\") " pod="openstack/nova-scheduler-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.514246 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-dns-svc\") pod \"dnsmasq-dns-5d8fdb9c79-qxcdk\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.514317 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c50701f-bf73-43e1-9306-969eb07fbf5a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3c50701f-bf73-43e1-9306-969eb07fbf5a\") " pod="openstack/nova-scheduler-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.514683 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-ovsdbserver-sb\") pod \"dnsmasq-dns-5d8fdb9c79-qxcdk\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.519692 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c50701f-bf73-43e1-9306-969eb07fbf5a-config-data\") pod \"nova-scheduler-0\" (UID: \"3c50701f-bf73-43e1-9306-969eb07fbf5a\") " pod="openstack/nova-scheduler-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.523756 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c50701f-bf73-43e1-9306-969eb07fbf5a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3c50701f-bf73-43e1-9306-969eb07fbf5a\") " pod="openstack/nova-scheduler-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.535235 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hmzh\" (UniqueName: \"kubernetes.io/projected/3c50701f-bf73-43e1-9306-969eb07fbf5a-kube-api-access-6hmzh\") pod \"nova-scheduler-0\" (UID: \"3c50701f-bf73-43e1-9306-969eb07fbf5a\") " pod="openstack/nova-scheduler-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.552639 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:54:50 crc kubenswrapper[4971]: E1127 08:54:50.553152 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.564045 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.617833 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-dns-svc\") pod \"dnsmasq-dns-5d8fdb9c79-qxcdk\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.618037 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-ovsdbserver-sb\") pod \"dnsmasq-dns-5d8fdb9c79-qxcdk\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.618067 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-ovsdbserver-nb\") pod \"dnsmasq-dns-5d8fdb9c79-qxcdk\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.618139 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-config\") pod \"dnsmasq-dns-5d8fdb9c79-qxcdk\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.618183 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kwxc\" (UniqueName: \"kubernetes.io/projected/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-kube-api-access-4kwxc\") pod \"dnsmasq-dns-5d8fdb9c79-qxcdk\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.620666 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-dns-svc\") pod \"dnsmasq-dns-5d8fdb9c79-qxcdk\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.629020 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-config\") pod \"dnsmasq-dns-5d8fdb9c79-qxcdk\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.629642 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-ovsdbserver-sb\") pod \"dnsmasq-dns-5d8fdb9c79-qxcdk\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.629884 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-ovsdbserver-nb\") pod \"dnsmasq-dns-5d8fdb9c79-qxcdk\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.656929 
4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kwxc\" (UniqueName: \"kubernetes.io/projected/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-kube-api-access-4kwxc\") pod \"dnsmasq-dns-5d8fdb9c79-qxcdk\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.685154 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.697493 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.708962 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.722432 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 27 08:54:50 crc kubenswrapper[4971]: I1127 08:54:50.817246 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-f6m7q"] Nov 27 08:54:50 crc kubenswrapper[4971]: W1127 08:54:50.825490 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4cb90f92_2547_47f8_adc6_7c7033567f90.slice/crio-d4be3158ff5a7e3c51ce66e740d09567c22c2fcc5dedab91d1f07812ce613065 WatchSource:0}: Error finding container d4be3158ff5a7e3c51ce66e740d09567c22c2fcc5dedab91d1f07812ce613065: Status 404 returned error can't find the container with id d4be3158ff5a7e3c51ce66e740d09567c22c2fcc5dedab91d1f07812ce613065 Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.009878 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7dg5v"] Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.012146 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7dg5v" Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.017644 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.018083 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.027103 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7dg5v"] Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.069649 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"74663fef-43f4-478d-9702-eb41e85cca23","Type":"ContainerStarted","Data":"706e9d4a89c02dd008288ce285d3839c3e3b4c052e37e3d9d412de74ce797121"} Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.070945 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-f6m7q" event={"ID":"4cb90f92-2547-47f8-adc6-7c7033567f90","Type":"ContainerStarted","Data":"d4be3158ff5a7e3c51ce66e740d09567c22c2fcc5dedab91d1f07812ce613065"} Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.139300 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be8ebf05-06da-4453-a63a-60a42cbe569f-scripts\") pod \"nova-cell1-conductor-db-sync-7dg5v\" (UID: \"be8ebf05-06da-4453-a63a-60a42cbe569f\") " pod="openstack/nova-cell1-conductor-db-sync-7dg5v" Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.139470 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be8ebf05-06da-4453-a63a-60a42cbe569f-config-data\") pod \"nova-cell1-conductor-db-sync-7dg5v\" (UID: \"be8ebf05-06da-4453-a63a-60a42cbe569f\") " pod="openstack/nova-cell1-conductor-db-sync-7dg5v" Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.139734 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be8ebf05-06da-4453-a63a-60a42cbe569f-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7dg5v\" (UID: \"be8ebf05-06da-4453-a63a-60a42cbe569f\") " pod="openstack/nova-cell1-conductor-db-sync-7dg5v" Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.139849 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9tr9\" (UniqueName: \"kubernetes.io/projected/be8ebf05-06da-4453-a63a-60a42cbe569f-kube-api-access-d9tr9\") pod \"nova-cell1-conductor-db-sync-7dg5v\" (UID: \"be8ebf05-06da-4453-a63a-60a42cbe569f\") " pod="openstack/nova-cell1-conductor-db-sync-7dg5v" Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.229641 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 27 08:54:51 crc kubenswrapper[4971]: W1127 08:54:51.231330 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3009b067_05e5_435b_af96_a3fd67beb32e.slice/crio-794799ee64e08f4b73c5c5d48704acb8349a7bbe50fd4f2dc87fd90ddc289ec7 WatchSource:0}: Error finding container 794799ee64e08f4b73c5c5d48704acb8349a7bbe50fd4f2dc87fd90ddc289ec7: Status 404 returned error can't find the container with id 
794799ee64e08f4b73c5c5d48704acb8349a7bbe50fd4f2dc87fd90ddc289ec7 Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.242233 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be8ebf05-06da-4453-a63a-60a42cbe569f-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7dg5v\" (UID: \"be8ebf05-06da-4453-a63a-60a42cbe569f\") " pod="openstack/nova-cell1-conductor-db-sync-7dg5v" Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.242304 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9tr9\" (UniqueName: \"kubernetes.io/projected/be8ebf05-06da-4453-a63a-60a42cbe569f-kube-api-access-d9tr9\") pod \"nova-cell1-conductor-db-sync-7dg5v\" (UID: \"be8ebf05-06da-4453-a63a-60a42cbe569f\") " pod="openstack/nova-cell1-conductor-db-sync-7dg5v" Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.242404 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be8ebf05-06da-4453-a63a-60a42cbe569f-scripts\") pod \"nova-cell1-conductor-db-sync-7dg5v\" (UID: \"be8ebf05-06da-4453-a63a-60a42cbe569f\") " pod="openstack/nova-cell1-conductor-db-sync-7dg5v" Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.242440 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be8ebf05-06da-4453-a63a-60a42cbe569f-config-data\") pod \"nova-cell1-conductor-db-sync-7dg5v\" (UID: \"be8ebf05-06da-4453-a63a-60a42cbe569f\") " pod="openstack/nova-cell1-conductor-db-sync-7dg5v" Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.251013 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be8ebf05-06da-4453-a63a-60a42cbe569f-scripts\") pod \"nova-cell1-conductor-db-sync-7dg5v\" (UID: \"be8ebf05-06da-4453-a63a-60a42cbe569f\") " pod="openstack/nova-cell1-conductor-db-sync-7dg5v" Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.251242 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be8ebf05-06da-4453-a63a-60a42cbe569f-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7dg5v\" (UID: \"be8ebf05-06da-4453-a63a-60a42cbe569f\") " pod="openstack/nova-cell1-conductor-db-sync-7dg5v" Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.255347 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be8ebf05-06da-4453-a63a-60a42cbe569f-config-data\") pod \"nova-cell1-conductor-db-sync-7dg5v\" (UID: \"be8ebf05-06da-4453-a63a-60a42cbe569f\") " pod="openstack/nova-cell1-conductor-db-sync-7dg5v" Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.267044 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9tr9\" (UniqueName: \"kubernetes.io/projected/be8ebf05-06da-4453-a63a-60a42cbe569f-kube-api-access-d9tr9\") pod \"nova-cell1-conductor-db-sync-7dg5v\" (UID: \"be8ebf05-06da-4453-a63a-60a42cbe569f\") " pod="openstack/nova-cell1-conductor-db-sync-7dg5v" Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.333468 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7dg5v" Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.385196 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.393432 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d8fdb9c79-qxcdk"] Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.501490 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:54:51 crc kubenswrapper[4971]: W1127 08:54:51.518495 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c50701f_bf73_43e1_9306_969eb07fbf5a.slice/crio-142695ccd69070fccca0a2e642c08f8c533c1aef5f9d2211e1b1172f120bd1b8 WatchSource:0}: Error finding container 142695ccd69070fccca0a2e642c08f8c533c1aef5f9d2211e1b1172f120bd1b8: Status 404 returned error can't find the container with id 142695ccd69070fccca0a2e642c08f8c533c1aef5f9d2211e1b1172f120bd1b8 Nov 27 08:54:51 crc kubenswrapper[4971]: I1127 08:54:51.913232 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7dg5v"] Nov 27 08:54:52 crc kubenswrapper[4971]: I1127 08:54:52.111059 4971 generic.go:334] "Generic (PLEG): container finished" podID="f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e" containerID="a399d2c0445ad8d19248882370913509dc9c9e9bc68625f0cee5248cd88d4039" exitCode=0 Nov 27 08:54:52 crc kubenswrapper[4971]: I1127 08:54:52.111149 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" event={"ID":"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e","Type":"ContainerDied","Data":"a399d2c0445ad8d19248882370913509dc9c9e9bc68625f0cee5248cd88d4039"} Nov 27 08:54:52 crc kubenswrapper[4971]: I1127 08:54:52.111217 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" event={"ID":"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e","Type":"ContainerStarted","Data":"4dd5d5bdf1e10b0ee7341b77aa5ef778efbc1eb5cccfd97949507418952d5c39"} Nov 27 08:54:52 crc kubenswrapper[4971]: I1127 08:54:52.114524 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7dg5v" event={"ID":"be8ebf05-06da-4453-a63a-60a42cbe569f","Type":"ContainerStarted","Data":"ad370275f44b2043853e5fcbabd920a1889cded587f50525657d55cd5caf98ec"} Nov 27 08:54:52 crc kubenswrapper[4971]: I1127 08:54:52.117747 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3009b067-05e5-435b-af96-a3fd67beb32e","Type":"ContainerStarted","Data":"794799ee64e08f4b73c5c5d48704acb8349a7bbe50fd4f2dc87fd90ddc289ec7"} Nov 27 08:54:52 crc kubenswrapper[4971]: I1127 08:54:52.121608 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3c50701f-bf73-43e1-9306-969eb07fbf5a","Type":"ContainerStarted","Data":"142695ccd69070fccca0a2e642c08f8c533c1aef5f9d2211e1b1172f120bd1b8"} Nov 27 08:54:52 crc kubenswrapper[4971]: I1127 08:54:52.124816 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-f6m7q" event={"ID":"4cb90f92-2547-47f8-adc6-7c7033567f90","Type":"ContainerStarted","Data":"196658c0cde7d522b136ba2b922f7123a780e5b94965ec4244d4a8f6a129d110"} Nov 27 08:54:52 crc kubenswrapper[4971]: I1127 08:54:52.127294 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"ba692abd-4b39-486b-9586-0619e79aa774","Type":"ContainerStarted","Data":"1bacadff396dabe81c5d7067ae28195f05f3bfbecba0e67645b15492683c812d"} Nov 27 08:54:52 crc kubenswrapper[4971]: I1127 08:54:52.163121 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-f6m7q" podStartSLOduration=3.163095118 podStartE2EDuration="3.163095118s" podCreationTimestamp="2025-11-27 08:54:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:54:52.156023706 +0000 UTC m=+7330.348067634" watchObservedRunningTime="2025-11-27 08:54:52.163095118 +0000 UTC m=+7330.355139036" Nov 27 08:54:53 crc kubenswrapper[4971]: I1127 08:54:53.145152 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7dg5v" event={"ID":"be8ebf05-06da-4453-a63a-60a42cbe569f","Type":"ContainerStarted","Data":"16a37aa11cc69085e1f1d0086e25071c7aa52d629323f75d4e6a60c2876ff5d6"} Nov 27 08:54:53 crc kubenswrapper[4971]: I1127 08:54:53.169437 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-7dg5v" podStartSLOduration=3.169417744 podStartE2EDuration="3.169417744s" podCreationTimestamp="2025-11-27 08:54:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:54:53.163589167 +0000 UTC m=+7331.355633095" watchObservedRunningTime="2025-11-27 08:54:53.169417744 +0000 UTC m=+7331.361461662" Nov 27 08:54:53 crc kubenswrapper[4971]: I1127 08:54:53.678026 4971 scope.go:117] "RemoveContainer" containerID="42fcb129990a3a9bce82819ac4069813165da82d40f539beb37df00c7359ac86" Nov 27 08:54:55 crc kubenswrapper[4971]: I1127 08:54:55.165953 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3009b067-05e5-435b-af96-a3fd67beb32e","Type":"ContainerStarted","Data":"049af22b0c7d1906d48b6bef890039f31781624d2adf0c4753528ef38a67777f"} Nov 27 08:54:55 crc kubenswrapper[4971]: I1127 08:54:55.168788 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3c50701f-bf73-43e1-9306-969eb07fbf5a","Type":"ContainerStarted","Data":"f321cddd2c0bbdbecee90320d7f1cee0e7cfa2aa9da8ed335ced59adc2cde06e"} Nov 27 08:54:55 crc kubenswrapper[4971]: I1127 08:54:55.183346 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ba692abd-4b39-486b-9586-0619e79aa774","Type":"ContainerStarted","Data":"1befefa6b6e8296897a3344720df03990a77535fb62f03d8c81f6bb7128514c8"} Nov 27 08:54:55 crc kubenswrapper[4971]: I1127 08:54:55.183403 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ba692abd-4b39-486b-9586-0619e79aa774","Type":"ContainerStarted","Data":"76007cd90a94aeb935d6550bef4cd9b83fc010d173931c23c98113659387f73b"} Nov 27 08:54:55 crc kubenswrapper[4971]: I1127 08:54:55.202314 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.555414591 podStartE2EDuration="6.202291683s" podCreationTimestamp="2025-11-27 08:54:49 +0000 UTC" firstStartedPulling="2025-11-27 08:54:51.233319525 +0000 UTC m=+7329.425363443" lastFinishedPulling="2025-11-27 08:54:53.880196577 +0000 UTC m=+7332.072240535" observedRunningTime="2025-11-27 08:54:55.199325489 +0000 UTC m=+7333.391369437" 
watchObservedRunningTime="2025-11-27 08:54:55.202291683 +0000 UTC m=+7333.394335631" Nov 27 08:54:55 crc kubenswrapper[4971]: I1127 08:54:55.203157 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" event={"ID":"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e","Type":"ContainerStarted","Data":"dc4a5a7ba4e0734b4a4819b8af4682864054b3b22e1bea26f0e720371fa4ff31"} Nov 27 08:54:55 crc kubenswrapper[4971]: I1127 08:54:55.203554 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:54:55 crc kubenswrapper[4971]: I1127 08:54:55.205882 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"74663fef-43f4-478d-9702-eb41e85cca23","Type":"ContainerStarted","Data":"be40104a624c0e3f2e71d2bda439ce7e3c75c1b02d1f38b756483624b48a0870"} Nov 27 08:54:55 crc kubenswrapper[4971]: I1127 08:54:55.206016 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"74663fef-43f4-478d-9702-eb41e85cca23","Type":"ContainerStarted","Data":"70a330677f9ddbb80feb3fa9e4a3790cd7eeb5ac263e2d096266cf7c242ff1e8"} Nov 27 08:54:55 crc kubenswrapper[4971]: I1127 08:54:55.247949 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.888985353 podStartE2EDuration="5.24791541s" podCreationTimestamp="2025-11-27 08:54:50 +0000 UTC" firstStartedPulling="2025-11-27 08:54:51.521064954 +0000 UTC m=+7329.713108872" lastFinishedPulling="2025-11-27 08:54:53.879995021 +0000 UTC m=+7332.072038929" observedRunningTime="2025-11-27 08:54:55.220945308 +0000 UTC m=+7333.412989276" watchObservedRunningTime="2025-11-27 08:54:55.24791541 +0000 UTC m=+7333.439959328" Nov 27 08:54:55 crc kubenswrapper[4971]: I1127 08:54:55.276668 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.825803704 podStartE2EDuration="5.276629222s" podCreationTimestamp="2025-11-27 08:54:50 +0000 UTC" firstStartedPulling="2025-11-27 08:54:51.412168186 +0000 UTC m=+7329.604212104" lastFinishedPulling="2025-11-27 08:54:53.862993704 +0000 UTC m=+7332.055037622" observedRunningTime="2025-11-27 08:54:55.266135852 +0000 UTC m=+7333.458179770" watchObservedRunningTime="2025-11-27 08:54:55.276629222 +0000 UTC m=+7333.468673130" Nov 27 08:54:55 crc kubenswrapper[4971]: I1127 08:54:55.298906 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.209291101 podStartE2EDuration="6.298888519s" podCreationTimestamp="2025-11-27 08:54:49 +0000 UTC" firstStartedPulling="2025-11-27 08:54:50.773401196 +0000 UTC m=+7328.965445114" lastFinishedPulling="2025-11-27 08:54:53.862998564 +0000 UTC m=+7332.055042532" observedRunningTime="2025-11-27 08:54:55.29089057 +0000 UTC m=+7333.482934488" watchObservedRunningTime="2025-11-27 08:54:55.298888519 +0000 UTC m=+7333.490932437" Nov 27 08:54:55 crc kubenswrapper[4971]: I1127 08:54:55.325025 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" podStartSLOduration=5.325006737 podStartE2EDuration="5.325006737s" podCreationTimestamp="2025-11-27 08:54:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:54:55.311885112 +0000 UTC m=+7333.503929050" watchObservedRunningTime="2025-11-27 08:54:55.325006737 +0000 UTC 
m=+7333.517050655" Nov 27 08:54:55 crc kubenswrapper[4971]: I1127 08:54:55.565088 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:54:55 crc kubenswrapper[4971]: I1127 08:54:55.686743 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 27 08:54:55 crc kubenswrapper[4971]: I1127 08:54:55.686808 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 27 08:54:55 crc kubenswrapper[4971]: I1127 08:54:55.698016 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 27 08:54:56 crc kubenswrapper[4971]: I1127 08:54:56.223339 4971 generic.go:334] "Generic (PLEG): container finished" podID="be8ebf05-06da-4453-a63a-60a42cbe569f" containerID="16a37aa11cc69085e1f1d0086e25071c7aa52d629323f75d4e6a60c2876ff5d6" exitCode=0 Nov 27 08:54:56 crc kubenswrapper[4971]: I1127 08:54:56.225320 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7dg5v" event={"ID":"be8ebf05-06da-4453-a63a-60a42cbe569f","Type":"ContainerDied","Data":"16a37aa11cc69085e1f1d0086e25071c7aa52d629323f75d4e6a60c2876ff5d6"} Nov 27 08:54:57 crc kubenswrapper[4971]: I1127 08:54:57.240504 4971 generic.go:334] "Generic (PLEG): container finished" podID="4cb90f92-2547-47f8-adc6-7c7033567f90" containerID="196658c0cde7d522b136ba2b922f7123a780e5b94965ec4244d4a8f6a129d110" exitCode=0 Nov 27 08:54:57 crc kubenswrapper[4971]: I1127 08:54:57.240853 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-f6m7q" event={"ID":"4cb90f92-2547-47f8-adc6-7c7033567f90","Type":"ContainerDied","Data":"196658c0cde7d522b136ba2b922f7123a780e5b94965ec4244d4a8f6a129d110"} Nov 27 08:54:57 crc kubenswrapper[4971]: I1127 08:54:57.614768 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7dg5v" Nov 27 08:54:57 crc kubenswrapper[4971]: I1127 08:54:57.796202 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be8ebf05-06da-4453-a63a-60a42cbe569f-scripts\") pod \"be8ebf05-06da-4453-a63a-60a42cbe569f\" (UID: \"be8ebf05-06da-4453-a63a-60a42cbe569f\") " Nov 27 08:54:57 crc kubenswrapper[4971]: I1127 08:54:57.796381 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9tr9\" (UniqueName: \"kubernetes.io/projected/be8ebf05-06da-4453-a63a-60a42cbe569f-kube-api-access-d9tr9\") pod \"be8ebf05-06da-4453-a63a-60a42cbe569f\" (UID: \"be8ebf05-06da-4453-a63a-60a42cbe569f\") " Nov 27 08:54:57 crc kubenswrapper[4971]: I1127 08:54:57.796521 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be8ebf05-06da-4453-a63a-60a42cbe569f-combined-ca-bundle\") pod \"be8ebf05-06da-4453-a63a-60a42cbe569f\" (UID: \"be8ebf05-06da-4453-a63a-60a42cbe569f\") " Nov 27 08:54:57 crc kubenswrapper[4971]: I1127 08:54:57.796816 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be8ebf05-06da-4453-a63a-60a42cbe569f-config-data\") pod \"be8ebf05-06da-4453-a63a-60a42cbe569f\" (UID: \"be8ebf05-06da-4453-a63a-60a42cbe569f\") " Nov 27 08:54:57 crc kubenswrapper[4971]: I1127 08:54:57.803888 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be8ebf05-06da-4453-a63a-60a42cbe569f-kube-api-access-d9tr9" (OuterVolumeSpecName: "kube-api-access-d9tr9") pod "be8ebf05-06da-4453-a63a-60a42cbe569f" (UID: "be8ebf05-06da-4453-a63a-60a42cbe569f"). InnerVolumeSpecName "kube-api-access-d9tr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:54:57 crc kubenswrapper[4971]: I1127 08:54:57.804857 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be8ebf05-06da-4453-a63a-60a42cbe569f-scripts" (OuterVolumeSpecName: "scripts") pod "be8ebf05-06da-4453-a63a-60a42cbe569f" (UID: "be8ebf05-06da-4453-a63a-60a42cbe569f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:54:57 crc kubenswrapper[4971]: I1127 08:54:57.828275 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be8ebf05-06da-4453-a63a-60a42cbe569f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "be8ebf05-06da-4453-a63a-60a42cbe569f" (UID: "be8ebf05-06da-4453-a63a-60a42cbe569f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:54:57 crc kubenswrapper[4971]: I1127 08:54:57.831666 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be8ebf05-06da-4453-a63a-60a42cbe569f-config-data" (OuterVolumeSpecName: "config-data") pod "be8ebf05-06da-4453-a63a-60a42cbe569f" (UID: "be8ebf05-06da-4453-a63a-60a42cbe569f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:54:57 crc kubenswrapper[4971]: I1127 08:54:57.899971 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be8ebf05-06da-4453-a63a-60a42cbe569f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:57 crc kubenswrapper[4971]: I1127 08:54:57.900020 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be8ebf05-06da-4453-a63a-60a42cbe569f-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:57 crc kubenswrapper[4971]: I1127 08:54:57.900033 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be8ebf05-06da-4453-a63a-60a42cbe569f-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:57 crc kubenswrapper[4971]: I1127 08:54:57.900140 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9tr9\" (UniqueName: \"kubernetes.io/projected/be8ebf05-06da-4453-a63a-60a42cbe569f-kube-api-access-d9tr9\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.252258 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7dg5v" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.252267 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7dg5v" event={"ID":"be8ebf05-06da-4453-a63a-60a42cbe569f","Type":"ContainerDied","Data":"ad370275f44b2043853e5fcbabd920a1889cded587f50525657d55cd5caf98ec"} Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.252340 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad370275f44b2043853e5fcbabd920a1889cded587f50525657d55cd5caf98ec" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.400195 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 27 08:54:58 crc kubenswrapper[4971]: E1127 08:54:58.400815 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be8ebf05-06da-4453-a63a-60a42cbe569f" containerName="nova-cell1-conductor-db-sync" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.400835 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="be8ebf05-06da-4453-a63a-60a42cbe569f" containerName="nova-cell1-conductor-db-sync" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.401111 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="be8ebf05-06da-4453-a63a-60a42cbe569f" containerName="nova-cell1-conductor-db-sync" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.401914 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.406697 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.427980 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.510946 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c119bf30-716a-491e-ae7a-225b2bdf42fb-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c119bf30-716a-491e-ae7a-225b2bdf42fb\") " pod="openstack/nova-cell1-conductor-0" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.511018 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hs8mj\" (UniqueName: \"kubernetes.io/projected/c119bf30-716a-491e-ae7a-225b2bdf42fb-kube-api-access-hs8mj\") pod \"nova-cell1-conductor-0\" (UID: \"c119bf30-716a-491e-ae7a-225b2bdf42fb\") " pod="openstack/nova-cell1-conductor-0" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.511070 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c119bf30-716a-491e-ae7a-225b2bdf42fb-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c119bf30-716a-491e-ae7a-225b2bdf42fb\") " pod="openstack/nova-cell1-conductor-0" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.612988 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c119bf30-716a-491e-ae7a-225b2bdf42fb-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c119bf30-716a-491e-ae7a-225b2bdf42fb\") " pod="openstack/nova-cell1-conductor-0" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.613082 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hs8mj\" (UniqueName: \"kubernetes.io/projected/c119bf30-716a-491e-ae7a-225b2bdf42fb-kube-api-access-hs8mj\") pod \"nova-cell1-conductor-0\" (UID: \"c119bf30-716a-491e-ae7a-225b2bdf42fb\") " pod="openstack/nova-cell1-conductor-0" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.613112 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c119bf30-716a-491e-ae7a-225b2bdf42fb-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c119bf30-716a-491e-ae7a-225b2bdf42fb\") " pod="openstack/nova-cell1-conductor-0" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.619351 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c119bf30-716a-491e-ae7a-225b2bdf42fb-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c119bf30-716a-491e-ae7a-225b2bdf42fb\") " pod="openstack/nova-cell1-conductor-0" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.619774 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c119bf30-716a-491e-ae7a-225b2bdf42fb-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c119bf30-716a-491e-ae7a-225b2bdf42fb\") " pod="openstack/nova-cell1-conductor-0" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.622082 4971 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-f6m7q" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.634605 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hs8mj\" (UniqueName: \"kubernetes.io/projected/c119bf30-716a-491e-ae7a-225b2bdf42fb-kube-api-access-hs8mj\") pod \"nova-cell1-conductor-0\" (UID: \"c119bf30-716a-491e-ae7a-225b2bdf42fb\") " pod="openstack/nova-cell1-conductor-0" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.714389 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cb90f92-2547-47f8-adc6-7c7033567f90-combined-ca-bundle\") pod \"4cb90f92-2547-47f8-adc6-7c7033567f90\" (UID: \"4cb90f92-2547-47f8-adc6-7c7033567f90\") " Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.714959 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4cb90f92-2547-47f8-adc6-7c7033567f90-scripts\") pod \"4cb90f92-2547-47f8-adc6-7c7033567f90\" (UID: \"4cb90f92-2547-47f8-adc6-7c7033567f90\") " Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.715071 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8948l\" (UniqueName: \"kubernetes.io/projected/4cb90f92-2547-47f8-adc6-7c7033567f90-kube-api-access-8948l\") pod \"4cb90f92-2547-47f8-adc6-7c7033567f90\" (UID: \"4cb90f92-2547-47f8-adc6-7c7033567f90\") " Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.715145 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cb90f92-2547-47f8-adc6-7c7033567f90-config-data\") pod \"4cb90f92-2547-47f8-adc6-7c7033567f90\" (UID: \"4cb90f92-2547-47f8-adc6-7c7033567f90\") " Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.719947 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cb90f92-2547-47f8-adc6-7c7033567f90-scripts" (OuterVolumeSpecName: "scripts") pod "4cb90f92-2547-47f8-adc6-7c7033567f90" (UID: "4cb90f92-2547-47f8-adc6-7c7033567f90"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.719961 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cb90f92-2547-47f8-adc6-7c7033567f90-kube-api-access-8948l" (OuterVolumeSpecName: "kube-api-access-8948l") pod "4cb90f92-2547-47f8-adc6-7c7033567f90" (UID: "4cb90f92-2547-47f8-adc6-7c7033567f90"). InnerVolumeSpecName "kube-api-access-8948l". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.734255 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.740920 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cb90f92-2547-47f8-adc6-7c7033567f90-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4cb90f92-2547-47f8-adc6-7c7033567f90" (UID: "4cb90f92-2547-47f8-adc6-7c7033567f90"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:54:58 crc kubenswrapper[4971]: I1127 08:54:58.743477 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cb90f92-2547-47f8-adc6-7c7033567f90-config-data" (OuterVolumeSpecName: "config-data") pod "4cb90f92-2547-47f8-adc6-7c7033567f90" (UID: "4cb90f92-2547-47f8-adc6-7c7033567f90"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:54:59 crc kubenswrapper[4971]: I1127 08:54:59.654400 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8948l\" (UniqueName: \"kubernetes.io/projected/4cb90f92-2547-47f8-adc6-7c7033567f90-kube-api-access-8948l\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:59 crc kubenswrapper[4971]: I1127 08:54:59.654448 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cb90f92-2547-47f8-adc6-7c7033567f90-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:59 crc kubenswrapper[4971]: I1127 08:54:59.654463 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cb90f92-2547-47f8-adc6-7c7033567f90-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:59 crc kubenswrapper[4971]: I1127 08:54:59.654474 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4cb90f92-2547-47f8-adc6-7c7033567f90-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:54:59 crc kubenswrapper[4971]: I1127 08:54:59.677063 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 27 08:54:59 crc kubenswrapper[4971]: I1127 08:54:59.677994 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="74663fef-43f4-478d-9702-eb41e85cca23" containerName="nova-api-log" containerID="cri-o://70a330677f9ddbb80feb3fa9e4a3790cd7eeb5ac263e2d096266cf7c242ff1e8" gracePeriod=30 Nov 27 08:54:59 crc kubenswrapper[4971]: I1127 08:54:59.680654 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="74663fef-43f4-478d-9702-eb41e85cca23" containerName="nova-api-api" containerID="cri-o://be40104a624c0e3f2e71d2bda439ce7e3c75c1b02d1f38b756483624b48a0870" gracePeriod=30 Nov 27 08:54:59 crc kubenswrapper[4971]: I1127 08:54:59.686221 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-f6m7q" event={"ID":"4cb90f92-2547-47f8-adc6-7c7033567f90","Type":"ContainerDied","Data":"d4be3158ff5a7e3c51ce66e740d09567c22c2fcc5dedab91d1f07812ce613065"} Nov 27 08:54:59 crc kubenswrapper[4971]: I1127 08:54:59.686290 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d4be3158ff5a7e3c51ce66e740d09567c22c2fcc5dedab91d1f07812ce613065" Nov 27 08:54:59 crc kubenswrapper[4971]: I1127 08:54:59.686479 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-f6m7q" Nov 27 08:54:59 crc kubenswrapper[4971]: I1127 08:54:59.690538 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:54:59 crc kubenswrapper[4971]: I1127 08:54:59.690762 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="3c50701f-bf73-43e1-9306-969eb07fbf5a" containerName="nova-scheduler-scheduler" containerID="cri-o://f321cddd2c0bbdbecee90320d7f1cee0e7cfa2aa9da8ed335ced59adc2cde06e" gracePeriod=30 Nov 27 08:54:59 crc kubenswrapper[4971]: I1127 08:54:59.700276 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 08:54:59 crc kubenswrapper[4971]: I1127 08:54:59.700582 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ba692abd-4b39-486b-9586-0619e79aa774" containerName="nova-metadata-log" containerID="cri-o://76007cd90a94aeb935d6550bef4cd9b83fc010d173931c23c98113659387f73b" gracePeriod=30 Nov 27 08:54:59 crc kubenswrapper[4971]: I1127 08:54:59.700765 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ba692abd-4b39-486b-9586-0619e79aa774" containerName="nova-metadata-metadata" containerID="cri-o://1befefa6b6e8296897a3344720df03990a77535fb62f03d8c81f6bb7128514c8" gracePeriod=30 Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.081615 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 27 08:55:00 crc kubenswrapper[4971]: E1127 08:55:00.123061 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba692abd_4b39_486b_9586_0619e79aa774.slice/crio-conmon-1befefa6b6e8296897a3344720df03990a77535fb62f03d8c81f6bb7128514c8.scope\": RecentStats: unable to find data in memory cache]" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.253482 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.367491 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba692abd-4b39-486b-9586-0619e79aa774-config-data\") pod \"ba692abd-4b39-486b-9586-0619e79aa774\" (UID: \"ba692abd-4b39-486b-9586-0619e79aa774\") " Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.367631 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba692abd-4b39-486b-9586-0619e79aa774-logs\") pod \"ba692abd-4b39-486b-9586-0619e79aa774\" (UID: \"ba692abd-4b39-486b-9586-0619e79aa774\") " Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.367701 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2s2z\" (UniqueName: \"kubernetes.io/projected/ba692abd-4b39-486b-9586-0619e79aa774-kube-api-access-d2s2z\") pod \"ba692abd-4b39-486b-9586-0619e79aa774\" (UID: \"ba692abd-4b39-486b-9586-0619e79aa774\") " Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.367760 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba692abd-4b39-486b-9586-0619e79aa774-combined-ca-bundle\") pod \"ba692abd-4b39-486b-9586-0619e79aa774\" (UID: \"ba692abd-4b39-486b-9586-0619e79aa774\") " Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.368573 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba692abd-4b39-486b-9586-0619e79aa774-logs" (OuterVolumeSpecName: "logs") pod "ba692abd-4b39-486b-9586-0619e79aa774" (UID: "ba692abd-4b39-486b-9586-0619e79aa774"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.372384 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba692abd-4b39-486b-9586-0619e79aa774-kube-api-access-d2s2z" (OuterVolumeSpecName: "kube-api-access-d2s2z") pod "ba692abd-4b39-486b-9586-0619e79aa774" (UID: "ba692abd-4b39-486b-9586-0619e79aa774"). InnerVolumeSpecName "kube-api-access-d2s2z". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.391245 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.399508 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba692abd-4b39-486b-9586-0619e79aa774-config-data" (OuterVolumeSpecName: "config-data") pod "ba692abd-4b39-486b-9586-0619e79aa774" (UID: "ba692abd-4b39-486b-9586-0619e79aa774"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.402452 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba692abd-4b39-486b-9586-0619e79aa774-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ba692abd-4b39-486b-9586-0619e79aa774" (UID: "ba692abd-4b39-486b-9586-0619e79aa774"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.469851 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba692abd-4b39-486b-9586-0619e79aa774-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.469884 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba692abd-4b39-486b-9586-0619e79aa774-logs\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.469896 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2s2z\" (UniqueName: \"kubernetes.io/projected/ba692abd-4b39-486b-9586-0619e79aa774-kube-api-access-d2s2z\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.469908 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba692abd-4b39-486b-9586-0619e79aa774-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.577026 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74663fef-43f4-478d-9702-eb41e85cca23-combined-ca-bundle\") pod \"74663fef-43f4-478d-9702-eb41e85cca23\" (UID: \"74663fef-43f4-478d-9702-eb41e85cca23\") " Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.577831 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kn6zc\" (UniqueName: \"kubernetes.io/projected/74663fef-43f4-478d-9702-eb41e85cca23-kube-api-access-kn6zc\") pod \"74663fef-43f4-478d-9702-eb41e85cca23\" (UID: \"74663fef-43f4-478d-9702-eb41e85cca23\") " Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.577973 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74663fef-43f4-478d-9702-eb41e85cca23-logs\") pod \"74663fef-43f4-478d-9702-eb41e85cca23\" (UID: \"74663fef-43f4-478d-9702-eb41e85cca23\") " Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.578811 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74663fef-43f4-478d-9702-eb41e85cca23-logs" (OuterVolumeSpecName: "logs") pod "74663fef-43f4-478d-9702-eb41e85cca23" (UID: "74663fef-43f4-478d-9702-eb41e85cca23"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.579105 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74663fef-43f4-478d-9702-eb41e85cca23-config-data\") pod \"74663fef-43f4-478d-9702-eb41e85cca23\" (UID: \"74663fef-43f4-478d-9702-eb41e85cca23\") " Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.579663 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74663fef-43f4-478d-9702-eb41e85cca23-logs\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.582147 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74663fef-43f4-478d-9702-eb41e85cca23-kube-api-access-kn6zc" (OuterVolumeSpecName: "kube-api-access-kn6zc") pod "74663fef-43f4-478d-9702-eb41e85cca23" (UID: "74663fef-43f4-478d-9702-eb41e85cca23"). 
InnerVolumeSpecName "kube-api-access-kn6zc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.602832 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74663fef-43f4-478d-9702-eb41e85cca23-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "74663fef-43f4-478d-9702-eb41e85cca23" (UID: "74663fef-43f4-478d-9702-eb41e85cca23"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.605105 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74663fef-43f4-478d-9702-eb41e85cca23-config-data" (OuterVolumeSpecName: "config-data") pod "74663fef-43f4-478d-9702-eb41e85cca23" (UID: "74663fef-43f4-478d-9702-eb41e85cca23"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.680951 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.681070 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.689771 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74663fef-43f4-478d-9702-eb41e85cca23-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.689820 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kn6zc\" (UniqueName: \"kubernetes.io/projected/74663fef-43f4-478d-9702-eb41e85cca23-kube-api-access-kn6zc\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.689840 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74663fef-43f4-478d-9702-eb41e85cca23-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.693395 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.711018 4971 generic.go:334] "Generic (PLEG): container finished" podID="74663fef-43f4-478d-9702-eb41e85cca23" containerID="be40104a624c0e3f2e71d2bda439ce7e3c75c1b02d1f38b756483624b48a0870" exitCode=0 Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.711052 4971 generic.go:334] "Generic (PLEG): container finished" podID="74663fef-43f4-478d-9702-eb41e85cca23" containerID="70a330677f9ddbb80feb3fa9e4a3790cd7eeb5ac263e2d096266cf7c242ff1e8" exitCode=143 Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.711112 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"74663fef-43f4-478d-9702-eb41e85cca23","Type":"ContainerDied","Data":"be40104a624c0e3f2e71d2bda439ce7e3c75c1b02d1f38b756483624b48a0870"} Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.711115 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.711142 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"74663fef-43f4-478d-9702-eb41e85cca23","Type":"ContainerDied","Data":"70a330677f9ddbb80feb3fa9e4a3790cd7eeb5ac263e2d096266cf7c242ff1e8"} Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.711156 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"74663fef-43f4-478d-9702-eb41e85cca23","Type":"ContainerDied","Data":"706e9d4a89c02dd008288ce285d3839c3e3b4c052e37e3d9d412de74ce797121"} Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.711173 4971 scope.go:117] "RemoveContainer" containerID="be40104a624c0e3f2e71d2bda439ce7e3c75c1b02d1f38b756483624b48a0870" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.711788 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.715222 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ba692abd-4b39-486b-9586-0619e79aa774","Type":"ContainerDied","Data":"1befefa6b6e8296897a3344720df03990a77535fb62f03d8c81f6bb7128514c8"} Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.715230 4971 generic.go:334] "Generic (PLEG): container finished" podID="ba692abd-4b39-486b-9586-0619e79aa774" containerID="1befefa6b6e8296897a3344720df03990a77535fb62f03d8c81f6bb7128514c8" exitCode=0 Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.715275 4971 generic.go:334] "Generic (PLEG): container finished" podID="ba692abd-4b39-486b-9586-0619e79aa774" containerID="76007cd90a94aeb935d6550bef4cd9b83fc010d173931c23c98113659387f73b" exitCode=143 Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.715360 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.715374 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ba692abd-4b39-486b-9586-0619e79aa774","Type":"ContainerDied","Data":"76007cd90a94aeb935d6550bef4cd9b83fc010d173931c23c98113659387f73b"} Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.715418 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ba692abd-4b39-486b-9586-0619e79aa774","Type":"ContainerDied","Data":"1bacadff396dabe81c5d7067ae28195f05f3bfbecba0e67645b15492683c812d"} Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.717361 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c119bf30-716a-491e-ae7a-225b2bdf42fb","Type":"ContainerStarted","Data":"7c13bb87dc0a13e3e813b43c0aaf1db573a4ae5141fd66eef87b70d22e57de29"} Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.717418 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c119bf30-716a-491e-ae7a-225b2bdf42fb","Type":"ContainerStarted","Data":"9ea4dbcb6c7d34a6c86ab400925ca9228919bbc592ae5e600bf2983b3681660c"} Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.774253 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.774230932 podStartE2EDuration="2.774230932s" podCreationTimestamp="2025-11-27 08:54:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:55:00.766623165 +0000 UTC m=+7338.958667103" watchObservedRunningTime="2025-11-27 08:55:00.774230932 +0000 UTC m=+7338.966274870" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.777959 4971 scope.go:117] "RemoveContainer" containerID="70a330677f9ddbb80feb3fa9e4a3790cd7eeb5ac263e2d096266cf7c242ff1e8" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.872015 4971 scope.go:117] "RemoveContainer" containerID="be40104a624c0e3f2e71d2bda439ce7e3c75c1b02d1f38b756483624b48a0870" Nov 27 08:55:00 crc kubenswrapper[4971]: E1127 08:55:00.883185 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be40104a624c0e3f2e71d2bda439ce7e3c75c1b02d1f38b756483624b48a0870\": container with ID starting with be40104a624c0e3f2e71d2bda439ce7e3c75c1b02d1f38b756483624b48a0870 not found: ID does not exist" containerID="be40104a624c0e3f2e71d2bda439ce7e3c75c1b02d1f38b756483624b48a0870" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.883260 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be40104a624c0e3f2e71d2bda439ce7e3c75c1b02d1f38b756483624b48a0870"} err="failed to get container status \"be40104a624c0e3f2e71d2bda439ce7e3c75c1b02d1f38b756483624b48a0870\": rpc error: code = NotFound desc = could not find container \"be40104a624c0e3f2e71d2bda439ce7e3c75c1b02d1f38b756483624b48a0870\": container with ID starting with be40104a624c0e3f2e71d2bda439ce7e3c75c1b02d1f38b756483624b48a0870 not found: ID does not exist" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.883291 4971 scope.go:117] "RemoveContainer" containerID="70a330677f9ddbb80feb3fa9e4a3790cd7eeb5ac263e2d096266cf7c242ff1e8" Nov 27 08:55:00 crc kubenswrapper[4971]: E1127 08:55:00.887693 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"70a330677f9ddbb80feb3fa9e4a3790cd7eeb5ac263e2d096266cf7c242ff1e8\": container with ID starting with 70a330677f9ddbb80feb3fa9e4a3790cd7eeb5ac263e2d096266cf7c242ff1e8 not found: ID does not exist" containerID="70a330677f9ddbb80feb3fa9e4a3790cd7eeb5ac263e2d096266cf7c242ff1e8" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.898837 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70a330677f9ddbb80feb3fa9e4a3790cd7eeb5ac263e2d096266cf7c242ff1e8"} err="failed to get container status \"70a330677f9ddbb80feb3fa9e4a3790cd7eeb5ac263e2d096266cf7c242ff1e8\": rpc error: code = NotFound desc = could not find container \"70a330677f9ddbb80feb3fa9e4a3790cd7eeb5ac263e2d096266cf7c242ff1e8\": container with ID starting with 70a330677f9ddbb80feb3fa9e4a3790cd7eeb5ac263e2d096266cf7c242ff1e8 not found: ID does not exist" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.898942 4971 scope.go:117] "RemoveContainer" containerID="be40104a624c0e3f2e71d2bda439ce7e3c75c1b02d1f38b756483624b48a0870" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.935227 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be40104a624c0e3f2e71d2bda439ce7e3c75c1b02d1f38b756483624b48a0870"} err="failed to get container status \"be40104a624c0e3f2e71d2bda439ce7e3c75c1b02d1f38b756483624b48a0870\": rpc error: code = NotFound desc = could not find container \"be40104a624c0e3f2e71d2bda439ce7e3c75c1b02d1f38b756483624b48a0870\": container with ID starting with be40104a624c0e3f2e71d2bda439ce7e3c75c1b02d1f38b756483624b48a0870 not found: ID does not exist" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.935467 4971 scope.go:117] "RemoveContainer" containerID="70a330677f9ddbb80feb3fa9e4a3790cd7eeb5ac263e2d096266cf7c242ff1e8" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.936784 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70a330677f9ddbb80feb3fa9e4a3790cd7eeb5ac263e2d096266cf7c242ff1e8"} err="failed to get container status \"70a330677f9ddbb80feb3fa9e4a3790cd7eeb5ac263e2d096266cf7c242ff1e8\": rpc error: code = NotFound desc = could not find container \"70a330677f9ddbb80feb3fa9e4a3790cd7eeb5ac263e2d096266cf7c242ff1e8\": container with ID starting with 70a330677f9ddbb80feb3fa9e4a3790cd7eeb5ac263e2d096266cf7c242ff1e8 not found: ID does not exist" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.936825 4971 scope.go:117] "RemoveContainer" containerID="1befefa6b6e8296897a3344720df03990a77535fb62f03d8c81f6bb7128514c8" Nov 27 08:55:00 crc kubenswrapper[4971]: I1127 08:55:00.937012 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:00.994161 4971 scope.go:117] "RemoveContainer" containerID="76007cd90a94aeb935d6550bef4cd9b83fc010d173931c23c98113659387f73b" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.004998 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.036880 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-559c7c46ff-gddgr"] Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.037579 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" podUID="caf8c7f8-182f-471e-b6ba-cc257b879d1f" 
containerName="dnsmasq-dns" containerID="cri-o://90f2fbf20f00cacb129f9ffb34bd45dececf911add9a0de6ce800f667230729f" gracePeriod=10 Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.054026 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 27 08:55:01 crc kubenswrapper[4971]: E1127 08:55:01.054591 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74663fef-43f4-478d-9702-eb41e85cca23" containerName="nova-api-log" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.054610 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="74663fef-43f4-478d-9702-eb41e85cca23" containerName="nova-api-log" Nov 27 08:55:01 crc kubenswrapper[4971]: E1127 08:55:01.054635 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba692abd-4b39-486b-9586-0619e79aa774" containerName="nova-metadata-log" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.054645 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba692abd-4b39-486b-9586-0619e79aa774" containerName="nova-metadata-log" Nov 27 08:55:01 crc kubenswrapper[4971]: E1127 08:55:01.054655 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cb90f92-2547-47f8-adc6-7c7033567f90" containerName="nova-manage" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.054661 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cb90f92-2547-47f8-adc6-7c7033567f90" containerName="nova-manage" Nov 27 08:55:01 crc kubenswrapper[4971]: E1127 08:55:01.054669 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74663fef-43f4-478d-9702-eb41e85cca23" containerName="nova-api-api" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.054676 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="74663fef-43f4-478d-9702-eb41e85cca23" containerName="nova-api-api" Nov 27 08:55:01 crc kubenswrapper[4971]: E1127 08:55:01.054693 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba692abd-4b39-486b-9586-0619e79aa774" containerName="nova-metadata-metadata" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.054699 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba692abd-4b39-486b-9586-0619e79aa774" containerName="nova-metadata-metadata" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.054919 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba692abd-4b39-486b-9586-0619e79aa774" containerName="nova-metadata-metadata" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.054942 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cb90f92-2547-47f8-adc6-7c7033567f90" containerName="nova-manage" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.054959 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="74663fef-43f4-478d-9702-eb41e85cca23" containerName="nova-api-log" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.054978 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="74663fef-43f4-478d-9702-eb41e85cca23" containerName="nova-api-api" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.054994 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba692abd-4b39-486b-9586-0619e79aa774" containerName="nova-metadata-log" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.056081 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.064997 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.066112 4971 scope.go:117] "RemoveContainer" containerID="1befefa6b6e8296897a3344720df03990a77535fb62f03d8c81f6bb7128514c8" Nov 27 08:55:01 crc kubenswrapper[4971]: E1127 08:55:01.066635 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1befefa6b6e8296897a3344720df03990a77535fb62f03d8c81f6bb7128514c8\": container with ID starting with 1befefa6b6e8296897a3344720df03990a77535fb62f03d8c81f6bb7128514c8 not found: ID does not exist" containerID="1befefa6b6e8296897a3344720df03990a77535fb62f03d8c81f6bb7128514c8" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.066662 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1befefa6b6e8296897a3344720df03990a77535fb62f03d8c81f6bb7128514c8"} err="failed to get container status \"1befefa6b6e8296897a3344720df03990a77535fb62f03d8c81f6bb7128514c8\": rpc error: code = NotFound desc = could not find container \"1befefa6b6e8296897a3344720df03990a77535fb62f03d8c81f6bb7128514c8\": container with ID starting with 1befefa6b6e8296897a3344720df03990a77535fb62f03d8c81f6bb7128514c8 not found: ID does not exist" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.066684 4971 scope.go:117] "RemoveContainer" containerID="76007cd90a94aeb935d6550bef4cd9b83fc010d173931c23c98113659387f73b" Nov 27 08:55:01 crc kubenswrapper[4971]: E1127 08:55:01.066899 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76007cd90a94aeb935d6550bef4cd9b83fc010d173931c23c98113659387f73b\": container with ID starting with 76007cd90a94aeb935d6550bef4cd9b83fc010d173931c23c98113659387f73b not found: ID does not exist" containerID="76007cd90a94aeb935d6550bef4cd9b83fc010d173931c23c98113659387f73b" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.066915 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76007cd90a94aeb935d6550bef4cd9b83fc010d173931c23c98113659387f73b"} err="failed to get container status \"76007cd90a94aeb935d6550bef4cd9b83fc010d173931c23c98113659387f73b\": rpc error: code = NotFound desc = could not find container \"76007cd90a94aeb935d6550bef4cd9b83fc010d173931c23c98113659387f73b\": container with ID starting with 76007cd90a94aeb935d6550bef4cd9b83fc010d173931c23c98113659387f73b not found: ID does not exist" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.066956 4971 scope.go:117] "RemoveContainer" containerID="1befefa6b6e8296897a3344720df03990a77535fb62f03d8c81f6bb7128514c8" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.067377 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1befefa6b6e8296897a3344720df03990a77535fb62f03d8c81f6bb7128514c8"} err="failed to get container status \"1befefa6b6e8296897a3344720df03990a77535fb62f03d8c81f6bb7128514c8\": rpc error: code = NotFound desc = could not find container \"1befefa6b6e8296897a3344720df03990a77535fb62f03d8c81f6bb7128514c8\": container with ID starting with 1befefa6b6e8296897a3344720df03990a77535fb62f03d8c81f6bb7128514c8 not found: ID does not exist" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.067393 4971 scope.go:117] 
"RemoveContainer" containerID="76007cd90a94aeb935d6550bef4cd9b83fc010d173931c23c98113659387f73b" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.068143 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76007cd90a94aeb935d6550bef4cd9b83fc010d173931c23c98113659387f73b"} err="failed to get container status \"76007cd90a94aeb935d6550bef4cd9b83fc010d173931c23c98113659387f73b\": rpc error: code = NotFound desc = could not find container \"76007cd90a94aeb935d6550bef4cd9b83fc010d173931c23c98113659387f73b\": container with ID starting with 76007cd90a94aeb935d6550bef4cd9b83fc010d173931c23c98113659387f73b not found: ID does not exist" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.071590 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.087734 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.105975 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.109708 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.111686 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.124389 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.139959 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.209583 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b800971-c06c-43e8-9628-5b85099abe8c-logs\") pod \"nova-metadata-0\" (UID: \"6b800971-c06c-43e8-9628-5b85099abe8c\") " pod="openstack/nova-metadata-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.209649 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b800971-c06c-43e8-9628-5b85099abe8c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6b800971-c06c-43e8-9628-5b85099abe8c\") " pod="openstack/nova-metadata-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.209676 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7n4n\" (UniqueName: \"kubernetes.io/projected/6b800971-c06c-43e8-9628-5b85099abe8c-kube-api-access-k7n4n\") pod \"nova-metadata-0\" (UID: \"6b800971-c06c-43e8-9628-5b85099abe8c\") " pod="openstack/nova-metadata-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.209754 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b800971-c06c-43e8-9628-5b85099abe8c-config-data\") pod \"nova-metadata-0\" (UID: \"6b800971-c06c-43e8-9628-5b85099abe8c\") " pod="openstack/nova-metadata-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.312104 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf5f9d27-578a-4577-810c-c28895116963-config-data\") pod 
\"nova-api-0\" (UID: \"cf5f9d27-578a-4577-810c-c28895116963\") " pod="openstack/nova-api-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.312186 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b800971-c06c-43e8-9628-5b85099abe8c-config-data\") pod \"nova-metadata-0\" (UID: \"6b800971-c06c-43e8-9628-5b85099abe8c\") " pod="openstack/nova-metadata-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.312301 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf5f9d27-578a-4577-810c-c28895116963-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cf5f9d27-578a-4577-810c-c28895116963\") " pod="openstack/nova-api-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.312375 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b800971-c06c-43e8-9628-5b85099abe8c-logs\") pod \"nova-metadata-0\" (UID: \"6b800971-c06c-43e8-9628-5b85099abe8c\") " pod="openstack/nova-metadata-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.312410 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf5f9d27-578a-4577-810c-c28895116963-logs\") pod \"nova-api-0\" (UID: \"cf5f9d27-578a-4577-810c-c28895116963\") " pod="openstack/nova-api-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.312479 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddlfx\" (UniqueName: \"kubernetes.io/projected/cf5f9d27-578a-4577-810c-c28895116963-kube-api-access-ddlfx\") pod \"nova-api-0\" (UID: \"cf5f9d27-578a-4577-810c-c28895116963\") " pod="openstack/nova-api-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.312515 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b800971-c06c-43e8-9628-5b85099abe8c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6b800971-c06c-43e8-9628-5b85099abe8c\") " pod="openstack/nova-metadata-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.312581 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7n4n\" (UniqueName: \"kubernetes.io/projected/6b800971-c06c-43e8-9628-5b85099abe8c-kube-api-access-k7n4n\") pod \"nova-metadata-0\" (UID: \"6b800971-c06c-43e8-9628-5b85099abe8c\") " pod="openstack/nova-metadata-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.313799 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b800971-c06c-43e8-9628-5b85099abe8c-logs\") pod \"nova-metadata-0\" (UID: \"6b800971-c06c-43e8-9628-5b85099abe8c\") " pod="openstack/nova-metadata-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.318719 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b800971-c06c-43e8-9628-5b85099abe8c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6b800971-c06c-43e8-9628-5b85099abe8c\") " pod="openstack/nova-metadata-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.319252 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/6b800971-c06c-43e8-9628-5b85099abe8c-config-data\") pod \"nova-metadata-0\" (UID: \"6b800971-c06c-43e8-9628-5b85099abe8c\") " pod="openstack/nova-metadata-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.332388 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7n4n\" (UniqueName: \"kubernetes.io/projected/6b800971-c06c-43e8-9628-5b85099abe8c-kube-api-access-k7n4n\") pod \"nova-metadata-0\" (UID: \"6b800971-c06c-43e8-9628-5b85099abe8c\") " pod="openstack/nova-metadata-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.414306 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf5f9d27-578a-4577-810c-c28895116963-config-data\") pod \"nova-api-0\" (UID: \"cf5f9d27-578a-4577-810c-c28895116963\") " pod="openstack/nova-api-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.414388 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf5f9d27-578a-4577-810c-c28895116963-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cf5f9d27-578a-4577-810c-c28895116963\") " pod="openstack/nova-api-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.414436 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf5f9d27-578a-4577-810c-c28895116963-logs\") pod \"nova-api-0\" (UID: \"cf5f9d27-578a-4577-810c-c28895116963\") " pod="openstack/nova-api-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.414460 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddlfx\" (UniqueName: \"kubernetes.io/projected/cf5f9d27-578a-4577-810c-c28895116963-kube-api-access-ddlfx\") pod \"nova-api-0\" (UID: \"cf5f9d27-578a-4577-810c-c28895116963\") " pod="openstack/nova-api-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.416942 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf5f9d27-578a-4577-810c-c28895116963-logs\") pod \"nova-api-0\" (UID: \"cf5f9d27-578a-4577-810c-c28895116963\") " pod="openstack/nova-api-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.417180 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.419315 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf5f9d27-578a-4577-810c-c28895116963-config-data\") pod \"nova-api-0\" (UID: \"cf5f9d27-578a-4577-810c-c28895116963\") " pod="openstack/nova-api-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.419336 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf5f9d27-578a-4577-810c-c28895116963-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cf5f9d27-578a-4577-810c-c28895116963\") " pod="openstack/nova-api-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.435307 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddlfx\" (UniqueName: \"kubernetes.io/projected/cf5f9d27-578a-4577-810c-c28895116963-kube-api-access-ddlfx\") pod \"nova-api-0\" (UID: \"cf5f9d27-578a-4577-810c-c28895116963\") " pod="openstack/nova-api-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.537185 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.544229 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.719797 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-dns-svc\") pod \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.719911 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4j4w\" (UniqueName: \"kubernetes.io/projected/caf8c7f8-182f-471e-b6ba-cc257b879d1f-kube-api-access-f4j4w\") pod \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.720056 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-ovsdbserver-sb\") pod \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.720086 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-ovsdbserver-nb\") pod \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.720147 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-config\") pod \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\" (UID: \"caf8c7f8-182f-471e-b6ba-cc257b879d1f\") " Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.742070 4971 generic.go:334] "Generic (PLEG): container finished" podID="caf8c7f8-182f-471e-b6ba-cc257b879d1f" containerID="90f2fbf20f00cacb129f9ffb34bd45dececf911add9a0de6ce800f667230729f" exitCode=0 Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.742112 4971 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/caf8c7f8-182f-471e-b6ba-cc257b879d1f-kube-api-access-f4j4w" (OuterVolumeSpecName: "kube-api-access-f4j4w") pod "caf8c7f8-182f-471e-b6ba-cc257b879d1f" (UID: "caf8c7f8-182f-471e-b6ba-cc257b879d1f"). InnerVolumeSpecName "kube-api-access-f4j4w". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.742231 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.742218 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" event={"ID":"caf8c7f8-182f-471e-b6ba-cc257b879d1f","Type":"ContainerDied","Data":"90f2fbf20f00cacb129f9ffb34bd45dececf911add9a0de6ce800f667230729f"} Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.743807 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-559c7c46ff-gddgr" event={"ID":"caf8c7f8-182f-471e-b6ba-cc257b879d1f","Type":"ContainerDied","Data":"0f2731dfdb324d36a63e5cf1458a32f978fb25ddb30e5d252a5e5388d5901fe2"} Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.743851 4971 scope.go:117] "RemoveContainer" containerID="90f2fbf20f00cacb129f9ffb34bd45dececf911add9a0de6ce800f667230729f" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.766127 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.793660 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "caf8c7f8-182f-471e-b6ba-cc257b879d1f" (UID: "caf8c7f8-182f-471e-b6ba-cc257b879d1f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.796103 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-config" (OuterVolumeSpecName: "config") pod "caf8c7f8-182f-471e-b6ba-cc257b879d1f" (UID: "caf8c7f8-182f-471e-b6ba-cc257b879d1f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.814343 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "caf8c7f8-182f-471e-b6ba-cc257b879d1f" (UID: "caf8c7f8-182f-471e-b6ba-cc257b879d1f"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.823970 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.824023 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.824041 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-config\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.824056 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4j4w\" (UniqueName: \"kubernetes.io/projected/caf8c7f8-182f-471e-b6ba-cc257b879d1f-kube-api-access-f4j4w\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.832759 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "caf8c7f8-182f-471e-b6ba-cc257b879d1f" (UID: "caf8c7f8-182f-471e-b6ba-cc257b879d1f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.891181 4971 scope.go:117] "RemoveContainer" containerID="523049562fb23f9b50de806f0a7a72fa89d4551ab816205eaf5ab0f94f4b2793" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.919833 4971 scope.go:117] "RemoveContainer" containerID="90f2fbf20f00cacb129f9ffb34bd45dececf911add9a0de6ce800f667230729f" Nov 27 08:55:01 crc kubenswrapper[4971]: E1127 08:55:01.922070 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90f2fbf20f00cacb129f9ffb34bd45dececf911add9a0de6ce800f667230729f\": container with ID starting with 90f2fbf20f00cacb129f9ffb34bd45dececf911add9a0de6ce800f667230729f not found: ID does not exist" containerID="90f2fbf20f00cacb129f9ffb34bd45dececf911add9a0de6ce800f667230729f" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.922113 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90f2fbf20f00cacb129f9ffb34bd45dececf911add9a0de6ce800f667230729f"} err="failed to get container status \"90f2fbf20f00cacb129f9ffb34bd45dececf911add9a0de6ce800f667230729f\": rpc error: code = NotFound desc = could not find container \"90f2fbf20f00cacb129f9ffb34bd45dececf911add9a0de6ce800f667230729f\": container with ID starting with 90f2fbf20f00cacb129f9ffb34bd45dececf911add9a0de6ce800f667230729f not found: ID does not exist" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.922140 4971 scope.go:117] "RemoveContainer" containerID="523049562fb23f9b50de806f0a7a72fa89d4551ab816205eaf5ab0f94f4b2793" Nov 27 08:55:01 crc kubenswrapper[4971]: E1127 08:55:01.922877 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"523049562fb23f9b50de806f0a7a72fa89d4551ab816205eaf5ab0f94f4b2793\": container with ID starting with 523049562fb23f9b50de806f0a7a72fa89d4551ab816205eaf5ab0f94f4b2793 not found: ID does not exist" 
containerID="523049562fb23f9b50de806f0a7a72fa89d4551ab816205eaf5ab0f94f4b2793" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.922935 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"523049562fb23f9b50de806f0a7a72fa89d4551ab816205eaf5ab0f94f4b2793"} err="failed to get container status \"523049562fb23f9b50de806f0a7a72fa89d4551ab816205eaf5ab0f94f4b2793\": rpc error: code = NotFound desc = could not find container \"523049562fb23f9b50de806f0a7a72fa89d4551ab816205eaf5ab0f94f4b2793\": container with ID starting with 523049562fb23f9b50de806f0a7a72fa89d4551ab816205eaf5ab0f94f4b2793 not found: ID does not exist" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.926609 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/caf8c7f8-182f-471e-b6ba-cc257b879d1f-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:01 crc kubenswrapper[4971]: I1127 08:55:01.929284 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 08:55:01 crc kubenswrapper[4971]: W1127 08:55:01.932847 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b800971_c06c_43e8_9628_5b85099abe8c.slice/crio-1cf6c38099f1e9b0930db18f2d3c4cae3114de1d775aecc38646ccb2602c69b0 WatchSource:0}: Error finding container 1cf6c38099f1e9b0930db18f2d3c4cae3114de1d775aecc38646ccb2602c69b0: Status 404 returned error can't find the container with id 1cf6c38099f1e9b0930db18f2d3c4cae3114de1d775aecc38646ccb2602c69b0 Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.045513 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.100449 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-559c7c46ff-gddgr"] Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.109891 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-559c7c46ff-gddgr"] Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.588627 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74663fef-43f4-478d-9702-eb41e85cca23" path="/var/lib/kubelet/pods/74663fef-43f4-478d-9702-eb41e85cca23/volumes" Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.589871 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba692abd-4b39-486b-9586-0619e79aa774" path="/var/lib/kubelet/pods/ba692abd-4b39-486b-9586-0619e79aa774/volumes" Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.590709 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="caf8c7f8-182f-471e-b6ba-cc257b879d1f" path="/var/lib/kubelet/pods/caf8c7f8-182f-471e-b6ba-cc257b879d1f/volumes" Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.670631 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.743839 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hmzh\" (UniqueName: \"kubernetes.io/projected/3c50701f-bf73-43e1-9306-969eb07fbf5a-kube-api-access-6hmzh\") pod \"3c50701f-bf73-43e1-9306-969eb07fbf5a\" (UID: \"3c50701f-bf73-43e1-9306-969eb07fbf5a\") " Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.743939 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c50701f-bf73-43e1-9306-969eb07fbf5a-config-data\") pod \"3c50701f-bf73-43e1-9306-969eb07fbf5a\" (UID: \"3c50701f-bf73-43e1-9306-969eb07fbf5a\") " Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.744156 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c50701f-bf73-43e1-9306-969eb07fbf5a-combined-ca-bundle\") pod \"3c50701f-bf73-43e1-9306-969eb07fbf5a\" (UID: \"3c50701f-bf73-43e1-9306-969eb07fbf5a\") " Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.749129 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c50701f-bf73-43e1-9306-969eb07fbf5a-kube-api-access-6hmzh" (OuterVolumeSpecName: "kube-api-access-6hmzh") pod "3c50701f-bf73-43e1-9306-969eb07fbf5a" (UID: "3c50701f-bf73-43e1-9306-969eb07fbf5a"). InnerVolumeSpecName "kube-api-access-6hmzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.770792 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c50701f-bf73-43e1-9306-969eb07fbf5a-config-data" (OuterVolumeSpecName: "config-data") pod "3c50701f-bf73-43e1-9306-969eb07fbf5a" (UID: "3c50701f-bf73-43e1-9306-969eb07fbf5a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.776488 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cf5f9d27-578a-4577-810c-c28895116963","Type":"ContainerStarted","Data":"f53dc42abf2ddf077a44f008477e95940ec9f25f843f78932fa8e4492a4ee603"} Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.776550 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cf5f9d27-578a-4577-810c-c28895116963","Type":"ContainerStarted","Data":"37dc79150eb0089fb08e5e24cb927167b6adb0077b8f4f158d6f464b1a6a5897"} Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.776565 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cf5f9d27-578a-4577-810c-c28895116963","Type":"ContainerStarted","Data":"82ef1b3f694d056c634e717a908fc7fdf999004b6a59683657e460767590dd0e"} Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.781963 4971 generic.go:334] "Generic (PLEG): container finished" podID="3c50701f-bf73-43e1-9306-969eb07fbf5a" containerID="f321cddd2c0bbdbecee90320d7f1cee0e7cfa2aa9da8ed335ced59adc2cde06e" exitCode=0 Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.782029 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3c50701f-bf73-43e1-9306-969eb07fbf5a","Type":"ContainerDied","Data":"f321cddd2c0bbdbecee90320d7f1cee0e7cfa2aa9da8ed335ced59adc2cde06e"} Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.782034 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.782056 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3c50701f-bf73-43e1-9306-969eb07fbf5a","Type":"ContainerDied","Data":"142695ccd69070fccca0a2e642c08f8c533c1aef5f9d2211e1b1172f120bd1b8"} Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.782074 4971 scope.go:117] "RemoveContainer" containerID="f321cddd2c0bbdbecee90320d7f1cee0e7cfa2aa9da8ed335ced59adc2cde06e" Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.788176 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c50701f-bf73-43e1-9306-969eb07fbf5a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3c50701f-bf73-43e1-9306-969eb07fbf5a" (UID: "3c50701f-bf73-43e1-9306-969eb07fbf5a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.789339 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b800971-c06c-43e8-9628-5b85099abe8c","Type":"ContainerStarted","Data":"a63f0a28be5f96fa01c7dcbafb9308c736ec7d0ba22158e2d9b2744d93d745f3"} Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.789377 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b800971-c06c-43e8-9628-5b85099abe8c","Type":"ContainerStarted","Data":"efcf395ed596eded74842cfc57e84d8bb9c125297a7b3fbe255b246b5561fa89"} Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.789388 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b800971-c06c-43e8-9628-5b85099abe8c","Type":"ContainerStarted","Data":"1cf6c38099f1e9b0930db18f2d3c4cae3114de1d775aecc38646ccb2602c69b0"} Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.802195 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.802175121 podStartE2EDuration="2.802175121s" podCreationTimestamp="2025-11-27 08:55:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:55:02.798337961 +0000 UTC m=+7340.990381889" watchObservedRunningTime="2025-11-27 08:55:02.802175121 +0000 UTC m=+7340.994219039" Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.822328 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.822302277 podStartE2EDuration="2.822302277s" podCreationTimestamp="2025-11-27 08:55:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:55:02.813123724 +0000 UTC m=+7341.005167652" watchObservedRunningTime="2025-11-27 08:55:02.822302277 +0000 UTC m=+7341.014346185" Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.846875 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c50701f-bf73-43e1-9306-969eb07fbf5a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.846917 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hmzh\" (UniqueName: \"kubernetes.io/projected/3c50701f-bf73-43e1-9306-969eb07fbf5a-kube-api-access-6hmzh\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.846934 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c50701f-bf73-43e1-9306-969eb07fbf5a-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:02 crc kubenswrapper[4971]: I1127 08:55:02.879335 4971 scope.go:117] "RemoveContainer" containerID="f321cddd2c0bbdbecee90320d7f1cee0e7cfa2aa9da8ed335ced59adc2cde06e" Nov 27 08:55:02 crc kubenswrapper[4971]: E1127 08:55:02.879763 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f321cddd2c0bbdbecee90320d7f1cee0e7cfa2aa9da8ed335ced59adc2cde06e\": container with ID starting with f321cddd2c0bbdbecee90320d7f1cee0e7cfa2aa9da8ed335ced59adc2cde06e not found: ID does not exist" containerID="f321cddd2c0bbdbecee90320d7f1cee0e7cfa2aa9da8ed335ced59adc2cde06e" Nov 27 08:55:02 crc 
kubenswrapper[4971]: I1127 08:55:02.879807 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f321cddd2c0bbdbecee90320d7f1cee0e7cfa2aa9da8ed335ced59adc2cde06e"} err="failed to get container status \"f321cddd2c0bbdbecee90320d7f1cee0e7cfa2aa9da8ed335ced59adc2cde06e\": rpc error: code = NotFound desc = could not find container \"f321cddd2c0bbdbecee90320d7f1cee0e7cfa2aa9da8ed335ced59adc2cde06e\": container with ID starting with f321cddd2c0bbdbecee90320d7f1cee0e7cfa2aa9da8ed335ced59adc2cde06e not found: ID does not exist" Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.127952 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.164402 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.179153 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:55:03 crc kubenswrapper[4971]: E1127 08:55:03.180168 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="caf8c7f8-182f-471e-b6ba-cc257b879d1f" containerName="init" Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.180189 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="caf8c7f8-182f-471e-b6ba-cc257b879d1f" containerName="init" Nov 27 08:55:03 crc kubenswrapper[4971]: E1127 08:55:03.180214 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="caf8c7f8-182f-471e-b6ba-cc257b879d1f" containerName="dnsmasq-dns" Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.180220 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="caf8c7f8-182f-471e-b6ba-cc257b879d1f" containerName="dnsmasq-dns" Nov 27 08:55:03 crc kubenswrapper[4971]: E1127 08:55:03.180255 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c50701f-bf73-43e1-9306-969eb07fbf5a" containerName="nova-scheduler-scheduler" Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.180263 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c50701f-bf73-43e1-9306-969eb07fbf5a" containerName="nova-scheduler-scheduler" Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.180647 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="caf8c7f8-182f-471e-b6ba-cc257b879d1f" containerName="dnsmasq-dns" Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.180674 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c50701f-bf73-43e1-9306-969eb07fbf5a" containerName="nova-scheduler-scheduler" Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.181630 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.185133 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.204927 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.262813 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55zq8\" (UniqueName: \"kubernetes.io/projected/e2bb452e-0870-4054-9b13-b01e8ad1cca6-kube-api-access-55zq8\") pod \"nova-scheduler-0\" (UID: \"e2bb452e-0870-4054-9b13-b01e8ad1cca6\") " pod="openstack/nova-scheduler-0" Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.263290 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2bb452e-0870-4054-9b13-b01e8ad1cca6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e2bb452e-0870-4054-9b13-b01e8ad1cca6\") " pod="openstack/nova-scheduler-0" Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.263435 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2bb452e-0870-4054-9b13-b01e8ad1cca6-config-data\") pod \"nova-scheduler-0\" (UID: \"e2bb452e-0870-4054-9b13-b01e8ad1cca6\") " pod="openstack/nova-scheduler-0" Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.365696 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2bb452e-0870-4054-9b13-b01e8ad1cca6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e2bb452e-0870-4054-9b13-b01e8ad1cca6\") " pod="openstack/nova-scheduler-0" Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.365757 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2bb452e-0870-4054-9b13-b01e8ad1cca6-config-data\") pod \"nova-scheduler-0\" (UID: \"e2bb452e-0870-4054-9b13-b01e8ad1cca6\") " pod="openstack/nova-scheduler-0" Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.365799 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55zq8\" (UniqueName: \"kubernetes.io/projected/e2bb452e-0870-4054-9b13-b01e8ad1cca6-kube-api-access-55zq8\") pod \"nova-scheduler-0\" (UID: \"e2bb452e-0870-4054-9b13-b01e8ad1cca6\") " pod="openstack/nova-scheduler-0" Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.370273 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2bb452e-0870-4054-9b13-b01e8ad1cca6-config-data\") pod \"nova-scheduler-0\" (UID: \"e2bb452e-0870-4054-9b13-b01e8ad1cca6\") " pod="openstack/nova-scheduler-0" Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.372458 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2bb452e-0870-4054-9b13-b01e8ad1cca6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e2bb452e-0870-4054-9b13-b01e8ad1cca6\") " pod="openstack/nova-scheduler-0" Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.383453 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55zq8\" (UniqueName: 
\"kubernetes.io/projected/e2bb452e-0870-4054-9b13-b01e8ad1cca6-kube-api-access-55zq8\") pod \"nova-scheduler-0\" (UID: \"e2bb452e-0870-4054-9b13-b01e8ad1cca6\") " pod="openstack/nova-scheduler-0" Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.510392 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 08:55:03 crc kubenswrapper[4971]: I1127 08:55:03.991198 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:55:03 crc kubenswrapper[4971]: W1127 08:55:03.996724 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2bb452e_0870_4054_9b13_b01e8ad1cca6.slice/crio-0ea4df11e19c8cceef8b146df4959c350680021e010b16e0f4a441024ec1523d WatchSource:0}: Error finding container 0ea4df11e19c8cceef8b146df4959c350680021e010b16e0f4a441024ec1523d: Status 404 returned error can't find the container with id 0ea4df11e19c8cceef8b146df4959c350680021e010b16e0f4a441024ec1523d Nov 27 08:55:04 crc kubenswrapper[4971]: I1127 08:55:04.561963 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c50701f-bf73-43e1-9306-969eb07fbf5a" path="/var/lib/kubelet/pods/3c50701f-bf73-43e1-9306-969eb07fbf5a/volumes" Nov 27 08:55:04 crc kubenswrapper[4971]: I1127 08:55:04.810664 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e2bb452e-0870-4054-9b13-b01e8ad1cca6","Type":"ContainerStarted","Data":"ab4142ac42de530cd48d2557569e52f400fe9335280bb740f6a04efe948fd929"} Nov 27 08:55:04 crc kubenswrapper[4971]: I1127 08:55:04.810720 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e2bb452e-0870-4054-9b13-b01e8ad1cca6","Type":"ContainerStarted","Data":"0ea4df11e19c8cceef8b146df4959c350680021e010b16e0f4a441024ec1523d"} Nov 27 08:55:04 crc kubenswrapper[4971]: I1127 08:55:04.828349 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.828308788 podStartE2EDuration="1.828308788s" podCreationTimestamp="2025-11-27 08:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:55:04.82594776 +0000 UTC m=+7343.017991678" watchObservedRunningTime="2025-11-27 08:55:04.828308788 +0000 UTC m=+7343.020352706" Nov 27 08:55:05 crc kubenswrapper[4971]: I1127 08:55:05.551461 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:55:05 crc kubenswrapper[4971]: E1127 08:55:05.552140 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:55:06 crc kubenswrapper[4971]: I1127 08:55:06.418113 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 27 08:55:06 crc kubenswrapper[4971]: I1127 08:55:06.418246 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 27 08:55:08 crc kubenswrapper[4971]: I1127 08:55:08.511511 4971 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 27 08:55:08 crc kubenswrapper[4971]: I1127 08:55:08.761286 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Nov 27 08:55:09 crc kubenswrapper[4971]: I1127 08:55:09.331658 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-zfsxx"] Nov 27 08:55:09 crc kubenswrapper[4971]: I1127 08:55:09.333926 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zfsxx" Nov 27 08:55:09 crc kubenswrapper[4971]: I1127 08:55:09.336652 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Nov 27 08:55:09 crc kubenswrapper[4971]: I1127 08:55:09.336983 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Nov 27 08:55:09 crc kubenswrapper[4971]: I1127 08:55:09.343150 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-zfsxx"] Nov 27 08:55:09 crc kubenswrapper[4971]: I1127 08:55:09.405384 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bea64ae-37fc-465f-9313-83d1f02236f1-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-zfsxx\" (UID: \"5bea64ae-37fc-465f-9313-83d1f02236f1\") " pod="openstack/nova-cell1-cell-mapping-zfsxx" Nov 27 08:55:09 crc kubenswrapper[4971]: I1127 08:55:09.405623 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bea64ae-37fc-465f-9313-83d1f02236f1-config-data\") pod \"nova-cell1-cell-mapping-zfsxx\" (UID: \"5bea64ae-37fc-465f-9313-83d1f02236f1\") " pod="openstack/nova-cell1-cell-mapping-zfsxx" Nov 27 08:55:09 crc kubenswrapper[4971]: I1127 08:55:09.405730 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dq2sb\" (UniqueName: \"kubernetes.io/projected/5bea64ae-37fc-465f-9313-83d1f02236f1-kube-api-access-dq2sb\") pod \"nova-cell1-cell-mapping-zfsxx\" (UID: \"5bea64ae-37fc-465f-9313-83d1f02236f1\") " pod="openstack/nova-cell1-cell-mapping-zfsxx" Nov 27 08:55:09 crc kubenswrapper[4971]: I1127 08:55:09.405811 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5bea64ae-37fc-465f-9313-83d1f02236f1-scripts\") pod \"nova-cell1-cell-mapping-zfsxx\" (UID: \"5bea64ae-37fc-465f-9313-83d1f02236f1\") " pod="openstack/nova-cell1-cell-mapping-zfsxx" Nov 27 08:55:09 crc kubenswrapper[4971]: I1127 08:55:09.508050 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bea64ae-37fc-465f-9313-83d1f02236f1-config-data\") pod \"nova-cell1-cell-mapping-zfsxx\" (UID: \"5bea64ae-37fc-465f-9313-83d1f02236f1\") " pod="openstack/nova-cell1-cell-mapping-zfsxx" Nov 27 08:55:09 crc kubenswrapper[4971]: I1127 08:55:09.508114 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dq2sb\" (UniqueName: \"kubernetes.io/projected/5bea64ae-37fc-465f-9313-83d1f02236f1-kube-api-access-dq2sb\") pod \"nova-cell1-cell-mapping-zfsxx\" (UID: \"5bea64ae-37fc-465f-9313-83d1f02236f1\") " pod="openstack/nova-cell1-cell-mapping-zfsxx" Nov 
27 08:55:09 crc kubenswrapper[4971]: I1127 08:55:09.508152 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5bea64ae-37fc-465f-9313-83d1f02236f1-scripts\") pod \"nova-cell1-cell-mapping-zfsxx\" (UID: \"5bea64ae-37fc-465f-9313-83d1f02236f1\") " pod="openstack/nova-cell1-cell-mapping-zfsxx" Nov 27 08:55:09 crc kubenswrapper[4971]: I1127 08:55:09.508206 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bea64ae-37fc-465f-9313-83d1f02236f1-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-zfsxx\" (UID: \"5bea64ae-37fc-465f-9313-83d1f02236f1\") " pod="openstack/nova-cell1-cell-mapping-zfsxx" Nov 27 08:55:09 crc kubenswrapper[4971]: I1127 08:55:09.517227 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bea64ae-37fc-465f-9313-83d1f02236f1-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-zfsxx\" (UID: \"5bea64ae-37fc-465f-9313-83d1f02236f1\") " pod="openstack/nova-cell1-cell-mapping-zfsxx" Nov 27 08:55:09 crc kubenswrapper[4971]: I1127 08:55:09.517786 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bea64ae-37fc-465f-9313-83d1f02236f1-config-data\") pod \"nova-cell1-cell-mapping-zfsxx\" (UID: \"5bea64ae-37fc-465f-9313-83d1f02236f1\") " pod="openstack/nova-cell1-cell-mapping-zfsxx" Nov 27 08:55:09 crc kubenswrapper[4971]: I1127 08:55:09.531054 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5bea64ae-37fc-465f-9313-83d1f02236f1-scripts\") pod \"nova-cell1-cell-mapping-zfsxx\" (UID: \"5bea64ae-37fc-465f-9313-83d1f02236f1\") " pod="openstack/nova-cell1-cell-mapping-zfsxx" Nov 27 08:55:09 crc kubenswrapper[4971]: I1127 08:55:09.535891 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dq2sb\" (UniqueName: \"kubernetes.io/projected/5bea64ae-37fc-465f-9313-83d1f02236f1-kube-api-access-dq2sb\") pod \"nova-cell1-cell-mapping-zfsxx\" (UID: \"5bea64ae-37fc-465f-9313-83d1f02236f1\") " pod="openstack/nova-cell1-cell-mapping-zfsxx" Nov 27 08:55:09 crc kubenswrapper[4971]: I1127 08:55:09.657131 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zfsxx" Nov 27 08:55:10 crc kubenswrapper[4971]: I1127 08:55:10.131584 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-zfsxx"] Nov 27 08:55:10 crc kubenswrapper[4971]: W1127 08:55:10.134078 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5bea64ae_37fc_465f_9313_83d1f02236f1.slice/crio-a88eb2abd5fdaf2f61f782ad914da506f7cb1a9aba13333ba56a4dca98d7fe64 WatchSource:0}: Error finding container a88eb2abd5fdaf2f61f782ad914da506f7cb1a9aba13333ba56a4dca98d7fe64: Status 404 returned error can't find the container with id a88eb2abd5fdaf2f61f782ad914da506f7cb1a9aba13333ba56a4dca98d7fe64 Nov 27 08:55:10 crc kubenswrapper[4971]: I1127 08:55:10.904695 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zfsxx" event={"ID":"5bea64ae-37fc-465f-9313-83d1f02236f1","Type":"ContainerStarted","Data":"dfb1afa4f1ab9e1635b8900422da78d85a3b7c2515e7be13b7f15cb1a1a60cd8"} Nov 27 08:55:10 crc kubenswrapper[4971]: I1127 08:55:10.905189 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zfsxx" event={"ID":"5bea64ae-37fc-465f-9313-83d1f02236f1","Type":"ContainerStarted","Data":"a88eb2abd5fdaf2f61f782ad914da506f7cb1a9aba13333ba56a4dca98d7fe64"} Nov 27 08:55:10 crc kubenswrapper[4971]: I1127 08:55:10.959130 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-zfsxx" podStartSLOduration=1.9590897790000001 podStartE2EDuration="1.959089779s" podCreationTimestamp="2025-11-27 08:55:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:55:10.923272433 +0000 UTC m=+7349.115316361" watchObservedRunningTime="2025-11-27 08:55:10.959089779 +0000 UTC m=+7349.151133697" Nov 27 08:55:11 crc kubenswrapper[4971]: I1127 08:55:11.418719 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 27 08:55:11 crc kubenswrapper[4971]: I1127 08:55:11.419190 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 27 08:55:11 crc kubenswrapper[4971]: I1127 08:55:11.538601 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 27 08:55:11 crc kubenswrapper[4971]: I1127 08:55:11.538697 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 27 08:55:12 crc kubenswrapper[4971]: I1127 08:55:12.504841 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6b800971-c06c-43e8-9628-5b85099abe8c" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.80:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 08:55:12 crc kubenswrapper[4971]: I1127 08:55:12.504872 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6b800971-c06c-43e8-9628-5b85099abe8c" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.80:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 08:55:12 crc kubenswrapper[4971]: I1127 08:55:12.619743 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" 
podUID="cf5f9d27-578a-4577-810c-c28895116963" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.81:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 08:55:12 crc kubenswrapper[4971]: I1127 08:55:12.619794 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="cf5f9d27-578a-4577-810c-c28895116963" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.81:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 08:55:13 crc kubenswrapper[4971]: I1127 08:55:13.511568 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 27 08:55:13 crc kubenswrapper[4971]: I1127 08:55:13.555656 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 27 08:55:13 crc kubenswrapper[4971]: I1127 08:55:13.984335 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 27 08:55:15 crc kubenswrapper[4971]: I1127 08:55:15.962885 4971 generic.go:334] "Generic (PLEG): container finished" podID="5bea64ae-37fc-465f-9313-83d1f02236f1" containerID="dfb1afa4f1ab9e1635b8900422da78d85a3b7c2515e7be13b7f15cb1a1a60cd8" exitCode=0 Nov 27 08:55:15 crc kubenswrapper[4971]: I1127 08:55:15.962942 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zfsxx" event={"ID":"5bea64ae-37fc-465f-9313-83d1f02236f1","Type":"ContainerDied","Data":"dfb1afa4f1ab9e1635b8900422da78d85a3b7c2515e7be13b7f15cb1a1a60cd8"} Nov 27 08:55:17 crc kubenswrapper[4971]: I1127 08:55:17.424019 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zfsxx" Nov 27 08:55:17 crc kubenswrapper[4971]: I1127 08:55:17.590683 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5bea64ae-37fc-465f-9313-83d1f02236f1-scripts\") pod \"5bea64ae-37fc-465f-9313-83d1f02236f1\" (UID: \"5bea64ae-37fc-465f-9313-83d1f02236f1\") " Nov 27 08:55:17 crc kubenswrapper[4971]: I1127 08:55:17.590894 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bea64ae-37fc-465f-9313-83d1f02236f1-combined-ca-bundle\") pod \"5bea64ae-37fc-465f-9313-83d1f02236f1\" (UID: \"5bea64ae-37fc-465f-9313-83d1f02236f1\") " Nov 27 08:55:17 crc kubenswrapper[4971]: I1127 08:55:17.591114 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dq2sb\" (UniqueName: \"kubernetes.io/projected/5bea64ae-37fc-465f-9313-83d1f02236f1-kube-api-access-dq2sb\") pod \"5bea64ae-37fc-465f-9313-83d1f02236f1\" (UID: \"5bea64ae-37fc-465f-9313-83d1f02236f1\") " Nov 27 08:55:17 crc kubenswrapper[4971]: I1127 08:55:17.591148 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bea64ae-37fc-465f-9313-83d1f02236f1-config-data\") pod \"5bea64ae-37fc-465f-9313-83d1f02236f1\" (UID: \"5bea64ae-37fc-465f-9313-83d1f02236f1\") " Nov 27 08:55:17 crc kubenswrapper[4971]: I1127 08:55:17.598088 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bea64ae-37fc-465f-9313-83d1f02236f1-kube-api-access-dq2sb" (OuterVolumeSpecName: "kube-api-access-dq2sb") pod 
"5bea64ae-37fc-465f-9313-83d1f02236f1" (UID: "5bea64ae-37fc-465f-9313-83d1f02236f1"). InnerVolumeSpecName "kube-api-access-dq2sb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:55:17 crc kubenswrapper[4971]: I1127 08:55:17.598402 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bea64ae-37fc-465f-9313-83d1f02236f1-scripts" (OuterVolumeSpecName: "scripts") pod "5bea64ae-37fc-465f-9313-83d1f02236f1" (UID: "5bea64ae-37fc-465f-9313-83d1f02236f1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:55:17 crc kubenswrapper[4971]: I1127 08:55:17.623150 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bea64ae-37fc-465f-9313-83d1f02236f1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5bea64ae-37fc-465f-9313-83d1f02236f1" (UID: "5bea64ae-37fc-465f-9313-83d1f02236f1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:55:17 crc kubenswrapper[4971]: I1127 08:55:17.633286 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bea64ae-37fc-465f-9313-83d1f02236f1-config-data" (OuterVolumeSpecName: "config-data") pod "5bea64ae-37fc-465f-9313-83d1f02236f1" (UID: "5bea64ae-37fc-465f-9313-83d1f02236f1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:55:17 crc kubenswrapper[4971]: I1127 08:55:17.693868 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dq2sb\" (UniqueName: \"kubernetes.io/projected/5bea64ae-37fc-465f-9313-83d1f02236f1-kube-api-access-dq2sb\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:17 crc kubenswrapper[4971]: I1127 08:55:17.694069 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bea64ae-37fc-465f-9313-83d1f02236f1-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:17 crc kubenswrapper[4971]: I1127 08:55:17.694160 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5bea64ae-37fc-465f-9313-83d1f02236f1-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:17 crc kubenswrapper[4971]: I1127 08:55:17.694218 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bea64ae-37fc-465f-9313-83d1f02236f1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:17 crc kubenswrapper[4971]: I1127 08:55:17.986573 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zfsxx" event={"ID":"5bea64ae-37fc-465f-9313-83d1f02236f1","Type":"ContainerDied","Data":"a88eb2abd5fdaf2f61f782ad914da506f7cb1a9aba13333ba56a4dca98d7fe64"} Nov 27 08:55:17 crc kubenswrapper[4971]: I1127 08:55:17.986631 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a88eb2abd5fdaf2f61f782ad914da506f7cb1a9aba13333ba56a4dca98d7fe64" Nov 27 08:55:17 crc kubenswrapper[4971]: I1127 08:55:17.986714 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zfsxx" Nov 27 08:55:18 crc kubenswrapper[4971]: I1127 08:55:18.170205 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 27 08:55:18 crc kubenswrapper[4971]: I1127 08:55:18.170462 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="cf5f9d27-578a-4577-810c-c28895116963" containerName="nova-api-log" containerID="cri-o://37dc79150eb0089fb08e5e24cb927167b6adb0077b8f4f158d6f464b1a6a5897" gracePeriod=30 Nov 27 08:55:18 crc kubenswrapper[4971]: I1127 08:55:18.170621 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="cf5f9d27-578a-4577-810c-c28895116963" containerName="nova-api-api" containerID="cri-o://f53dc42abf2ddf077a44f008477e95940ec9f25f843f78932fa8e4492a4ee603" gracePeriod=30 Nov 27 08:55:18 crc kubenswrapper[4971]: I1127 08:55:18.191680 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:55:18 crc kubenswrapper[4971]: I1127 08:55:18.191931 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="e2bb452e-0870-4054-9b13-b01e8ad1cca6" containerName="nova-scheduler-scheduler" containerID="cri-o://ab4142ac42de530cd48d2557569e52f400fe9335280bb740f6a04efe948fd929" gracePeriod=30 Nov 27 08:55:18 crc kubenswrapper[4971]: I1127 08:55:18.211088 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 08:55:18 crc kubenswrapper[4971]: I1127 08:55:18.211385 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6b800971-c06c-43e8-9628-5b85099abe8c" containerName="nova-metadata-log" containerID="cri-o://efcf395ed596eded74842cfc57e84d8bb9c125297a7b3fbe255b246b5561fa89" gracePeriod=30 Nov 27 08:55:18 crc kubenswrapper[4971]: I1127 08:55:18.211497 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6b800971-c06c-43e8-9628-5b85099abe8c" containerName="nova-metadata-metadata" containerID="cri-o://a63f0a28be5f96fa01c7dcbafb9308c736ec7d0ba22158e2d9b2744d93d745f3" gracePeriod=30 Nov 27 08:55:18 crc kubenswrapper[4971]: E1127 08:55:18.514155 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ab4142ac42de530cd48d2557569e52f400fe9335280bb740f6a04efe948fd929" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 27 08:55:18 crc kubenswrapper[4971]: E1127 08:55:18.516104 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ab4142ac42de530cd48d2557569e52f400fe9335280bb740f6a04efe948fd929" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 27 08:55:18 crc kubenswrapper[4971]: E1127 08:55:18.517722 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ab4142ac42de530cd48d2557569e52f400fe9335280bb740f6a04efe948fd929" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 27 08:55:18 crc kubenswrapper[4971]: E1127 08:55:18.517849 4971 prober.go:104] "Probe errored" 
err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="e2bb452e-0870-4054-9b13-b01e8ad1cca6" containerName="nova-scheduler-scheduler" Nov 27 08:55:19 crc kubenswrapper[4971]: I1127 08:55:19.001434 4971 generic.go:334] "Generic (PLEG): container finished" podID="6b800971-c06c-43e8-9628-5b85099abe8c" containerID="efcf395ed596eded74842cfc57e84d8bb9c125297a7b3fbe255b246b5561fa89" exitCode=143 Nov 27 08:55:19 crc kubenswrapper[4971]: I1127 08:55:19.001542 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b800971-c06c-43e8-9628-5b85099abe8c","Type":"ContainerDied","Data":"efcf395ed596eded74842cfc57e84d8bb9c125297a7b3fbe255b246b5561fa89"} Nov 27 08:55:19 crc kubenswrapper[4971]: I1127 08:55:19.004168 4971 generic.go:334] "Generic (PLEG): container finished" podID="cf5f9d27-578a-4577-810c-c28895116963" containerID="37dc79150eb0089fb08e5e24cb927167b6adb0077b8f4f158d6f464b1a6a5897" exitCode=143 Nov 27 08:55:19 crc kubenswrapper[4971]: I1127 08:55:19.004266 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cf5f9d27-578a-4577-810c-c28895116963","Type":"ContainerDied","Data":"37dc79150eb0089fb08e5e24cb927167b6adb0077b8f4f158d6f464b1a6a5897"} Nov 27 08:55:19 crc kubenswrapper[4971]: I1127 08:55:19.551000 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:55:19 crc kubenswrapper[4971]: E1127 08:55:19.563179 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 08:55:20 crc kubenswrapper[4971]: E1127 08:55:20.624254 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2bb452e_0870_4054_9b13_b01e8ad1cca6.slice/crio-ab4142ac42de530cd48d2557569e52f400fe9335280bb740f6a04efe948fd929.scope\": RecentStats: unable to find data in memory cache]" Nov 27 08:55:21 crc kubenswrapper[4971]: I1127 08:55:21.046062 4971 generic.go:334] "Generic (PLEG): container finished" podID="e2bb452e-0870-4054-9b13-b01e8ad1cca6" containerID="ab4142ac42de530cd48d2557569e52f400fe9335280bb740f6a04efe948fd929" exitCode=0 Nov 27 08:55:21 crc kubenswrapper[4971]: I1127 08:55:21.046125 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e2bb452e-0870-4054-9b13-b01e8ad1cca6","Type":"ContainerDied","Data":"ab4142ac42de530cd48d2557569e52f400fe9335280bb740f6a04efe948fd929"} Nov 27 08:55:21 crc kubenswrapper[4971]: I1127 08:55:21.046155 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e2bb452e-0870-4054-9b13-b01e8ad1cca6","Type":"ContainerDied","Data":"0ea4df11e19c8cceef8b146df4959c350680021e010b16e0f4a441024ec1523d"} Nov 27 08:55:21 crc kubenswrapper[4971]: I1127 08:55:21.046167 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ea4df11e19c8cceef8b146df4959c350680021e010b16e0f4a441024ec1523d" Nov 
27 08:55:21 crc kubenswrapper[4971]: I1127 08:55:21.088869 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 08:55:21 crc kubenswrapper[4971]: I1127 08:55:21.188299 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55zq8\" (UniqueName: \"kubernetes.io/projected/e2bb452e-0870-4054-9b13-b01e8ad1cca6-kube-api-access-55zq8\") pod \"e2bb452e-0870-4054-9b13-b01e8ad1cca6\" (UID: \"e2bb452e-0870-4054-9b13-b01e8ad1cca6\") " Nov 27 08:55:21 crc kubenswrapper[4971]: I1127 08:55:21.188478 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2bb452e-0870-4054-9b13-b01e8ad1cca6-config-data\") pod \"e2bb452e-0870-4054-9b13-b01e8ad1cca6\" (UID: \"e2bb452e-0870-4054-9b13-b01e8ad1cca6\") " Nov 27 08:55:21 crc kubenswrapper[4971]: I1127 08:55:21.188580 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2bb452e-0870-4054-9b13-b01e8ad1cca6-combined-ca-bundle\") pod \"e2bb452e-0870-4054-9b13-b01e8ad1cca6\" (UID: \"e2bb452e-0870-4054-9b13-b01e8ad1cca6\") " Nov 27 08:55:21 crc kubenswrapper[4971]: I1127 08:55:21.196236 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2bb452e-0870-4054-9b13-b01e8ad1cca6-kube-api-access-55zq8" (OuterVolumeSpecName: "kube-api-access-55zq8") pod "e2bb452e-0870-4054-9b13-b01e8ad1cca6" (UID: "e2bb452e-0870-4054-9b13-b01e8ad1cca6"). InnerVolumeSpecName "kube-api-access-55zq8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:55:21 crc kubenswrapper[4971]: I1127 08:55:21.223695 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2bb452e-0870-4054-9b13-b01e8ad1cca6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e2bb452e-0870-4054-9b13-b01e8ad1cca6" (UID: "e2bb452e-0870-4054-9b13-b01e8ad1cca6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:55:21 crc kubenswrapper[4971]: I1127 08:55:21.244113 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2bb452e-0870-4054-9b13-b01e8ad1cca6-config-data" (OuterVolumeSpecName: "config-data") pod "e2bb452e-0870-4054-9b13-b01e8ad1cca6" (UID: "e2bb452e-0870-4054-9b13-b01e8ad1cca6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:55:21 crc kubenswrapper[4971]: I1127 08:55:21.291822 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55zq8\" (UniqueName: \"kubernetes.io/projected/e2bb452e-0870-4054-9b13-b01e8ad1cca6-kube-api-access-55zq8\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:21 crc kubenswrapper[4971]: I1127 08:55:21.291868 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2bb452e-0870-4054-9b13-b01e8ad1cca6-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:21 crc kubenswrapper[4971]: I1127 08:55:21.291882 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2bb452e-0870-4054-9b13-b01e8ad1cca6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:21 crc kubenswrapper[4971]: I1127 08:55:21.964266 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 27 08:55:21 crc kubenswrapper[4971]: I1127 08:55:21.972948 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.056627 4971 generic.go:334] "Generic (PLEG): container finished" podID="6b800971-c06c-43e8-9628-5b85099abe8c" containerID="a63f0a28be5f96fa01c7dcbafb9308c736ec7d0ba22158e2d9b2744d93d745f3" exitCode=0 Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.056709 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b800971-c06c-43e8-9628-5b85099abe8c","Type":"ContainerDied","Data":"a63f0a28be5f96fa01c7dcbafb9308c736ec7d0ba22158e2d9b2744d93d745f3"} Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.056747 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b800971-c06c-43e8-9628-5b85099abe8c","Type":"ContainerDied","Data":"1cf6c38099f1e9b0930db18f2d3c4cae3114de1d775aecc38646ccb2602c69b0"} Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.056768 4971 scope.go:117] "RemoveContainer" containerID="a63f0a28be5f96fa01c7dcbafb9308c736ec7d0ba22158e2d9b2744d93d745f3" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.056909 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.060100 4971 generic.go:334] "Generic (PLEG): container finished" podID="cf5f9d27-578a-4577-810c-c28895116963" containerID="f53dc42abf2ddf077a44f008477e95940ec9f25f843f78932fa8e4492a4ee603" exitCode=0 Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.060165 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.063059 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.063413 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cf5f9d27-578a-4577-810c-c28895116963","Type":"ContainerDied","Data":"f53dc42abf2ddf077a44f008477e95940ec9f25f843f78932fa8e4492a4ee603"} Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.063524 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cf5f9d27-578a-4577-810c-c28895116963","Type":"ContainerDied","Data":"82ef1b3f694d056c634e717a908fc7fdf999004b6a59683657e460767590dd0e"} Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.092597 4971 scope.go:117] "RemoveContainer" containerID="efcf395ed596eded74842cfc57e84d8bb9c125297a7b3fbe255b246b5561fa89" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.105259 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.106073 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf5f9d27-578a-4577-810c-c28895116963-combined-ca-bundle\") pod \"cf5f9d27-578a-4577-810c-c28895116963\" (UID: \"cf5f9d27-578a-4577-810c-c28895116963\") " Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.106150 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ddlfx\" (UniqueName: \"kubernetes.io/projected/cf5f9d27-578a-4577-810c-c28895116963-kube-api-access-ddlfx\") pod \"cf5f9d27-578a-4577-810c-c28895116963\" (UID: \"cf5f9d27-578a-4577-810c-c28895116963\") " Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.106194 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf5f9d27-578a-4577-810c-c28895116963-logs\") pod \"cf5f9d27-578a-4577-810c-c28895116963\" (UID: \"cf5f9d27-578a-4577-810c-c28895116963\") " Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.106334 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf5f9d27-578a-4577-810c-c28895116963-config-data\") pod \"cf5f9d27-578a-4577-810c-c28895116963\" (UID: \"cf5f9d27-578a-4577-810c-c28895116963\") " Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.106421 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b800971-c06c-43e8-9628-5b85099abe8c-config-data\") pod \"6b800971-c06c-43e8-9628-5b85099abe8c\" (UID: \"6b800971-c06c-43e8-9628-5b85099abe8c\") " Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.106452 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b800971-c06c-43e8-9628-5b85099abe8c-combined-ca-bundle\") pod \"6b800971-c06c-43e8-9628-5b85099abe8c\" (UID: \"6b800971-c06c-43e8-9628-5b85099abe8c\") " Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.106484 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b800971-c06c-43e8-9628-5b85099abe8c-logs\") pod \"6b800971-c06c-43e8-9628-5b85099abe8c\" (UID: \"6b800971-c06c-43e8-9628-5b85099abe8c\") " Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.106547 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"kube-api-access-k7n4n\" (UniqueName: \"kubernetes.io/projected/6b800971-c06c-43e8-9628-5b85099abe8c-kube-api-access-k7n4n\") pod \"6b800971-c06c-43e8-9628-5b85099abe8c\" (UID: \"6b800971-c06c-43e8-9628-5b85099abe8c\") " Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.107399 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf5f9d27-578a-4577-810c-c28895116963-logs" (OuterVolumeSpecName: "logs") pod "cf5f9d27-578a-4577-810c-c28895116963" (UID: "cf5f9d27-578a-4577-810c-c28895116963"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.107598 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b800971-c06c-43e8-9628-5b85099abe8c-logs" (OuterVolumeSpecName: "logs") pod "6b800971-c06c-43e8-9628-5b85099abe8c" (UID: "6b800971-c06c-43e8-9628-5b85099abe8c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.121589 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf5f9d27-578a-4577-810c-c28895116963-kube-api-access-ddlfx" (OuterVolumeSpecName: "kube-api-access-ddlfx") pod "cf5f9d27-578a-4577-810c-c28895116963" (UID: "cf5f9d27-578a-4577-810c-c28895116963"). InnerVolumeSpecName "kube-api-access-ddlfx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.129068 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b800971-c06c-43e8-9628-5b85099abe8c-kube-api-access-k7n4n" (OuterVolumeSpecName: "kube-api-access-k7n4n") pod "6b800971-c06c-43e8-9628-5b85099abe8c" (UID: "6b800971-c06c-43e8-9628-5b85099abe8c"). InnerVolumeSpecName "kube-api-access-k7n4n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.134090 4971 scope.go:117] "RemoveContainer" containerID="a63f0a28be5f96fa01c7dcbafb9308c736ec7d0ba22158e2d9b2744d93d745f3" Nov 27 08:55:22 crc kubenswrapper[4971]: E1127 08:55:22.142493 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a63f0a28be5f96fa01c7dcbafb9308c736ec7d0ba22158e2d9b2744d93d745f3\": container with ID starting with a63f0a28be5f96fa01c7dcbafb9308c736ec7d0ba22158e2d9b2744d93d745f3 not found: ID does not exist" containerID="a63f0a28be5f96fa01c7dcbafb9308c736ec7d0ba22158e2d9b2744d93d745f3" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.142576 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a63f0a28be5f96fa01c7dcbafb9308c736ec7d0ba22158e2d9b2744d93d745f3"} err="failed to get container status \"a63f0a28be5f96fa01c7dcbafb9308c736ec7d0ba22158e2d9b2744d93d745f3\": rpc error: code = NotFound desc = could not find container \"a63f0a28be5f96fa01c7dcbafb9308c736ec7d0ba22158e2d9b2744d93d745f3\": container with ID starting with a63f0a28be5f96fa01c7dcbafb9308c736ec7d0ba22158e2d9b2744d93d745f3 not found: ID does not exist" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.143006 4971 scope.go:117] "RemoveContainer" containerID="efcf395ed596eded74842cfc57e84d8bb9c125297a7b3fbe255b246b5561fa89" Nov 27 08:55:22 crc kubenswrapper[4971]: E1127 08:55:22.143586 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efcf395ed596eded74842cfc57e84d8bb9c125297a7b3fbe255b246b5561fa89\": container with ID starting with efcf395ed596eded74842cfc57e84d8bb9c125297a7b3fbe255b246b5561fa89 not found: ID does not exist" containerID="efcf395ed596eded74842cfc57e84d8bb9c125297a7b3fbe255b246b5561fa89" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.143615 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efcf395ed596eded74842cfc57e84d8bb9c125297a7b3fbe255b246b5561fa89"} err="failed to get container status \"efcf395ed596eded74842cfc57e84d8bb9c125297a7b3fbe255b246b5561fa89\": rpc error: code = NotFound desc = could not find container \"efcf395ed596eded74842cfc57e84d8bb9c125297a7b3fbe255b246b5561fa89\": container with ID starting with efcf395ed596eded74842cfc57e84d8bb9c125297a7b3fbe255b246b5561fa89 not found: ID does not exist" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.143633 4971 scope.go:117] "RemoveContainer" containerID="f53dc42abf2ddf077a44f008477e95940ec9f25f843f78932fa8e4492a4ee603" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.152805 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b800971-c06c-43e8-9628-5b85099abe8c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b800971-c06c-43e8-9628-5b85099abe8c" (UID: "6b800971-c06c-43e8-9628-5b85099abe8c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.157826 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf5f9d27-578a-4577-810c-c28895116963-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf5f9d27-578a-4577-810c-c28895116963" (UID: "cf5f9d27-578a-4577-810c-c28895116963"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.160933 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b800971-c06c-43e8-9628-5b85099abe8c-config-data" (OuterVolumeSpecName: "config-data") pod "6b800971-c06c-43e8-9628-5b85099abe8c" (UID: "6b800971-c06c-43e8-9628-5b85099abe8c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.161722 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.170689 4971 scope.go:117] "RemoveContainer" containerID="37dc79150eb0089fb08e5e24cb927167b6adb0077b8f4f158d6f464b1a6a5897" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.177304 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:55:22 crc kubenswrapper[4971]: E1127 08:55:22.178019 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2bb452e-0870-4054-9b13-b01e8ad1cca6" containerName="nova-scheduler-scheduler" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.178047 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2bb452e-0870-4054-9b13-b01e8ad1cca6" containerName="nova-scheduler-scheduler" Nov 27 08:55:22 crc kubenswrapper[4971]: E1127 08:55:22.178096 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf5f9d27-578a-4577-810c-c28895116963" containerName="nova-api-api" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.178106 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf5f9d27-578a-4577-810c-c28895116963" containerName="nova-api-api" Nov 27 08:55:22 crc kubenswrapper[4971]: E1127 08:55:22.178128 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b800971-c06c-43e8-9628-5b85099abe8c" containerName="nova-metadata-metadata" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.178136 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b800971-c06c-43e8-9628-5b85099abe8c" containerName="nova-metadata-metadata" Nov 27 08:55:22 crc kubenswrapper[4971]: E1127 08:55:22.178175 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf5f9d27-578a-4577-810c-c28895116963" containerName="nova-api-log" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.178186 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf5f9d27-578a-4577-810c-c28895116963" containerName="nova-api-log" Nov 27 08:55:22 crc kubenswrapper[4971]: E1127 08:55:22.178200 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b800971-c06c-43e8-9628-5b85099abe8c" containerName="nova-metadata-log" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.178210 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b800971-c06c-43e8-9628-5b85099abe8c" containerName="nova-metadata-log" Nov 27 08:55:22 crc kubenswrapper[4971]: E1127 08:55:22.178252 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bea64ae-37fc-465f-9313-83d1f02236f1" containerName="nova-manage" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.178262 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bea64ae-37fc-465f-9313-83d1f02236f1" containerName="nova-manage" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.178601 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf5f9d27-578a-4577-810c-c28895116963" 
containerName="nova-api-log" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.178618 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bea64ae-37fc-465f-9313-83d1f02236f1" containerName="nova-manage" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.178640 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b800971-c06c-43e8-9628-5b85099abe8c" containerName="nova-metadata-log" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.178656 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b800971-c06c-43e8-9628-5b85099abe8c" containerName="nova-metadata-metadata" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.178678 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2bb452e-0870-4054-9b13-b01e8ad1cca6" containerName="nova-scheduler-scheduler" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.178699 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf5f9d27-578a-4577-810c-c28895116963" containerName="nova-api-api" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.179385 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf5f9d27-578a-4577-810c-c28895116963-config-data" (OuterVolumeSpecName: "config-data") pod "cf5f9d27-578a-4577-810c-c28895116963" (UID: "cf5f9d27-578a-4577-810c-c28895116963"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.179591 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.183446 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.193951 4971 scope.go:117] "RemoveContainer" containerID="f53dc42abf2ddf077a44f008477e95940ec9f25f843f78932fa8e4492a4ee603" Nov 27 08:55:22 crc kubenswrapper[4971]: E1127 08:55:22.194389 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f53dc42abf2ddf077a44f008477e95940ec9f25f843f78932fa8e4492a4ee603\": container with ID starting with f53dc42abf2ddf077a44f008477e95940ec9f25f843f78932fa8e4492a4ee603 not found: ID does not exist" containerID="f53dc42abf2ddf077a44f008477e95940ec9f25f843f78932fa8e4492a4ee603" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.194429 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f53dc42abf2ddf077a44f008477e95940ec9f25f843f78932fa8e4492a4ee603"} err="failed to get container status \"f53dc42abf2ddf077a44f008477e95940ec9f25f843f78932fa8e4492a4ee603\": rpc error: code = NotFound desc = could not find container \"f53dc42abf2ddf077a44f008477e95940ec9f25f843f78932fa8e4492a4ee603\": container with ID starting with f53dc42abf2ddf077a44f008477e95940ec9f25f843f78932fa8e4492a4ee603 not found: ID does not exist" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.194461 4971 scope.go:117] "RemoveContainer" containerID="37dc79150eb0089fb08e5e24cb927167b6adb0077b8f4f158d6f464b1a6a5897" Nov 27 08:55:22 crc kubenswrapper[4971]: E1127 08:55:22.195088 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37dc79150eb0089fb08e5e24cb927167b6adb0077b8f4f158d6f464b1a6a5897\": container with ID starting with 
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.195158 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37dc79150eb0089fb08e5e24cb927167b6adb0077b8f4f158d6f464b1a6a5897"} err="failed to get container status \"37dc79150eb0089fb08e5e24cb927167b6adb0077b8f4f158d6f464b1a6a5897\": rpc error: code = NotFound desc = could not find container \"37dc79150eb0089fb08e5e24cb927167b6adb0077b8f4f158d6f464b1a6a5897\": container with ID starting with 37dc79150eb0089fb08e5e24cb927167b6adb0077b8f4f158d6f464b1a6a5897 not found: ID does not exist"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.200418 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.208918 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf5f9d27-578a-4577-810c-c28895116963-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.209128 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b800971-c06c-43e8-9628-5b85099abe8c-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.209247 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b800971-c06c-43e8-9628-5b85099abe8c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.209309 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b800971-c06c-43e8-9628-5b85099abe8c-logs\") on node \"crc\" DevicePath \"\""
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.209776 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7n4n\" (UniqueName: \"kubernetes.io/projected/6b800971-c06c-43e8-9628-5b85099abe8c-kube-api-access-k7n4n\") on node \"crc\" DevicePath \"\""
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.209866 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf5f9d27-578a-4577-810c-c28895116963-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.209922 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ddlfx\" (UniqueName: \"kubernetes.io/projected/cf5f9d27-578a-4577-810c-c28895116963-kube-api-access-ddlfx\") on node \"crc\" DevicePath \"\""
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.209988 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf5f9d27-578a-4577-810c-c28895116963-logs\") on node \"crc\" DevicePath \"\""
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.311388 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwvn4\" (UniqueName: \"kubernetes.io/projected/69d43348-4b62-4dc4-a920-2f702816dd3c-kube-api-access-mwvn4\") pod \"nova-scheduler-0\" (UID: \"69d43348-4b62-4dc4-a920-2f702816dd3c\") " pod="openstack/nova-scheduler-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.311473 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69d43348-4b62-4dc4-a920-2f702816dd3c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"69d43348-4b62-4dc4-a920-2f702816dd3c\") " pod="openstack/nova-scheduler-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.311601 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69d43348-4b62-4dc4-a920-2f702816dd3c-config-data\") pod \"nova-scheduler-0\" (UID: \"69d43348-4b62-4dc4-a920-2f702816dd3c\") " pod="openstack/nova-scheduler-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.413151 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69d43348-4b62-4dc4-a920-2f702816dd3c-config-data\") pod \"nova-scheduler-0\" (UID: \"69d43348-4b62-4dc4-a920-2f702816dd3c\") " pod="openstack/nova-scheduler-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.413877 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwvn4\" (UniqueName: \"kubernetes.io/projected/69d43348-4b62-4dc4-a920-2f702816dd3c-kube-api-access-mwvn4\") pod \"nova-scheduler-0\" (UID: \"69d43348-4b62-4dc4-a920-2f702816dd3c\") " pod="openstack/nova-scheduler-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.414083 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69d43348-4b62-4dc4-a920-2f702816dd3c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"69d43348-4b62-4dc4-a920-2f702816dd3c\") " pod="openstack/nova-scheduler-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.419498 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69d43348-4b62-4dc4-a920-2f702816dd3c-config-data\") pod \"nova-scheduler-0\" (UID: \"69d43348-4b62-4dc4-a920-2f702816dd3c\") " pod="openstack/nova-scheduler-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.423976 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69d43348-4b62-4dc4-a920-2f702816dd3c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"69d43348-4b62-4dc4-a920-2f702816dd3c\") " pod="openstack/nova-scheduler-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.437268 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.452278 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwvn4\" (UniqueName: \"kubernetes.io/projected/69d43348-4b62-4dc4-a920-2f702816dd3c-kube-api-access-mwvn4\") pod \"nova-scheduler-0\" (UID: \"69d43348-4b62-4dc4-a920-2f702816dd3c\") " pod="openstack/nova-scheduler-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.474350 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.491156 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.509346 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.514115 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.524369 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.528464 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.532166 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.535919 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.538648 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.542001 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.546965 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.571296 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b800971-c06c-43e8-9628-5b85099abe8c" path="/var/lib/kubelet/pods/6b800971-c06c-43e8-9628-5b85099abe8c/volumes"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.572046 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf5f9d27-578a-4577-810c-c28895116963" path="/var/lib/kubelet/pods/cf5f9d27-578a-4577-810c-c28895116963/volumes"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.572653 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2bb452e-0870-4054-9b13-b01e8ad1cca6" path="/var/lib/kubelet/pods/e2bb452e-0870-4054-9b13-b01e8ad1cca6/volumes"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.573944 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.627415 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\") " pod="openstack/nova-api-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.627918 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dbbn\" (UniqueName: \"kubernetes.io/projected/e8288baf-24dc-4fa9-9666-98ac1be3312b-kube-api-access-8dbbn\") pod \"nova-metadata-0\" (UID: \"e8288baf-24dc-4fa9-9666-98ac1be3312b\") " pod="openstack/nova-metadata-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.627987 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-config-data\") pod \"nova-api-0\" (UID: \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\") " pod="openstack/nova-api-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.628025 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8288baf-24dc-4fa9-9666-98ac1be3312b-config-data\") pod \"nova-metadata-0\" (UID: \"e8288baf-24dc-4fa9-9666-98ac1be3312b\") " pod="openstack/nova-metadata-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.628081 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8288baf-24dc-4fa9-9666-98ac1be3312b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e8288baf-24dc-4fa9-9666-98ac1be3312b\") " pod="openstack/nova-metadata-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.628108 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8288baf-24dc-4fa9-9666-98ac1be3312b-logs\") pod \"nova-metadata-0\" (UID: \"e8288baf-24dc-4fa9-9666-98ac1be3312b\") " pod="openstack/nova-metadata-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.628145 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-logs\") pod \"nova-api-0\" (UID: \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\") " pod="openstack/nova-api-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.628180 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwsfp\" (UniqueName: \"kubernetes.io/projected/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-kube-api-access-hwsfp\") pod \"nova-api-0\" (UID: \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\") " pod="openstack/nova-api-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.730208 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dbbn\" (UniqueName: \"kubernetes.io/projected/e8288baf-24dc-4fa9-9666-98ac1be3312b-kube-api-access-8dbbn\") pod \"nova-metadata-0\" (UID: \"e8288baf-24dc-4fa9-9666-98ac1be3312b\") " pod="openstack/nova-metadata-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.730307 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-config-data\") pod \"nova-api-0\" (UID: \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\") " pod="openstack/nova-api-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.730801 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8288baf-24dc-4fa9-9666-98ac1be3312b-config-data\") pod \"nova-metadata-0\" (UID: \"e8288baf-24dc-4fa9-9666-98ac1be3312b\") " pod="openstack/nova-metadata-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.730996 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8288baf-24dc-4fa9-9666-98ac1be3312b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e8288baf-24dc-4fa9-9666-98ac1be3312b\") " pod="openstack/nova-metadata-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.731036 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8288baf-24dc-4fa9-9666-98ac1be3312b-logs\") pod \"nova-metadata-0\" (UID: \"e8288baf-24dc-4fa9-9666-98ac1be3312b\") " pod="openstack/nova-metadata-0"
Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.731223 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-logs\") pod \"nova-api-0\" (UID: \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\") " pod="openstack/nova-api-0"
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-logs\") pod \"nova-api-0\" (UID: \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\") " pod="openstack/nova-api-0" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.731336 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwsfp\" (UniqueName: \"kubernetes.io/projected/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-kube-api-access-hwsfp\") pod \"nova-api-0\" (UID: \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\") " pod="openstack/nova-api-0" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.731651 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-logs\") pod \"nova-api-0\" (UID: \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\") " pod="openstack/nova-api-0" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.731773 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\") " pod="openstack/nova-api-0" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.733170 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8288baf-24dc-4fa9-9666-98ac1be3312b-logs\") pod \"nova-metadata-0\" (UID: \"e8288baf-24dc-4fa9-9666-98ac1be3312b\") " pod="openstack/nova-metadata-0" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.747381 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\") " pod="openstack/nova-api-0" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.747566 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8288baf-24dc-4fa9-9666-98ac1be3312b-config-data\") pod \"nova-metadata-0\" (UID: \"e8288baf-24dc-4fa9-9666-98ac1be3312b\") " pod="openstack/nova-metadata-0" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.749661 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-config-data\") pod \"nova-api-0\" (UID: \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\") " pod="openstack/nova-api-0" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.749732 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8288baf-24dc-4fa9-9666-98ac1be3312b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e8288baf-24dc-4fa9-9666-98ac1be3312b\") " pod="openstack/nova-metadata-0" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.752223 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwsfp\" (UniqueName: \"kubernetes.io/projected/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-kube-api-access-hwsfp\") pod \"nova-api-0\" (UID: \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\") " pod="openstack/nova-api-0" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.755235 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dbbn\" (UniqueName: 
\"kubernetes.io/projected/e8288baf-24dc-4fa9-9666-98ac1be3312b-kube-api-access-8dbbn\") pod \"nova-metadata-0\" (UID: \"e8288baf-24dc-4fa9-9666-98ac1be3312b\") " pod="openstack/nova-metadata-0" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.862857 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 08:55:22 crc kubenswrapper[4971]: I1127 08:55:22.871432 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 08:55:23 crc kubenswrapper[4971]: I1127 08:55:23.022428 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:55:23 crc kubenswrapper[4971]: W1127 08:55:23.050370 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69d43348_4b62_4dc4_a920_2f702816dd3c.slice/crio-d7bb9fbf2ea777e17a2002f01640d4f404ade7057fd73f642b4b5c9e69503386 WatchSource:0}: Error finding container d7bb9fbf2ea777e17a2002f01640d4f404ade7057fd73f642b4b5c9e69503386: Status 404 returned error can't find the container with id d7bb9fbf2ea777e17a2002f01640d4f404ade7057fd73f642b4b5c9e69503386 Nov 27 08:55:23 crc kubenswrapper[4971]: I1127 08:55:23.106761 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"69d43348-4b62-4dc4-a920-2f702816dd3c","Type":"ContainerStarted","Data":"d7bb9fbf2ea777e17a2002f01640d4f404ade7057fd73f642b4b5c9e69503386"} Nov 27 08:55:23 crc kubenswrapper[4971]: I1127 08:55:23.417461 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 08:55:23 crc kubenswrapper[4971]: W1127 08:55:23.421751 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8288baf_24dc_4fa9_9666_98ac1be3312b.slice/crio-06b9fc5963aeae0f2e702aca8b47a131b11340928ffeb54d7d2ede579769bc8f WatchSource:0}: Error finding container 06b9fc5963aeae0f2e702aca8b47a131b11340928ffeb54d7d2ede579769bc8f: Status 404 returned error can't find the container with id 06b9fc5963aeae0f2e702aca8b47a131b11340928ffeb54d7d2ede579769bc8f Nov 27 08:55:23 crc kubenswrapper[4971]: I1127 08:55:23.490068 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 27 08:55:24 crc kubenswrapper[4971]: I1127 08:55:24.118270 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"69d43348-4b62-4dc4-a920-2f702816dd3c","Type":"ContainerStarted","Data":"d1b8cee8569e919cc0a8a040dc0b054f0e8a5012d38019e1d96fe9168cbcc864"} Nov 27 08:55:24 crc kubenswrapper[4971]: I1127 08:55:24.121075 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e8288baf-24dc-4fa9-9666-98ac1be3312b","Type":"ContainerStarted","Data":"288824eb43528bd17e403543131e5673dcc7825b9dcebaeabb1361d48721f47d"} Nov 27 08:55:24 crc kubenswrapper[4971]: I1127 08:55:24.121160 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e8288baf-24dc-4fa9-9666-98ac1be3312b","Type":"ContainerStarted","Data":"0ba46ea69f37b27dcd1932a9f82ac7a1e654e126a0a73c730126480358a3e44a"} Nov 27 08:55:24 crc kubenswrapper[4971]: I1127 08:55:24.121183 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"e8288baf-24dc-4fa9-9666-98ac1be3312b","Type":"ContainerStarted","Data":"06b9fc5963aeae0f2e702aca8b47a131b11340928ffeb54d7d2ede579769bc8f"} Nov 27 08:55:24 crc kubenswrapper[4971]: I1127 08:55:24.123779 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a96dcaa9-3e6d-44ea-a535-3864fa826d2c","Type":"ContainerStarted","Data":"0b5ee625010941a0a8680ea975925421402f231a7f759a98be193b60b2b9f054"} Nov 27 08:55:24 crc kubenswrapper[4971]: I1127 08:55:24.123822 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a96dcaa9-3e6d-44ea-a535-3864fa826d2c","Type":"ContainerStarted","Data":"b6d2a31d08d150a164b1452a51573733e32c939ca73b89cb0afe0eba8742b9fc"} Nov 27 08:55:24 crc kubenswrapper[4971]: I1127 08:55:24.123838 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a96dcaa9-3e6d-44ea-a535-3864fa826d2c","Type":"ContainerStarted","Data":"7f51571f556dc49bce216d14aee811f1fe68822e9064e374537f9d87e81dc43b"} Nov 27 08:55:24 crc kubenswrapper[4971]: I1127 08:55:24.144435 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.144409211 podStartE2EDuration="2.144409211s" podCreationTimestamp="2025-11-27 08:55:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:55:24.137365039 +0000 UTC m=+7362.329408957" watchObservedRunningTime="2025-11-27 08:55:24.144409211 +0000 UTC m=+7362.336453149" Nov 27 08:55:24 crc kubenswrapper[4971]: I1127 08:55:24.196335 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.196318967 podStartE2EDuration="2.196318967s" podCreationTimestamp="2025-11-27 08:55:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:55:24.193753874 +0000 UTC m=+7362.385797792" watchObservedRunningTime="2025-11-27 08:55:24.196318967 +0000 UTC m=+7362.388362885" Nov 27 08:55:24 crc kubenswrapper[4971]: I1127 08:55:24.197837 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.197831551 podStartE2EDuration="2.197831551s" podCreationTimestamp="2025-11-27 08:55:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:55:24.168802799 +0000 UTC m=+7362.360846707" watchObservedRunningTime="2025-11-27 08:55:24.197831551 +0000 UTC m=+7362.389875469" Nov 27 08:55:27 crc kubenswrapper[4971]: I1127 08:55:27.511641 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 27 08:55:27 crc kubenswrapper[4971]: I1127 08:55:27.864318 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 27 08:55:27 crc kubenswrapper[4971]: I1127 08:55:27.865580 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 27 08:55:32 crc kubenswrapper[4971]: I1127 08:55:32.511595 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 27 08:55:32 crc kubenswrapper[4971]: I1127 08:55:32.540220 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 27 
08:55:32 crc kubenswrapper[4971]: I1127 08:55:32.863752 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 27 08:55:32 crc kubenswrapper[4971]: I1127 08:55:32.863805 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 27 08:55:32 crc kubenswrapper[4971]: I1127 08:55:32.872115 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 27 08:55:32 crc kubenswrapper[4971]: I1127 08:55:32.872193 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 27 08:55:33 crc kubenswrapper[4971]: I1127 08:55:33.240407 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 27 08:55:33 crc kubenswrapper[4971]: I1127 08:55:33.550228 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4" Nov 27 08:55:33 crc kubenswrapper[4971]: I1127 08:55:33.998915 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="e8288baf-24dc-4fa9-9666-98ac1be3312b" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.85:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 08:55:34 crc kubenswrapper[4971]: I1127 08:55:34.040907 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="e8288baf-24dc-4fa9-9666-98ac1be3312b" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.85:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 08:55:34 crc kubenswrapper[4971]: I1127 08:55:34.041007 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a96dcaa9-3e6d-44ea-a535-3864fa826d2c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.86:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 08:55:34 crc kubenswrapper[4971]: I1127 08:55:34.040856 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a96dcaa9-3e6d-44ea-a535-3864fa826d2c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.86:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 08:55:34 crc kubenswrapper[4971]: I1127 08:55:34.217652 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"4de37cdeabf174e17fecf55eb37e927f401aeb831db4b1fc4ee5997316920a31"} Nov 27 08:55:42 crc kubenswrapper[4971]: I1127 08:55:42.868245 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 27 08:55:42 crc kubenswrapper[4971]: I1127 08:55:42.870714 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 27 08:55:42 crc kubenswrapper[4971]: I1127 08:55:42.874482 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 27 08:55:42 crc kubenswrapper[4971]: I1127 08:55:42.876083 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 27 08:55:42 crc kubenswrapper[4971]: I1127 
Nov 27 08:55:42 crc kubenswrapper[4971]: I1127 08:55:42.878085 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Nov 27 08:55:42 crc kubenswrapper[4971]: I1127 08:55:42.878706 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Nov 27 08:55:42 crc kubenswrapper[4971]: I1127 08:55:42.881908 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.307751 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.312798 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.531247 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5dfc79c745-rfx9v"]
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.533148 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v"
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.551406 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5dfc79c745-rfx9v"]
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.654068 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-dns-svc\") pod \"dnsmasq-dns-5dfc79c745-rfx9v\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v"
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.654142 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nt92v\" (UniqueName: \"kubernetes.io/projected/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-kube-api-access-nt92v\") pod \"dnsmasq-dns-5dfc79c745-rfx9v\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v"
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.654392 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-ovsdbserver-nb\") pod \"dnsmasq-dns-5dfc79c745-rfx9v\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v"
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.654505 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-config\") pod \"dnsmasq-dns-5dfc79c745-rfx9v\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v"
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.654600 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-ovsdbserver-sb\") pod \"dnsmasq-dns-5dfc79c745-rfx9v\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v"
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.757619 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-dns-svc\") pod \"dnsmasq-dns-5dfc79c745-rfx9v\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v"
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.758041 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nt92v\" (UniqueName: \"kubernetes.io/projected/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-kube-api-access-nt92v\") pod \"dnsmasq-dns-5dfc79c745-rfx9v\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v"
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.758106 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-ovsdbserver-nb\") pod \"dnsmasq-dns-5dfc79c745-rfx9v\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v"
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.758137 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-config\") pod \"dnsmasq-dns-5dfc79c745-rfx9v\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v"
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.758172 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-ovsdbserver-sb\") pod \"dnsmasq-dns-5dfc79c745-rfx9v\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v"
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.759434 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-ovsdbserver-sb\") pod \"dnsmasq-dns-5dfc79c745-rfx9v\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v"
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.759662 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-ovsdbserver-nb\") pod \"dnsmasq-dns-5dfc79c745-rfx9v\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v"
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.759993 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-dns-svc\") pod \"dnsmasq-dns-5dfc79c745-rfx9v\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v"
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.760487 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-config\") pod \"dnsmasq-dns-5dfc79c745-rfx9v\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v"
Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.787132 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nt92v\" (UniqueName: \"kubernetes.io/projected/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-kube-api-access-nt92v\") pod \"dnsmasq-dns-5dfc79c745-rfx9v\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v"
\"kubernetes.io/projected/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-kube-api-access-nt92v\") pod \"dnsmasq-dns-5dfc79c745-rfx9v\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v" Nov 27 08:55:43 crc kubenswrapper[4971]: I1127 08:55:43.856204 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v" Nov 27 08:55:44 crc kubenswrapper[4971]: W1127 08:55:44.359027 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3dd49bba_d34d_4f52_9d67_b946a4c2dca3.slice/crio-362388fa47c2f9e6c431bf432732c8b672b84562d8f536a4867714643da0d90a WatchSource:0}: Error finding container 362388fa47c2f9e6c431bf432732c8b672b84562d8f536a4867714643da0d90a: Status 404 returned error can't find the container with id 362388fa47c2f9e6c431bf432732c8b672b84562d8f536a4867714643da0d90a Nov 27 08:55:44 crc kubenswrapper[4971]: I1127 08:55:44.362628 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5dfc79c745-rfx9v"] Nov 27 08:55:45 crc kubenswrapper[4971]: I1127 08:55:45.328320 4971 generic.go:334] "Generic (PLEG): container finished" podID="3dd49bba-d34d-4f52-9d67-b946a4c2dca3" containerID="6b25d7d16bf2022100d04fc2dd53820ede96b5c36adc91afca9fc7a50a1d3e18" exitCode=0 Nov 27 08:55:45 crc kubenswrapper[4971]: I1127 08:55:45.330623 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v" event={"ID":"3dd49bba-d34d-4f52-9d67-b946a4c2dca3","Type":"ContainerDied","Data":"6b25d7d16bf2022100d04fc2dd53820ede96b5c36adc91afca9fc7a50a1d3e18"} Nov 27 08:55:45 crc kubenswrapper[4971]: I1127 08:55:45.330742 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v" event={"ID":"3dd49bba-d34d-4f52-9d67-b946a4c2dca3","Type":"ContainerStarted","Data":"362388fa47c2f9e6c431bf432732c8b672b84562d8f536a4867714643da0d90a"} Nov 27 08:55:46 crc kubenswrapper[4971]: I1127 08:55:46.341700 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v" event={"ID":"3dd49bba-d34d-4f52-9d67-b946a4c2dca3","Type":"ContainerStarted","Data":"ff236db198e68dbd0100c5e2302337c1a494db52e0c9ba11d78715c9636f3e79"} Nov 27 08:55:46 crc kubenswrapper[4971]: I1127 08:55:46.342231 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v" Nov 27 08:55:46 crc kubenswrapper[4971]: I1127 08:55:46.367662 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v" podStartSLOduration=3.367631727 podStartE2EDuration="3.367631727s" podCreationTimestamp="2025-11-27 08:55:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:55:46.362212072 +0000 UTC m=+7384.554256000" watchObservedRunningTime="2025-11-27 08:55:46.367631727 +0000 UTC m=+7384.559675645" Nov 27 08:55:53 crc kubenswrapper[4971]: I1127 08:55:53.858713 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v" Nov 27 08:55:53 crc kubenswrapper[4971]: I1127 08:55:53.950622 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d8fdb9c79-qxcdk"] Nov 27 08:55:53 crc kubenswrapper[4971]: I1127 08:55:53.950890 4971 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" podUID="f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e" containerName="dnsmasq-dns" containerID="cri-o://dc4a5a7ba4e0734b4a4819b8af4682864054b3b22e1bea26f0e720371fa4ff31" gracePeriod=10 Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.437328 4971 generic.go:334] "Generic (PLEG): container finished" podID="f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e" containerID="dc4a5a7ba4e0734b4a4819b8af4682864054b3b22e1bea26f0e720371fa4ff31" exitCode=0 Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.437449 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" event={"ID":"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e","Type":"ContainerDied","Data":"dc4a5a7ba4e0734b4a4819b8af4682864054b3b22e1bea26f0e720371fa4ff31"} Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.437685 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" event={"ID":"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e","Type":"ContainerDied","Data":"4dd5d5bdf1e10b0ee7341b77aa5ef778efbc1eb5cccfd97949507418952d5c39"} Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.437708 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4dd5d5bdf1e10b0ee7341b77aa5ef778efbc1eb5cccfd97949507418952d5c39" Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.467575 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.606331 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-config\") pod \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.606393 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-ovsdbserver-sb\") pod \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.606565 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kwxc\" (UniqueName: \"kubernetes.io/projected/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-kube-api-access-4kwxc\") pod \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.606596 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-dns-svc\") pod \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.606636 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-ovsdbserver-nb\") pod \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\" (UID: \"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e\") " Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.627924 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-kube-api-access-4kwxc" (OuterVolumeSpecName: "kube-api-access-4kwxc") pod 
"f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e" (UID: "f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e"). InnerVolumeSpecName "kube-api-access-4kwxc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.658440 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e" (UID: "f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.667503 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e" (UID: "f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.668085 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-config" (OuterVolumeSpecName: "config") pod "f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e" (UID: "f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.671720 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e" (UID: "f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.710121 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.710157 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-config\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.710167 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.710177 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kwxc\" (UniqueName: \"kubernetes.io/projected/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-kube-api-access-4kwxc\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:54 crc kubenswrapper[4971]: I1127 08:55:54.710187 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 08:55:55 crc kubenswrapper[4971]: I1127 08:55:55.446481 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d8fdb9c79-qxcdk" Nov 27 08:55:55 crc kubenswrapper[4971]: I1127 08:55:55.499982 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d8fdb9c79-qxcdk"] Nov 27 08:55:55 crc kubenswrapper[4971]: I1127 08:55:55.508704 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d8fdb9c79-qxcdk"] Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.574102 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e" path="/var/lib/kubelet/pods/f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e/volumes" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.656015 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-t7qgx"] Nov 27 08:55:56 crc kubenswrapper[4971]: E1127 08:55:56.656684 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e" containerName="init" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.656703 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e" containerName="init" Nov 27 08:55:56 crc kubenswrapper[4971]: E1127 08:55:56.656725 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e" containerName="dnsmasq-dns" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.656732 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e" containerName="dnsmasq-dns" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.656989 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6eec4a6-7510-4ab9-8deb-0b3ee37c7c9e" containerName="dnsmasq-dns" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.657879 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-t7qgx" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.670376 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-t7qgx"] Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.748269 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/adaff279-cded-4a90-9cb8-d6e29cc2b11e-operator-scripts\") pod \"cinder-db-create-t7qgx\" (UID: \"adaff279-cded-4a90-9cb8-d6e29cc2b11e\") " pod="openstack/cinder-db-create-t7qgx" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.748372 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5wzn\" (UniqueName: \"kubernetes.io/projected/adaff279-cded-4a90-9cb8-d6e29cc2b11e-kube-api-access-j5wzn\") pod \"cinder-db-create-t7qgx\" (UID: \"adaff279-cded-4a90-9cb8-d6e29cc2b11e\") " pod="openstack/cinder-db-create-t7qgx" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.755046 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-87c3-account-create-update-wwwnv"] Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.757315 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-87c3-account-create-update-wwwnv" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.762675 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.769759 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-87c3-account-create-update-wwwnv"] Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.850358 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2d8103f-0a96-40d7-afe9-2d87072e305c-operator-scripts\") pod \"cinder-87c3-account-create-update-wwwnv\" (UID: \"c2d8103f-0a96-40d7-afe9-2d87072e305c\") " pod="openstack/cinder-87c3-account-create-update-wwwnv" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.850435 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/adaff279-cded-4a90-9cb8-d6e29cc2b11e-operator-scripts\") pod \"cinder-db-create-t7qgx\" (UID: \"adaff279-cded-4a90-9cb8-d6e29cc2b11e\") " pod="openstack/cinder-db-create-t7qgx" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.850490 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5wzn\" (UniqueName: \"kubernetes.io/projected/adaff279-cded-4a90-9cb8-d6e29cc2b11e-kube-api-access-j5wzn\") pod \"cinder-db-create-t7qgx\" (UID: \"adaff279-cded-4a90-9cb8-d6e29cc2b11e\") " pod="openstack/cinder-db-create-t7qgx" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.850573 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qkl5\" (UniqueName: \"kubernetes.io/projected/c2d8103f-0a96-40d7-afe9-2d87072e305c-kube-api-access-2qkl5\") pod \"cinder-87c3-account-create-update-wwwnv\" (UID: \"c2d8103f-0a96-40d7-afe9-2d87072e305c\") " pod="openstack/cinder-87c3-account-create-update-wwwnv" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.851672 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/adaff279-cded-4a90-9cb8-d6e29cc2b11e-operator-scripts\") pod \"cinder-db-create-t7qgx\" (UID: \"adaff279-cded-4a90-9cb8-d6e29cc2b11e\") " pod="openstack/cinder-db-create-t7qgx" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.875348 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5wzn\" (UniqueName: \"kubernetes.io/projected/adaff279-cded-4a90-9cb8-d6e29cc2b11e-kube-api-access-j5wzn\") pod \"cinder-db-create-t7qgx\" (UID: \"adaff279-cded-4a90-9cb8-d6e29cc2b11e\") " pod="openstack/cinder-db-create-t7qgx" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.952779 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qkl5\" (UniqueName: \"kubernetes.io/projected/c2d8103f-0a96-40d7-afe9-2d87072e305c-kube-api-access-2qkl5\") pod \"cinder-87c3-account-create-update-wwwnv\" (UID: \"c2d8103f-0a96-40d7-afe9-2d87072e305c\") " pod="openstack/cinder-87c3-account-create-update-wwwnv" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.952903 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2d8103f-0a96-40d7-afe9-2d87072e305c-operator-scripts\") pod 
\"cinder-87c3-account-create-update-wwwnv\" (UID: \"c2d8103f-0a96-40d7-afe9-2d87072e305c\") " pod="openstack/cinder-87c3-account-create-update-wwwnv" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.953786 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2d8103f-0a96-40d7-afe9-2d87072e305c-operator-scripts\") pod \"cinder-87c3-account-create-update-wwwnv\" (UID: \"c2d8103f-0a96-40d7-afe9-2d87072e305c\") " pod="openstack/cinder-87c3-account-create-update-wwwnv" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.987368 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qkl5\" (UniqueName: \"kubernetes.io/projected/c2d8103f-0a96-40d7-afe9-2d87072e305c-kube-api-access-2qkl5\") pod \"cinder-87c3-account-create-update-wwwnv\" (UID: \"c2d8103f-0a96-40d7-afe9-2d87072e305c\") " pod="openstack/cinder-87c3-account-create-update-wwwnv" Nov 27 08:55:56 crc kubenswrapper[4971]: I1127 08:55:56.988191 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-t7qgx" Nov 27 08:55:57 crc kubenswrapper[4971]: I1127 08:55:57.090027 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-87c3-account-create-update-wwwnv" Nov 27 08:55:57 crc kubenswrapper[4971]: I1127 08:55:57.653848 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-t7qgx"] Nov 27 08:55:57 crc kubenswrapper[4971]: I1127 08:55:57.756837 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-87c3-account-create-update-wwwnv"] Nov 27 08:55:57 crc kubenswrapper[4971]: W1127 08:55:57.757800 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2d8103f_0a96_40d7_afe9_2d87072e305c.slice/crio-e72395ac4f895a52f5be6894cd99f39e4b91031120e93ac7eae170c66872764c WatchSource:0}: Error finding container e72395ac4f895a52f5be6894cd99f39e4b91031120e93ac7eae170c66872764c: Status 404 returned error can't find the container with id e72395ac4f895a52f5be6894cd99f39e4b91031120e93ac7eae170c66872764c Nov 27 08:55:58 crc kubenswrapper[4971]: I1127 08:55:58.489893 4971 generic.go:334] "Generic (PLEG): container finished" podID="c2d8103f-0a96-40d7-afe9-2d87072e305c" containerID="ce2a5274d731d2ead02a7bc5663e52e170b92ce82f26bd63930a767849a94564" exitCode=0 Nov 27 08:55:58 crc kubenswrapper[4971]: I1127 08:55:58.489964 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-87c3-account-create-update-wwwnv" event={"ID":"c2d8103f-0a96-40d7-afe9-2d87072e305c","Type":"ContainerDied","Data":"ce2a5274d731d2ead02a7bc5663e52e170b92ce82f26bd63930a767849a94564"} Nov 27 08:55:58 crc kubenswrapper[4971]: I1127 08:55:58.489995 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-87c3-account-create-update-wwwnv" event={"ID":"c2d8103f-0a96-40d7-afe9-2d87072e305c","Type":"ContainerStarted","Data":"e72395ac4f895a52f5be6894cd99f39e4b91031120e93ac7eae170c66872764c"} Nov 27 08:55:58 crc kubenswrapper[4971]: I1127 08:55:58.492049 4971 generic.go:334] "Generic (PLEG): container finished" podID="adaff279-cded-4a90-9cb8-d6e29cc2b11e" containerID="10577e750b4cd111d9891dc9d5b4720c0e5bd96f687fa9efca1a269f543ca1b0" exitCode=0 Nov 27 08:55:58 crc kubenswrapper[4971]: I1127 08:55:58.492084 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-t7qgx" 
event={"ID":"adaff279-cded-4a90-9cb8-d6e29cc2b11e","Type":"ContainerDied","Data":"10577e750b4cd111d9891dc9d5b4720c0e5bd96f687fa9efca1a269f543ca1b0"} Nov 27 08:55:58 crc kubenswrapper[4971]: I1127 08:55:58.492103 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-t7qgx" event={"ID":"adaff279-cded-4a90-9cb8-d6e29cc2b11e","Type":"ContainerStarted","Data":"aae1f8bb4163a54721471773f484e88a7a9e5335fdf2878395964b1c23a6ed7c"} Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.205827 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-t7qgx" Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.213911 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-87c3-account-create-update-wwwnv" Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.357440 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5wzn\" (UniqueName: \"kubernetes.io/projected/adaff279-cded-4a90-9cb8-d6e29cc2b11e-kube-api-access-j5wzn\") pod \"adaff279-cded-4a90-9cb8-d6e29cc2b11e\" (UID: \"adaff279-cded-4a90-9cb8-d6e29cc2b11e\") " Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.357652 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qkl5\" (UniqueName: \"kubernetes.io/projected/c2d8103f-0a96-40d7-afe9-2d87072e305c-kube-api-access-2qkl5\") pod \"c2d8103f-0a96-40d7-afe9-2d87072e305c\" (UID: \"c2d8103f-0a96-40d7-afe9-2d87072e305c\") " Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.357734 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/adaff279-cded-4a90-9cb8-d6e29cc2b11e-operator-scripts\") pod \"adaff279-cded-4a90-9cb8-d6e29cc2b11e\" (UID: \"adaff279-cded-4a90-9cb8-d6e29cc2b11e\") " Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.357800 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2d8103f-0a96-40d7-afe9-2d87072e305c-operator-scripts\") pod \"c2d8103f-0a96-40d7-afe9-2d87072e305c\" (UID: \"c2d8103f-0a96-40d7-afe9-2d87072e305c\") " Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.358346 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/adaff279-cded-4a90-9cb8-d6e29cc2b11e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "adaff279-cded-4a90-9cb8-d6e29cc2b11e" (UID: "adaff279-cded-4a90-9cb8-d6e29cc2b11e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.358548 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/adaff279-cded-4a90-9cb8-d6e29cc2b11e-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.359075 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2d8103f-0a96-40d7-afe9-2d87072e305c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c2d8103f-0a96-40d7-afe9-2d87072e305c" (UID: "c2d8103f-0a96-40d7-afe9-2d87072e305c"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.364978 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2d8103f-0a96-40d7-afe9-2d87072e305c-kube-api-access-2qkl5" (OuterVolumeSpecName: "kube-api-access-2qkl5") pod "c2d8103f-0a96-40d7-afe9-2d87072e305c" (UID: "c2d8103f-0a96-40d7-afe9-2d87072e305c"). InnerVolumeSpecName "kube-api-access-2qkl5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.365042 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/adaff279-cded-4a90-9cb8-d6e29cc2b11e-kube-api-access-j5wzn" (OuterVolumeSpecName: "kube-api-access-j5wzn") pod "adaff279-cded-4a90-9cb8-d6e29cc2b11e" (UID: "adaff279-cded-4a90-9cb8-d6e29cc2b11e"). InnerVolumeSpecName "kube-api-access-j5wzn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.460797 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qkl5\" (UniqueName: \"kubernetes.io/projected/c2d8103f-0a96-40d7-afe9-2d87072e305c-kube-api-access-2qkl5\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.460843 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2d8103f-0a96-40d7-afe9-2d87072e305c-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.460856 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5wzn\" (UniqueName: \"kubernetes.io/projected/adaff279-cded-4a90-9cb8-d6e29cc2b11e-kube-api-access-j5wzn\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.740792 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-t7qgx" Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.741424 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-t7qgx" event={"ID":"adaff279-cded-4a90-9cb8-d6e29cc2b11e","Type":"ContainerDied","Data":"aae1f8bb4163a54721471773f484e88a7a9e5335fdf2878395964b1c23a6ed7c"} Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.741484 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aae1f8bb4163a54721471773f484e88a7a9e5335fdf2878395964b1c23a6ed7c" Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.743593 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-87c3-account-create-update-wwwnv" event={"ID":"c2d8103f-0a96-40d7-afe9-2d87072e305c","Type":"ContainerDied","Data":"e72395ac4f895a52f5be6894cd99f39e4b91031120e93ac7eae170c66872764c"} Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.743620 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e72395ac4f895a52f5be6894cd99f39e4b91031120e93ac7eae170c66872764c" Nov 27 08:56:00 crc kubenswrapper[4971]: I1127 08:56:00.743641 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-87c3-account-create-update-wwwnv" Nov 27 08:56:01 crc kubenswrapper[4971]: I1127 08:56:01.893350 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-hxk8s"] Nov 27 08:56:01 crc kubenswrapper[4971]: E1127 08:56:01.895209 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adaff279-cded-4a90-9cb8-d6e29cc2b11e" containerName="mariadb-database-create" Nov 27 08:56:01 crc kubenswrapper[4971]: I1127 08:56:01.895284 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="adaff279-cded-4a90-9cb8-d6e29cc2b11e" containerName="mariadb-database-create" Nov 27 08:56:01 crc kubenswrapper[4971]: E1127 08:56:01.895362 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2d8103f-0a96-40d7-afe9-2d87072e305c" containerName="mariadb-account-create-update" Nov 27 08:56:01 crc kubenswrapper[4971]: I1127 08:56:01.895414 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2d8103f-0a96-40d7-afe9-2d87072e305c" containerName="mariadb-account-create-update" Nov 27 08:56:01 crc kubenswrapper[4971]: I1127 08:56:01.895740 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2d8103f-0a96-40d7-afe9-2d87072e305c" containerName="mariadb-account-create-update" Nov 27 08:56:01 crc kubenswrapper[4971]: I1127 08:56:01.895865 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="adaff279-cded-4a90-9cb8-d6e29cc2b11e" containerName="mariadb-database-create" Nov 27 08:56:01 crc kubenswrapper[4971]: I1127 08:56:01.896937 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:01 crc kubenswrapper[4971]: I1127 08:56:01.899889 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Nov 27 08:56:01 crc kubenswrapper[4971]: I1127 08:56:01.900670 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Nov 27 08:56:01 crc kubenswrapper[4971]: I1127 08:56:01.901907 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-kk72s" Nov 27 08:56:01 crc kubenswrapper[4971]: I1127 08:56:01.915361 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-hxk8s"] Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.000449 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-config-data\") pod \"cinder-db-sync-hxk8s\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.000660 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-combined-ca-bundle\") pod \"cinder-db-sync-hxk8s\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.000863 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-scripts\") pod \"cinder-db-sync-hxk8s\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.000912 4971 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-etc-machine-id\") pod \"cinder-db-sync-hxk8s\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.000972 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7p2qs\" (UniqueName: \"kubernetes.io/projected/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-kube-api-access-7p2qs\") pod \"cinder-db-sync-hxk8s\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.001066 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-db-sync-config-data\") pod \"cinder-db-sync-hxk8s\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.103192 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-config-data\") pod \"cinder-db-sync-hxk8s\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.103283 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-combined-ca-bundle\") pod \"cinder-db-sync-hxk8s\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.103340 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-scripts\") pod \"cinder-db-sync-hxk8s\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.103365 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-etc-machine-id\") pod \"cinder-db-sync-hxk8s\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.103394 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7p2qs\" (UniqueName: \"kubernetes.io/projected/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-kube-api-access-7p2qs\") pod \"cinder-db-sync-hxk8s\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.103423 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-db-sync-config-data\") pod \"cinder-db-sync-hxk8s\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.103623 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-etc-machine-id\") pod \"cinder-db-sync-hxk8s\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.111377 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-scripts\") pod \"cinder-db-sync-hxk8s\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.111526 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-combined-ca-bundle\") pod \"cinder-db-sync-hxk8s\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.112771 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-db-sync-config-data\") pod \"cinder-db-sync-hxk8s\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.114562 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-config-data\") pod \"cinder-db-sync-hxk8s\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.121819 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7p2qs\" (UniqueName: \"kubernetes.io/projected/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-kube-api-access-7p2qs\") pod \"cinder-db-sync-hxk8s\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.220486 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.716610 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-hxk8s"] Nov 27 08:56:02 crc kubenswrapper[4971]: I1127 08:56:02.767461 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-hxk8s" event={"ID":"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd","Type":"ContainerStarted","Data":"dcdfc7ec55997d6f58535033366b25c774c9ae21f6cc523bcc7d38fb01f7a0aa"} Nov 27 08:56:21 crc kubenswrapper[4971]: I1127 08:56:21.953062 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-hxk8s" event={"ID":"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd","Type":"ContainerStarted","Data":"79e1cff6761b5ec238e16d9670cf9df84497397534cd528b4b9c76fbd10352bb"} Nov 27 08:56:21 crc kubenswrapper[4971]: I1127 08:56:21.979816 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-hxk8s" podStartSLOduration=2.76857087 podStartE2EDuration="20.979794665s" podCreationTimestamp="2025-11-27 08:56:01 +0000 UTC" firstStartedPulling="2025-11-27 08:56:02.720432198 +0000 UTC m=+7400.912476116" lastFinishedPulling="2025-11-27 08:56:20.931655993 +0000 UTC m=+7419.123699911" observedRunningTime="2025-11-27 08:56:21.970433667 +0000 UTC m=+7420.162477585" watchObservedRunningTime="2025-11-27 08:56:21.979794665 +0000 UTC m=+7420.171838593" Nov 27 08:56:23 crc kubenswrapper[4971]: I1127 08:56:23.976052 4971 generic.go:334] "Generic (PLEG): container finished" podID="d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd" containerID="79e1cff6761b5ec238e16d9670cf9df84497397534cd528b4b9c76fbd10352bb" exitCode=0 Nov 27 08:56:23 crc kubenswrapper[4971]: I1127 08:56:23.976115 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-hxk8s" event={"ID":"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd","Type":"ContainerDied","Data":"79e1cff6761b5ec238e16d9670cf9df84497397534cd528b4b9c76fbd10352bb"} Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.346755 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.420107 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-scripts\") pod \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.420165 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-etc-machine-id\") pod \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.420202 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7p2qs\" (UniqueName: \"kubernetes.io/projected/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-kube-api-access-7p2qs\") pod \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.420335 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd" (UID: "d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.420383 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-db-sync-config-data\") pod \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.420578 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-combined-ca-bundle\") pod \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.420620 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-config-data\") pod \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\" (UID: \"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd\") " Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.421808 4971 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.425782 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-scripts" (OuterVolumeSpecName: "scripts") pod "d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd" (UID: "d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.425886 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd" (UID: "d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.426105 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-kube-api-access-7p2qs" (OuterVolumeSpecName: "kube-api-access-7p2qs") pod "d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd" (UID: "d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd"). InnerVolumeSpecName "kube-api-access-7p2qs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.449185 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd" (UID: "d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.471057 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-config-data" (OuterVolumeSpecName: "config-data") pod "d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd" (UID: "d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.523703 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.523733 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7p2qs\" (UniqueName: \"kubernetes.io/projected/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-kube-api-access-7p2qs\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.523748 4971 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.523761 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.523771 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.995740 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-hxk8s" event={"ID":"d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd","Type":"ContainerDied","Data":"dcdfc7ec55997d6f58535033366b25c774c9ae21f6cc523bcc7d38fb01f7a0aa"} Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.996036 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dcdfc7ec55997d6f58535033366b25c774c9ae21f6cc523bcc7d38fb01f7a0aa" Nov 27 08:56:25 crc kubenswrapper[4971]: I1127 08:56:25.995792 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-hxk8s" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.411557 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86bf89b475-tb5r5"] Nov 27 08:56:26 crc kubenswrapper[4971]: E1127 08:56:26.412057 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd" containerName="cinder-db-sync" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.412072 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd" containerName="cinder-db-sync" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.412291 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd" containerName="cinder-db-sync" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.413465 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.423665 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86bf89b475-tb5r5"] Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.454205 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-dns-svc\") pod \"dnsmasq-dns-86bf89b475-tb5r5\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") " pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.454294 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-ovsdbserver-sb\") pod \"dnsmasq-dns-86bf89b475-tb5r5\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") " pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.454356 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-config\") pod \"dnsmasq-dns-86bf89b475-tb5r5\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") " pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.454380 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-ovsdbserver-nb\") pod \"dnsmasq-dns-86bf89b475-tb5r5\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") " pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.454420 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfmcx\" (UniqueName: \"kubernetes.io/projected/d92253a9-9aa5-4930-bc0c-de495914f7c1-kube-api-access-tfmcx\") pod \"dnsmasq-dns-86bf89b475-tb5r5\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") " pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.557016 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-ovsdbserver-nb\") pod \"dnsmasq-dns-86bf89b475-tb5r5\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") " pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.557090 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfmcx\" (UniqueName: \"kubernetes.io/projected/d92253a9-9aa5-4930-bc0c-de495914f7c1-kube-api-access-tfmcx\") pod \"dnsmasq-dns-86bf89b475-tb5r5\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") " pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.557177 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-dns-svc\") pod \"dnsmasq-dns-86bf89b475-tb5r5\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") " pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.557215 4971 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-ovsdbserver-sb\") pod \"dnsmasq-dns-86bf89b475-tb5r5\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") " pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.557284 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-config\") pod \"dnsmasq-dns-86bf89b475-tb5r5\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") " pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.558140 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-dns-svc\") pod \"dnsmasq-dns-86bf89b475-tb5r5\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") " pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.558267 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-ovsdbserver-sb\") pod \"dnsmasq-dns-86bf89b475-tb5r5\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") " pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.559329 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-config\") pod \"dnsmasq-dns-86bf89b475-tb5r5\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") " pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.559524 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-ovsdbserver-nb\") pod \"dnsmasq-dns-86bf89b475-tb5r5\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") " pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.564574 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.566937 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.569002 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-kk72s" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.569205 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.571722 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.571983 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.575083 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.585989 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfmcx\" (UniqueName: \"kubernetes.io/projected/d92253a9-9aa5-4930-bc0c-de495914f7c1-kube-api-access-tfmcx\") pod \"dnsmasq-dns-86bf89b475-tb5r5\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") " pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.658317 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-scripts\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.658390 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vdrj\" (UniqueName: \"kubernetes.io/projected/0d609d64-7197-43f6-820c-cdfafd7bcfe8-kube-api-access-4vdrj\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.658423 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-config-data-custom\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.658466 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.658543 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d609d64-7197-43f6-820c-cdfafd7bcfe8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.658583 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-config-data\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: 
I1127 08:56:26.658598 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d609d64-7197-43f6-820c-cdfafd7bcfe8-logs\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.760193 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vdrj\" (UniqueName: \"kubernetes.io/projected/0d609d64-7197-43f6-820c-cdfafd7bcfe8-kube-api-access-4vdrj\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.760257 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-config-data-custom\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.760306 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.760366 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d609d64-7197-43f6-820c-cdfafd7bcfe8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.760392 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-config-data\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.760408 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d609d64-7197-43f6-820c-cdfafd7bcfe8-logs\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.760456 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-scripts\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.761090 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d609d64-7197-43f6-820c-cdfafd7bcfe8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.761583 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d609d64-7197-43f6-820c-cdfafd7bcfe8-logs\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.765435 4971 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-config-data-custom\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.765753 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.780253 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-config-data\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.785077 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-scripts\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.787935 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vdrj\" (UniqueName: \"kubernetes.io/projected/0d609d64-7197-43f6-820c-cdfafd7bcfe8-kube-api-access-4vdrj\") pod \"cinder-api-0\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " pod="openstack/cinder-api-0" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.792937 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" Nov 27 08:56:26 crc kubenswrapper[4971]: I1127 08:56:26.950442 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 27 08:56:27 crc kubenswrapper[4971]: I1127 08:56:27.348993 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86bf89b475-tb5r5"] Nov 27 08:56:27 crc kubenswrapper[4971]: I1127 08:56:27.467126 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 27 08:56:27 crc kubenswrapper[4971]: W1127 08:56:27.494018 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d609d64_7197_43f6_820c_cdfafd7bcfe8.slice/crio-99e7a4178fa4e35b16f09acc3a4e4eaba0a53acfda61f4f373c844d439862901 WatchSource:0}: Error finding container 99e7a4178fa4e35b16f09acc3a4e4eaba0a53acfda61f4f373c844d439862901: Status 404 returned error can't find the container with id 99e7a4178fa4e35b16f09acc3a4e4eaba0a53acfda61f4f373c844d439862901 Nov 27 08:56:28 crc kubenswrapper[4971]: I1127 08:56:28.036679 4971 generic.go:334] "Generic (PLEG): container finished" podID="d92253a9-9aa5-4930-bc0c-de495914f7c1" containerID="640e546478bd7c57526f61c01b070a9fa4ac8fcaf26972eedce4831a59593436" exitCode=0 Nov 27 08:56:28 crc kubenswrapper[4971]: I1127 08:56:28.036749 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" event={"ID":"d92253a9-9aa5-4930-bc0c-de495914f7c1","Type":"ContainerDied","Data":"640e546478bd7c57526f61c01b070a9fa4ac8fcaf26972eedce4831a59593436"} Nov 27 08:56:28 crc kubenswrapper[4971]: I1127 08:56:28.037214 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" event={"ID":"d92253a9-9aa5-4930-bc0c-de495914f7c1","Type":"ContainerStarted","Data":"226012ccd9c70d044476255ffa0423c2f6b369db4415753b5a302b8cf0d1d041"} Nov 27 08:56:28 crc kubenswrapper[4971]: I1127 08:56:28.042813 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0d609d64-7197-43f6-820c-cdfafd7bcfe8","Type":"ContainerStarted","Data":"99e7a4178fa4e35b16f09acc3a4e4eaba0a53acfda61f4f373c844d439862901"} Nov 27 08:56:29 crc kubenswrapper[4971]: I1127 08:56:29.059251 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" event={"ID":"d92253a9-9aa5-4930-bc0c-de495914f7c1","Type":"ContainerStarted","Data":"3a4ecb10e9f79f8f865d8cf9452ba163b667d4a242a6806e00617e57ad36756c"} Nov 27 08:56:29 crc kubenswrapper[4971]: I1127 08:56:29.059582 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" Nov 27 08:56:29 crc kubenswrapper[4971]: I1127 08:56:29.063426 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0d609d64-7197-43f6-820c-cdfafd7bcfe8","Type":"ContainerStarted","Data":"fa836b024b0c8aaddf48007e345a15e9fb10f7606b0222c3bdeaea669bc620ab"} Nov 27 08:56:29 crc kubenswrapper[4971]: I1127 08:56:29.063458 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0d609d64-7197-43f6-820c-cdfafd7bcfe8","Type":"ContainerStarted","Data":"692e20e108bbc1595c1ecee5d8d9a890a48fb05164378b3dab73527d66185071"} Nov 27 08:56:29 crc kubenswrapper[4971]: I1127 08:56:29.063684 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Nov 27 08:56:29 crc kubenswrapper[4971]: I1127 08:56:29.085952 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" podStartSLOduration=3.085915074 
podStartE2EDuration="3.085915074s" podCreationTimestamp="2025-11-27 08:56:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:56:29.081519809 +0000 UTC m=+7427.273563737" watchObservedRunningTime="2025-11-27 08:56:29.085915074 +0000 UTC m=+7427.277958992" Nov 27 08:56:36 crc kubenswrapper[4971]: I1127 08:56:36.795722 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" Nov 27 08:56:36 crc kubenswrapper[4971]: I1127 08:56:36.815882 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=10.815863656 podStartE2EDuration="10.815863656s" podCreationTimestamp="2025-11-27 08:56:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:56:29.107481912 +0000 UTC m=+7427.299525830" watchObservedRunningTime="2025-11-27 08:56:36.815863656 +0000 UTC m=+7435.007907574" Nov 27 08:56:36 crc kubenswrapper[4971]: I1127 08:56:36.872675 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5dfc79c745-rfx9v"] Nov 27 08:56:36 crc kubenswrapper[4971]: I1127 08:56:36.875511 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v" podUID="3dd49bba-d34d-4f52-9d67-b946a4c2dca3" containerName="dnsmasq-dns" containerID="cri-o://ff236db198e68dbd0100c5e2302337c1a494db52e0c9ba11d78715c9636f3e79" gracePeriod=10 Nov 27 08:56:37 crc kubenswrapper[4971]: I1127 08:56:37.167754 4971 generic.go:334] "Generic (PLEG): container finished" podID="3dd49bba-d34d-4f52-9d67-b946a4c2dca3" containerID="ff236db198e68dbd0100c5e2302337c1a494db52e0c9ba11d78715c9636f3e79" exitCode=0 Nov 27 08:56:37 crc kubenswrapper[4971]: I1127 08:56:37.167811 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v" event={"ID":"3dd49bba-d34d-4f52-9d67-b946a4c2dca3","Type":"ContainerDied","Data":"ff236db198e68dbd0100c5e2302337c1a494db52e0c9ba11d78715c9636f3e79"} Nov 27 08:56:37 crc kubenswrapper[4971]: I1127 08:56:37.459636 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v" Nov 27 08:56:37 crc kubenswrapper[4971]: I1127 08:56:37.508735 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-config\") pod \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " Nov 27 08:56:37 crc kubenswrapper[4971]: I1127 08:56:37.511099 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nt92v\" (UniqueName: \"kubernetes.io/projected/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-kube-api-access-nt92v\") pod \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " Nov 27 08:56:37 crc kubenswrapper[4971]: I1127 08:56:37.511229 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-ovsdbserver-nb\") pod \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " Nov 27 08:56:37 crc kubenswrapper[4971]: I1127 08:56:37.511271 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-ovsdbserver-sb\") pod \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " Nov 27 08:56:37 crc kubenswrapper[4971]: I1127 08:56:37.511410 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-dns-svc\") pod \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\" (UID: \"3dd49bba-d34d-4f52-9d67-b946a4c2dca3\") " Nov 27 08:56:37 crc kubenswrapper[4971]: I1127 08:56:37.519902 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-kube-api-access-nt92v" (OuterVolumeSpecName: "kube-api-access-nt92v") pod "3dd49bba-d34d-4f52-9d67-b946a4c2dca3" (UID: "3dd49bba-d34d-4f52-9d67-b946a4c2dca3"). InnerVolumeSpecName "kube-api-access-nt92v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:56:37 crc kubenswrapper[4971]: I1127 08:56:37.583479 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-config" (OuterVolumeSpecName: "config") pod "3dd49bba-d34d-4f52-9d67-b946a4c2dca3" (UID: "3dd49bba-d34d-4f52-9d67-b946a4c2dca3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:56:37 crc kubenswrapper[4971]: I1127 08:56:37.596175 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3dd49bba-d34d-4f52-9d67-b946a4c2dca3" (UID: "3dd49bba-d34d-4f52-9d67-b946a4c2dca3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:56:37 crc kubenswrapper[4971]: I1127 08:56:37.597107 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3dd49bba-d34d-4f52-9d67-b946a4c2dca3" (UID: "3dd49bba-d34d-4f52-9d67-b946a4c2dca3"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:56:37 crc kubenswrapper[4971]: I1127 08:56:37.615507 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nt92v\" (UniqueName: \"kubernetes.io/projected/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-kube-api-access-nt92v\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:37 crc kubenswrapper[4971]: I1127 08:56:37.615614 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:37 crc kubenswrapper[4971]: I1127 08:56:37.615885 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:37 crc kubenswrapper[4971]: I1127 08:56:37.616437 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-config\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:37 crc kubenswrapper[4971]: I1127 08:56:37.628938 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3dd49bba-d34d-4f52-9d67-b946a4c2dca3" (UID: "3dd49bba-d34d-4f52-9d67-b946a4c2dca3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:56:37 crc kubenswrapper[4971]: I1127 08:56:37.718907 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3dd49bba-d34d-4f52-9d67-b946a4c2dca3-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:38 crc kubenswrapper[4971]: I1127 08:56:38.180089 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v" event={"ID":"3dd49bba-d34d-4f52-9d67-b946a4c2dca3","Type":"ContainerDied","Data":"362388fa47c2f9e6c431bf432732c8b672b84562d8f536a4867714643da0d90a"} Nov 27 08:56:38 crc kubenswrapper[4971]: I1127 08:56:38.180158 4971 scope.go:117] "RemoveContainer" containerID="ff236db198e68dbd0100c5e2302337c1a494db52e0c9ba11d78715c9636f3e79" Nov 27 08:56:38 crc kubenswrapper[4971]: I1127 08:56:38.180242 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5dfc79c745-rfx9v" Nov 27 08:56:38 crc kubenswrapper[4971]: I1127 08:56:38.213574 4971 scope.go:117] "RemoveContainer" containerID="6b25d7d16bf2022100d04fc2dd53820ede96b5c36adc91afca9fc7a50a1d3e18" Nov 27 08:56:38 crc kubenswrapper[4971]: I1127 08:56:38.224689 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5dfc79c745-rfx9v"] Nov 27 08:56:38 crc kubenswrapper[4971]: I1127 08:56:38.257227 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5dfc79c745-rfx9v"] Nov 27 08:56:38 crc kubenswrapper[4971]: I1127 08:56:38.338395 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 27 08:56:38 crc kubenswrapper[4971]: I1127 08:56:38.338719 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="e7b90c66-3028-4daa-8df5-27c0ce1e10e6" containerName="nova-cell0-conductor-conductor" containerID="cri-o://e86c45d44a5760a15fe885ab091934392be1791d755e86910b677a716db7449b" gracePeriod=30 Nov 27 08:56:38 crc kubenswrapper[4971]: I1127 08:56:38.399322 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 27 08:56:38 crc kubenswrapper[4971]: I1127 08:56:38.399628 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a96dcaa9-3e6d-44ea-a535-3864fa826d2c" containerName="nova-api-log" containerID="cri-o://b6d2a31d08d150a164b1452a51573733e32c939ca73b89cb0afe0eba8742b9fc" gracePeriod=30 Nov 27 08:56:38 crc kubenswrapper[4971]: I1127 08:56:38.399779 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a96dcaa9-3e6d-44ea-a535-3864fa826d2c" containerName="nova-api-api" containerID="cri-o://0b5ee625010941a0a8680ea975925421402f231a7f759a98be193b60b2b9f054" gracePeriod=30 Nov 27 08:56:38 crc kubenswrapper[4971]: I1127 08:56:38.418452 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 08:56:38 crc kubenswrapper[4971]: I1127 08:56:38.419027 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e8288baf-24dc-4fa9-9666-98ac1be3312b" containerName="nova-metadata-log" containerID="cri-o://0ba46ea69f37b27dcd1932a9f82ac7a1e654e126a0a73c730126480358a3e44a" gracePeriod=30 Nov 27 08:56:38 crc kubenswrapper[4971]: I1127 08:56:38.419142 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e8288baf-24dc-4fa9-9666-98ac1be3312b" containerName="nova-metadata-metadata" containerID="cri-o://288824eb43528bd17e403543131e5673dcc7825b9dcebaeabb1361d48721f47d" gracePeriod=30 Nov 27 08:56:38 crc kubenswrapper[4971]: I1127 08:56:38.433482 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:56:38 crc kubenswrapper[4971]: I1127 08:56:38.433875 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="69d43348-4b62-4dc4-a920-2f702816dd3c" containerName="nova-scheduler-scheduler" containerID="cri-o://d1b8cee8569e919cc0a8a040dc0b054f0e8a5012d38019e1d96fe9168cbcc864" gracePeriod=30 Nov 27 08:56:38 crc kubenswrapper[4971]: I1127 08:56:38.444231 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 27 08:56:38 crc kubenswrapper[4971]: I1127 08:56:38.444562 4971 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="3009b067-05e5-435b-af96-a3fd67beb32e" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://049af22b0c7d1906d48b6bef890039f31781624d2adf0c4753528ef38a67777f" gracePeriod=30 Nov 27 08:56:38 crc kubenswrapper[4971]: I1127 08:56:38.562486 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3dd49bba-d34d-4f52-9d67-b946a4c2dca3" path="/var/lib/kubelet/pods/3dd49bba-d34d-4f52-9d67-b946a4c2dca3/volumes" Nov 27 08:56:39 crc kubenswrapper[4971]: I1127 08:56:39.150850 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Nov 27 08:56:39 crc kubenswrapper[4971]: I1127 08:56:39.222464 4971 generic.go:334] "Generic (PLEG): container finished" podID="e8288baf-24dc-4fa9-9666-98ac1be3312b" containerID="0ba46ea69f37b27dcd1932a9f82ac7a1e654e126a0a73c730126480358a3e44a" exitCode=143 Nov 27 08:56:39 crc kubenswrapper[4971]: I1127 08:56:39.222723 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e8288baf-24dc-4fa9-9666-98ac1be3312b","Type":"ContainerDied","Data":"0ba46ea69f37b27dcd1932a9f82ac7a1e654e126a0a73c730126480358a3e44a"} Nov 27 08:56:39 crc kubenswrapper[4971]: I1127 08:56:39.230218 4971 generic.go:334] "Generic (PLEG): container finished" podID="a96dcaa9-3e6d-44ea-a535-3864fa826d2c" containerID="b6d2a31d08d150a164b1452a51573733e32c939ca73b89cb0afe0eba8742b9fc" exitCode=143 Nov 27 08:56:39 crc kubenswrapper[4971]: I1127 08:56:39.230298 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a96dcaa9-3e6d-44ea-a535-3864fa826d2c","Type":"ContainerDied","Data":"b6d2a31d08d150a164b1452a51573733e32c939ca73b89cb0afe0eba8742b9fc"} Nov 27 08:56:39 crc kubenswrapper[4971]: I1127 08:56:39.232751 4971 generic.go:334] "Generic (PLEG): container finished" podID="3009b067-05e5-435b-af96-a3fd67beb32e" containerID="049af22b0c7d1906d48b6bef890039f31781624d2adf0c4753528ef38a67777f" exitCode=0 Nov 27 08:56:39 crc kubenswrapper[4971]: I1127 08:56:39.232810 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3009b067-05e5-435b-af96-a3fd67beb32e","Type":"ContainerDied","Data":"049af22b0c7d1906d48b6bef890039f31781624d2adf0c4753528ef38a67777f"} Nov 27 08:56:39 crc kubenswrapper[4971]: E1127 08:56:39.388672 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e86c45d44a5760a15fe885ab091934392be1791d755e86910b677a716db7449b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 27 08:56:39 crc kubenswrapper[4971]: E1127 08:56:39.390823 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e86c45d44a5760a15fe885ab091934392be1791d755e86910b677a716db7449b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 27 08:56:39 crc kubenswrapper[4971]: E1127 08:56:39.392478 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e86c45d44a5760a15fe885ab091934392be1791d755e86910b677a716db7449b" 
cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 27 08:56:39 crc kubenswrapper[4971]: E1127 08:56:39.392526 4971 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="e7b90c66-3028-4daa-8df5-27c0ce1e10e6" containerName="nova-cell0-conductor-conductor" Nov 27 08:56:39 crc kubenswrapper[4971]: I1127 08:56:39.554322 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:56:39 crc kubenswrapper[4971]: I1127 08:56:39.667791 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3009b067-05e5-435b-af96-a3fd67beb32e-config-data\") pod \"3009b067-05e5-435b-af96-a3fd67beb32e\" (UID: \"3009b067-05e5-435b-af96-a3fd67beb32e\") " Nov 27 08:56:39 crc kubenswrapper[4971]: I1127 08:56:39.668100 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3009b067-05e5-435b-af96-a3fd67beb32e-combined-ca-bundle\") pod \"3009b067-05e5-435b-af96-a3fd67beb32e\" (UID: \"3009b067-05e5-435b-af96-a3fd67beb32e\") " Nov 27 08:56:39 crc kubenswrapper[4971]: I1127 08:56:39.668188 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pcdh\" (UniqueName: \"kubernetes.io/projected/3009b067-05e5-435b-af96-a3fd67beb32e-kube-api-access-5pcdh\") pod \"3009b067-05e5-435b-af96-a3fd67beb32e\" (UID: \"3009b067-05e5-435b-af96-a3fd67beb32e\") " Nov 27 08:56:39 crc kubenswrapper[4971]: I1127 08:56:39.675066 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3009b067-05e5-435b-af96-a3fd67beb32e-kube-api-access-5pcdh" (OuterVolumeSpecName: "kube-api-access-5pcdh") pod "3009b067-05e5-435b-af96-a3fd67beb32e" (UID: "3009b067-05e5-435b-af96-a3fd67beb32e"). InnerVolumeSpecName "kube-api-access-5pcdh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:56:39 crc kubenswrapper[4971]: I1127 08:56:39.695920 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3009b067-05e5-435b-af96-a3fd67beb32e-config-data" (OuterVolumeSpecName: "config-data") pod "3009b067-05e5-435b-af96-a3fd67beb32e" (UID: "3009b067-05e5-435b-af96-a3fd67beb32e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:56:39 crc kubenswrapper[4971]: I1127 08:56:39.698418 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3009b067-05e5-435b-af96-a3fd67beb32e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3009b067-05e5-435b-af96-a3fd67beb32e" (UID: "3009b067-05e5-435b-af96-a3fd67beb32e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:56:39 crc kubenswrapper[4971]: I1127 08:56:39.772434 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3009b067-05e5-435b-af96-a3fd67beb32e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:39 crc kubenswrapper[4971]: I1127 08:56:39.772796 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pcdh\" (UniqueName: \"kubernetes.io/projected/3009b067-05e5-435b-af96-a3fd67beb32e-kube-api-access-5pcdh\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:39 crc kubenswrapper[4971]: I1127 08:56:39.772908 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3009b067-05e5-435b-af96-a3fd67beb32e-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.258782 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3009b067-05e5-435b-af96-a3fd67beb32e","Type":"ContainerDied","Data":"794799ee64e08f4b73c5c5d48704acb8349a7bbe50fd4f2dc87fd90ddc289ec7"} Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.258867 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.258876 4971 scope.go:117] "RemoveContainer" containerID="049af22b0c7d1906d48b6bef890039f31781624d2adf0c4753528ef38a67777f" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.332746 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.333120 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.346984 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 27 08:56:40 crc kubenswrapper[4971]: E1127 08:56:40.347398 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dd49bba-d34d-4f52-9d67-b946a4c2dca3" containerName="dnsmasq-dns" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.347411 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dd49bba-d34d-4f52-9d67-b946a4c2dca3" containerName="dnsmasq-dns" Nov 27 08:56:40 crc kubenswrapper[4971]: E1127 08:56:40.347432 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dd49bba-d34d-4f52-9d67-b946a4c2dca3" containerName="init" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.347441 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dd49bba-d34d-4f52-9d67-b946a4c2dca3" containerName="init" Nov 27 08:56:40 crc kubenswrapper[4971]: E1127 08:56:40.347471 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3009b067-05e5-435b-af96-a3fd67beb32e" containerName="nova-cell1-novncproxy-novncproxy" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.347478 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="3009b067-05e5-435b-af96-a3fd67beb32e" containerName="nova-cell1-novncproxy-novncproxy" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.348277 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="3009b067-05e5-435b-af96-a3fd67beb32e" containerName="nova-cell1-novncproxy-novncproxy" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.348307 4971 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="3dd49bba-d34d-4f52-9d67-b946a4c2dca3" containerName="dnsmasq-dns" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.348893 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.348987 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.354431 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.385871 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43d941be-9782-4b3b-b9ff-b01aa66e612b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"43d941be-9782-4b3b-b9ff-b01aa66e612b\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.386146 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43d941be-9782-4b3b-b9ff-b01aa66e612b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"43d941be-9782-4b3b-b9ff-b01aa66e612b\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.386280 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2slx\" (UniqueName: \"kubernetes.io/projected/43d941be-9782-4b3b-b9ff-b01aa66e612b-kube-api-access-f2slx\") pod \"nova-cell1-novncproxy-0\" (UID: \"43d941be-9782-4b3b-b9ff-b01aa66e612b\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.488400 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43d941be-9782-4b3b-b9ff-b01aa66e612b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"43d941be-9782-4b3b-b9ff-b01aa66e612b\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.488498 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2slx\" (UniqueName: \"kubernetes.io/projected/43d941be-9782-4b3b-b9ff-b01aa66e612b-kube-api-access-f2slx\") pod \"nova-cell1-novncproxy-0\" (UID: \"43d941be-9782-4b3b-b9ff-b01aa66e612b\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.488608 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43d941be-9782-4b3b-b9ff-b01aa66e612b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"43d941be-9782-4b3b-b9ff-b01aa66e612b\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.493184 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43d941be-9782-4b3b-b9ff-b01aa66e612b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"43d941be-9782-4b3b-b9ff-b01aa66e612b\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.498093 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/43d941be-9782-4b3b-b9ff-b01aa66e612b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"43d941be-9782-4b3b-b9ff-b01aa66e612b\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.512347 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2slx\" (UniqueName: \"kubernetes.io/projected/43d941be-9782-4b3b-b9ff-b01aa66e612b-kube-api-access-f2slx\") pod \"nova-cell1-novncproxy-0\" (UID: \"43d941be-9782-4b3b-b9ff-b01aa66e612b\") " pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.566774 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3009b067-05e5-435b-af96-a3fd67beb32e" path="/var/lib/kubelet/pods/3009b067-05e5-435b-af96-a3fd67beb32e/volumes" Nov 27 08:56:40 crc kubenswrapper[4971]: I1127 08:56:40.683583 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 27 08:56:41 crc kubenswrapper[4971]: W1127 08:56:41.184858 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43d941be_9782_4b3b_b9ff_b01aa66e612b.slice/crio-bf11c36c5bf6fb5950adabbbda1670cca538b13141df369370b6b4d441e968e2 WatchSource:0}: Error finding container bf11c36c5bf6fb5950adabbbda1670cca538b13141df369370b6b4d441e968e2: Status 404 returned error can't find the container with id bf11c36c5bf6fb5950adabbbda1670cca538b13141df369370b6b4d441e968e2 Nov 27 08:56:41 crc kubenswrapper[4971]: I1127 08:56:41.186775 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 27 08:56:41 crc kubenswrapper[4971]: I1127 08:56:41.274171 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"43d941be-9782-4b3b-b9ff-b01aa66e612b","Type":"ContainerStarted","Data":"bf11c36c5bf6fb5950adabbbda1670cca538b13141df369370b6b4d441e968e2"} Nov 27 08:56:41 crc kubenswrapper[4971]: I1127 08:56:41.656212 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 27 08:56:41 crc kubenswrapper[4971]: I1127 08:56:41.656778 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="c119bf30-716a-491e-ae7a-225b2bdf42fb" containerName="nova-cell1-conductor-conductor" containerID="cri-o://7c13bb87dc0a13e3e813b43c0aaf1db573a4ae5141fd66eef87b70d22e57de29" gracePeriod=30 Nov 27 08:56:42 crc kubenswrapper[4971]: I1127 08:56:42.299946 4971 generic.go:334] "Generic (PLEG): container finished" podID="69d43348-4b62-4dc4-a920-2f702816dd3c" containerID="d1b8cee8569e919cc0a8a040dc0b054f0e8a5012d38019e1d96fe9168cbcc864" exitCode=0 Nov 27 08:56:42 crc kubenswrapper[4971]: I1127 08:56:42.300063 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"69d43348-4b62-4dc4-a920-2f702816dd3c","Type":"ContainerDied","Data":"d1b8cee8569e919cc0a8a040dc0b054f0e8a5012d38019e1d96fe9168cbcc864"} Nov 27 08:56:42 crc kubenswrapper[4971]: I1127 08:56:42.303608 4971 generic.go:334] "Generic (PLEG): container finished" podID="e8288baf-24dc-4fa9-9666-98ac1be3312b" containerID="288824eb43528bd17e403543131e5673dcc7825b9dcebaeabb1361d48721f47d" exitCode=0 Nov 27 08:56:42 crc kubenswrapper[4971]: I1127 08:56:42.303684 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"e8288baf-24dc-4fa9-9666-98ac1be3312b","Type":"ContainerDied","Data":"288824eb43528bd17e403543131e5673dcc7825b9dcebaeabb1361d48721f47d"} Nov 27 08:56:42 crc kubenswrapper[4971]: I1127 08:56:42.306002 4971 generic.go:334] "Generic (PLEG): container finished" podID="a96dcaa9-3e6d-44ea-a535-3864fa826d2c" containerID="0b5ee625010941a0a8680ea975925421402f231a7f759a98be193b60b2b9f054" exitCode=0 Nov 27 08:56:42 crc kubenswrapper[4971]: I1127 08:56:42.306032 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a96dcaa9-3e6d-44ea-a535-3864fa826d2c","Type":"ContainerDied","Data":"0b5ee625010941a0a8680ea975925421402f231a7f759a98be193b60b2b9f054"} Nov 27 08:56:42 crc kubenswrapper[4971]: E1127 08:56:42.512178 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d1b8cee8569e919cc0a8a040dc0b054f0e8a5012d38019e1d96fe9168cbcc864 is running failed: container process not found" containerID="d1b8cee8569e919cc0a8a040dc0b054f0e8a5012d38019e1d96fe9168cbcc864" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 27 08:56:42 crc kubenswrapper[4971]: E1127 08:56:42.512631 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d1b8cee8569e919cc0a8a040dc0b054f0e8a5012d38019e1d96fe9168cbcc864 is running failed: container process not found" containerID="d1b8cee8569e919cc0a8a040dc0b054f0e8a5012d38019e1d96fe9168cbcc864" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 27 08:56:42 crc kubenswrapper[4971]: E1127 08:56:42.512901 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d1b8cee8569e919cc0a8a040dc0b054f0e8a5012d38019e1d96fe9168cbcc864 is running failed: container process not found" containerID="d1b8cee8569e919cc0a8a040dc0b054f0e8a5012d38019e1d96fe9168cbcc864" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 27 08:56:42 crc kubenswrapper[4971]: E1127 08:56:42.512949 4971 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d1b8cee8569e919cc0a8a040dc0b054f0e8a5012d38019e1d96fe9168cbcc864 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="69d43348-4b62-4dc4-a920-2f702816dd3c" containerName="nova-scheduler-scheduler" Nov 27 08:56:42 crc kubenswrapper[4971]: I1127 08:56:42.866365 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="e8288baf-24dc-4fa9-9666-98ac1be3312b" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.85:8775/\": dial tcp 10.217.1.85:8775: connect: connection refused" Nov 27 08:56:42 crc kubenswrapper[4971]: I1127 08:56:42.866956 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="e8288baf-24dc-4fa9-9666-98ac1be3312b" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.85:8775/\": dial tcp 10.217.1.85:8775: connect: connection refused" Nov 27 08:56:43 crc kubenswrapper[4971]: I1127 08:56:43.325309 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"43d941be-9782-4b3b-b9ff-b01aa66e612b","Type":"ContainerStarted","Data":"04ad42cccc6700437ae99e5fa00afe9641911fcf130b673e0d8fd52f9180b3fd"} Nov 
27 08:56:43 crc kubenswrapper[4971]: I1127 08:56:43.353413 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.353393614 podStartE2EDuration="3.353393614s" podCreationTimestamp="2025-11-27 08:56:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:56:43.344750326 +0000 UTC m=+7441.536794254" watchObservedRunningTime="2025-11-27 08:56:43.353393614 +0000 UTC m=+7441.545437532" Nov 27 08:56:43 crc kubenswrapper[4971]: I1127 08:56:43.613969 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 08:56:43 crc kubenswrapper[4971]: I1127 08:56:43.700036 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69d43348-4b62-4dc4-a920-2f702816dd3c-config-data\") pod \"69d43348-4b62-4dc4-a920-2f702816dd3c\" (UID: \"69d43348-4b62-4dc4-a920-2f702816dd3c\") " Nov 27 08:56:43 crc kubenswrapper[4971]: I1127 08:56:43.700282 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69d43348-4b62-4dc4-a920-2f702816dd3c-combined-ca-bundle\") pod \"69d43348-4b62-4dc4-a920-2f702816dd3c\" (UID: \"69d43348-4b62-4dc4-a920-2f702816dd3c\") " Nov 27 08:56:43 crc kubenswrapper[4971]: I1127 08:56:43.700493 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwvn4\" (UniqueName: \"kubernetes.io/projected/69d43348-4b62-4dc4-a920-2f702816dd3c-kube-api-access-mwvn4\") pod \"69d43348-4b62-4dc4-a920-2f702816dd3c\" (UID: \"69d43348-4b62-4dc4-a920-2f702816dd3c\") " Nov 27 08:56:43 crc kubenswrapper[4971]: I1127 08:56:43.707607 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69d43348-4b62-4dc4-a920-2f702816dd3c-kube-api-access-mwvn4" (OuterVolumeSpecName: "kube-api-access-mwvn4") pod "69d43348-4b62-4dc4-a920-2f702816dd3c" (UID: "69d43348-4b62-4dc4-a920-2f702816dd3c"). InnerVolumeSpecName "kube-api-access-mwvn4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:56:43 crc kubenswrapper[4971]: E1127 08:56:43.739701 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7c13bb87dc0a13e3e813b43c0aaf1db573a4ae5141fd66eef87b70d22e57de29" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 27 08:56:43 crc kubenswrapper[4971]: I1127 08:56:43.740789 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69d43348-4b62-4dc4-a920-2f702816dd3c-config-data" (OuterVolumeSpecName: "config-data") pod "69d43348-4b62-4dc4-a920-2f702816dd3c" (UID: "69d43348-4b62-4dc4-a920-2f702816dd3c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:56:43 crc kubenswrapper[4971]: E1127 08:56:43.744517 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7c13bb87dc0a13e3e813b43c0aaf1db573a4ae5141fd66eef87b70d22e57de29" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 27 08:56:43 crc kubenswrapper[4971]: I1127 08:56:43.751800 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69d43348-4b62-4dc4-a920-2f702816dd3c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "69d43348-4b62-4dc4-a920-2f702816dd3c" (UID: "69d43348-4b62-4dc4-a920-2f702816dd3c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:56:43 crc kubenswrapper[4971]: E1127 08:56:43.754756 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7c13bb87dc0a13e3e813b43c0aaf1db573a4ae5141fd66eef87b70d22e57de29" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 27 08:56:43 crc kubenswrapper[4971]: E1127 08:56:43.754852 4971 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="c119bf30-716a-491e-ae7a-225b2bdf42fb" containerName="nova-cell1-conductor-conductor" Nov 27 08:56:43 crc kubenswrapper[4971]: I1127 08:56:43.803332 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwvn4\" (UniqueName: \"kubernetes.io/projected/69d43348-4b62-4dc4-a920-2f702816dd3c-kube-api-access-mwvn4\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:43 crc kubenswrapper[4971]: I1127 08:56:43.803373 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69d43348-4b62-4dc4-a920-2f702816dd3c-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:43 crc kubenswrapper[4971]: I1127 08:56:43.803385 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69d43348-4b62-4dc4-a920-2f702816dd3c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:43 crc kubenswrapper[4971]: I1127 08:56:43.890736 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 08:56:43 crc kubenswrapper[4971]: I1127 08:56:43.900558 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.016025 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8288baf-24dc-4fa9-9666-98ac1be3312b-config-data\") pod \"e8288baf-24dc-4fa9-9666-98ac1be3312b\" (UID: \"e8288baf-24dc-4fa9-9666-98ac1be3312b\") " Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.016163 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-combined-ca-bundle\") pod \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\" (UID: \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\") " Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.016301 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwsfp\" (UniqueName: \"kubernetes.io/projected/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-kube-api-access-hwsfp\") pod \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\" (UID: \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\") " Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.016381 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-config-data\") pod \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\" (UID: \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\") " Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.016581 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-logs\") pod \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\" (UID: \"a96dcaa9-3e6d-44ea-a535-3864fa826d2c\") " Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.016667 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8288baf-24dc-4fa9-9666-98ac1be3312b-logs\") pod \"e8288baf-24dc-4fa9-9666-98ac1be3312b\" (UID: \"e8288baf-24dc-4fa9-9666-98ac1be3312b\") " Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.016724 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8288baf-24dc-4fa9-9666-98ac1be3312b-combined-ca-bundle\") pod \"e8288baf-24dc-4fa9-9666-98ac1be3312b\" (UID: \"e8288baf-24dc-4fa9-9666-98ac1be3312b\") " Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.016794 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dbbn\" (UniqueName: \"kubernetes.io/projected/e8288baf-24dc-4fa9-9666-98ac1be3312b-kube-api-access-8dbbn\") pod \"e8288baf-24dc-4fa9-9666-98ac1be3312b\" (UID: \"e8288baf-24dc-4fa9-9666-98ac1be3312b\") " Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.018229 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-logs" (OuterVolumeSpecName: "logs") pod "a96dcaa9-3e6d-44ea-a535-3864fa826d2c" (UID: "a96dcaa9-3e6d-44ea-a535-3864fa826d2c"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.019037 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8288baf-24dc-4fa9-9666-98ac1be3312b-logs" (OuterVolumeSpecName: "logs") pod "e8288baf-24dc-4fa9-9666-98ac1be3312b" (UID: "e8288baf-24dc-4fa9-9666-98ac1be3312b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.027394 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8288baf-24dc-4fa9-9666-98ac1be3312b-kube-api-access-8dbbn" (OuterVolumeSpecName: "kube-api-access-8dbbn") pod "e8288baf-24dc-4fa9-9666-98ac1be3312b" (UID: "e8288baf-24dc-4fa9-9666-98ac1be3312b"). InnerVolumeSpecName "kube-api-access-8dbbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.035177 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-kube-api-access-hwsfp" (OuterVolumeSpecName: "kube-api-access-hwsfp") pod "a96dcaa9-3e6d-44ea-a535-3864fa826d2c" (UID: "a96dcaa9-3e6d-44ea-a535-3864fa826d2c"). InnerVolumeSpecName "kube-api-access-hwsfp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.081448 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8288baf-24dc-4fa9-9666-98ac1be3312b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e8288baf-24dc-4fa9-9666-98ac1be3312b" (UID: "e8288baf-24dc-4fa9-9666-98ac1be3312b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.087064 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8288baf-24dc-4fa9-9666-98ac1be3312b-config-data" (OuterVolumeSpecName: "config-data") pod "e8288baf-24dc-4fa9-9666-98ac1be3312b" (UID: "e8288baf-24dc-4fa9-9666-98ac1be3312b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.100194 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-config-data" (OuterVolumeSpecName: "config-data") pod "a96dcaa9-3e6d-44ea-a535-3864fa826d2c" (UID: "a96dcaa9-3e6d-44ea-a535-3864fa826d2c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.120025 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-logs\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.120061 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8288baf-24dc-4fa9-9666-98ac1be3312b-logs\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.120074 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8288baf-24dc-4fa9-9666-98ac1be3312b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.120091 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dbbn\" (UniqueName: \"kubernetes.io/projected/e8288baf-24dc-4fa9-9666-98ac1be3312b-kube-api-access-8dbbn\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.120103 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8288baf-24dc-4fa9-9666-98ac1be3312b-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.120114 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwsfp\" (UniqueName: \"kubernetes.io/projected/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-kube-api-access-hwsfp\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.120123 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.123750 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a96dcaa9-3e6d-44ea-a535-3864fa826d2c" (UID: "a96dcaa9-3e6d-44ea-a535-3864fa826d2c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.222079 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a96dcaa9-3e6d-44ea-a535-3864fa826d2c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.272217 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.407107 4971 generic.go:334] "Generic (PLEG): container finished" podID="e7b90c66-3028-4daa-8df5-27c0ce1e10e6" containerID="e86c45d44a5760a15fe885ab091934392be1791d755e86910b677a716db7449b" exitCode=0 Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.407195 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"e7b90c66-3028-4daa-8df5-27c0ce1e10e6","Type":"ContainerDied","Data":"e86c45d44a5760a15fe885ab091934392be1791d755e86910b677a716db7449b"} Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.407249 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"e7b90c66-3028-4daa-8df5-27c0ce1e10e6","Type":"ContainerDied","Data":"3b04dda16fba15a0fe67cf18cabdc7c209c54872b4b0d9a90cfb8297fa07d6af"} Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.407179 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.407275 4971 scope.go:117] "RemoveContainer" containerID="e86c45d44a5760a15fe885ab091934392be1791d755e86910b677a716db7449b" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.419267 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.420990 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"69d43348-4b62-4dc4-a920-2f702816dd3c","Type":"ContainerDied","Data":"d7bb9fbf2ea777e17a2002f01640d4f404ade7057fd73f642b4b5c9e69503386"} Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.427869 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e8288baf-24dc-4fa9-9666-98ac1be3312b","Type":"ContainerDied","Data":"06b9fc5963aeae0f2e702aca8b47a131b11340928ffeb54d7d2ede579769bc8f"} Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.427970 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.431514 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7b90c66-3028-4daa-8df5-27c0ce1e10e6-combined-ca-bundle\") pod \"e7b90c66-3028-4daa-8df5-27c0ce1e10e6\" (UID: \"e7b90c66-3028-4daa-8df5-27c0ce1e10e6\") " Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.431626 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7b90c66-3028-4daa-8df5-27c0ce1e10e6-config-data\") pod \"e7b90c66-3028-4daa-8df5-27c0ce1e10e6\" (UID: \"e7b90c66-3028-4daa-8df5-27c0ce1e10e6\") " Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.431697 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4skxh\" (UniqueName: \"kubernetes.io/projected/e7b90c66-3028-4daa-8df5-27c0ce1e10e6-kube-api-access-4skxh\") pod \"e7b90c66-3028-4daa-8df5-27c0ce1e10e6\" (UID: \"e7b90c66-3028-4daa-8df5-27c0ce1e10e6\") " Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.439191 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7b90c66-3028-4daa-8df5-27c0ce1e10e6-kube-api-access-4skxh" (OuterVolumeSpecName: "kube-api-access-4skxh") pod "e7b90c66-3028-4daa-8df5-27c0ce1e10e6" (UID: "e7b90c66-3028-4daa-8df5-27c0ce1e10e6"). InnerVolumeSpecName "kube-api-access-4skxh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.455414 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a96dcaa9-3e6d-44ea-a535-3864fa826d2c","Type":"ContainerDied","Data":"7f51571f556dc49bce216d14aee811f1fe68822e9064e374537f9d87e81dc43b"} Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.455524 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.467100 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7b90c66-3028-4daa-8df5-27c0ce1e10e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e7b90c66-3028-4daa-8df5-27c0ce1e10e6" (UID: "e7b90c66-3028-4daa-8df5-27c0ce1e10e6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.468077 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7b90c66-3028-4daa-8df5-27c0ce1e10e6-config-data" (OuterVolumeSpecName: "config-data") pod "e7b90c66-3028-4daa-8df5-27c0ce1e10e6" (UID: "e7b90c66-3028-4daa-8df5-27c0ce1e10e6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.469751 4971 generic.go:334] "Generic (PLEG): container finished" podID="c119bf30-716a-491e-ae7a-225b2bdf42fb" containerID="7c13bb87dc0a13e3e813b43c0aaf1db573a4ae5141fd66eef87b70d22e57de29" exitCode=0 Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.469837 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c119bf30-716a-491e-ae7a-225b2bdf42fb","Type":"ContainerDied","Data":"7c13bb87dc0a13e3e813b43c0aaf1db573a4ae5141fd66eef87b70d22e57de29"} Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.479561 4971 scope.go:117] "RemoveContainer" containerID="e86c45d44a5760a15fe885ab091934392be1791d755e86910b677a716db7449b" Nov 27 08:56:44 crc kubenswrapper[4971]: E1127 08:56:44.481789 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e86c45d44a5760a15fe885ab091934392be1791d755e86910b677a716db7449b\": container with ID starting with e86c45d44a5760a15fe885ab091934392be1791d755e86910b677a716db7449b not found: ID does not exist" containerID="e86c45d44a5760a15fe885ab091934392be1791d755e86910b677a716db7449b" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.481856 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e86c45d44a5760a15fe885ab091934392be1791d755e86910b677a716db7449b"} err="failed to get container status \"e86c45d44a5760a15fe885ab091934392be1791d755e86910b677a716db7449b\": rpc error: code = NotFound desc = could not find container \"e86c45d44a5760a15fe885ab091934392be1791d755e86910b677a716db7449b\": container with ID starting with e86c45d44a5760a15fe885ab091934392be1791d755e86910b677a716db7449b not found: ID does not exist" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.481888 4971 scope.go:117] "RemoveContainer" containerID="d1b8cee8569e919cc0a8a040dc0b054f0e8a5012d38019e1d96fe9168cbcc864" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.533958 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7b90c66-3028-4daa-8df5-27c0ce1e10e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.534023 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7b90c66-3028-4daa-8df5-27c0ce1e10e6-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.534038 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4skxh\" (UniqueName: \"kubernetes.io/projected/e7b90c66-3028-4daa-8df5-27c0ce1e10e6-kube-api-access-4skxh\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.659714 4971 scope.go:117] "RemoveContainer" containerID="288824eb43528bd17e403543131e5673dcc7825b9dcebaeabb1361d48721f47d" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.661370 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.673630 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.723636 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.735607 4971 
scope.go:117] "RemoveContainer" containerID="0ba46ea69f37b27dcd1932a9f82ac7a1e654e126a0a73c730126480358a3e44a" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.737185 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.763234 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 27 08:56:44 crc kubenswrapper[4971]: E1127 08:56:44.763864 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69d43348-4b62-4dc4-a920-2f702816dd3c" containerName="nova-scheduler-scheduler" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.763888 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="69d43348-4b62-4dc4-a920-2f702816dd3c" containerName="nova-scheduler-scheduler" Nov 27 08:56:44 crc kubenswrapper[4971]: E1127 08:56:44.763919 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a96dcaa9-3e6d-44ea-a535-3864fa826d2c" containerName="nova-api-log" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.763931 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="a96dcaa9-3e6d-44ea-a535-3864fa826d2c" containerName="nova-api-log" Nov 27 08:56:44 crc kubenswrapper[4971]: E1127 08:56:44.763944 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8288baf-24dc-4fa9-9666-98ac1be3312b" containerName="nova-metadata-metadata" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.763953 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8288baf-24dc-4fa9-9666-98ac1be3312b" containerName="nova-metadata-metadata" Nov 27 08:56:44 crc kubenswrapper[4971]: E1127 08:56:44.763972 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7b90c66-3028-4daa-8df5-27c0ce1e10e6" containerName="nova-cell0-conductor-conductor" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.763982 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7b90c66-3028-4daa-8df5-27c0ce1e10e6" containerName="nova-cell0-conductor-conductor" Nov 27 08:56:44 crc kubenswrapper[4971]: E1127 08:56:44.763997 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8288baf-24dc-4fa9-9666-98ac1be3312b" containerName="nova-metadata-log" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.764005 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8288baf-24dc-4fa9-9666-98ac1be3312b" containerName="nova-metadata-log" Nov 27 08:56:44 crc kubenswrapper[4971]: E1127 08:56:44.764018 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a96dcaa9-3e6d-44ea-a535-3864fa826d2c" containerName="nova-api-api" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.764026 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="a96dcaa9-3e6d-44ea-a535-3864fa826d2c" containerName="nova-api-api" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.764256 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7b90c66-3028-4daa-8df5-27c0ce1e10e6" containerName="nova-cell0-conductor-conductor" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.764275 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="a96dcaa9-3e6d-44ea-a535-3864fa826d2c" containerName="nova-api-log" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.764293 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8288baf-24dc-4fa9-9666-98ac1be3312b" containerName="nova-metadata-log" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.764306 4971 
memory_manager.go:354] "RemoveStaleState removing state" podUID="e8288baf-24dc-4fa9-9666-98ac1be3312b" containerName="nova-metadata-metadata" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.764321 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="69d43348-4b62-4dc4-a920-2f702816dd3c" containerName="nova-scheduler-scheduler" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.764346 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="a96dcaa9-3e6d-44ea-a535-3864fa826d2c" containerName="nova-api-api" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.765898 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.773967 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.782109 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.816424 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.816965 4971 scope.go:117] "RemoveContainer" containerID="0b5ee625010941a0a8680ea975925421402f231a7f759a98be193b60b2b9f054" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.826551 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.830571 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.843071 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.856637 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.865240 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.870517 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55562639-e816-4faf-8c2b-7124cd156a85-logs\") pod \"nova-metadata-0\" (UID: \"55562639-e816-4faf-8c2b-7124cd156a85\") " pod="openstack/nova-metadata-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.870577 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55562639-e816-4faf-8c2b-7124cd156a85-config-data\") pod \"nova-metadata-0\" (UID: \"55562639-e816-4faf-8c2b-7124cd156a85\") " pod="openstack/nova-metadata-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.870643 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqfsn\" (UniqueName: \"kubernetes.io/projected/55562639-e816-4faf-8c2b-7124cd156a85-kube-api-access-tqfsn\") pod \"nova-metadata-0\" (UID: \"55562639-e816-4faf-8c2b-7124cd156a85\") " pod="openstack/nova-metadata-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.870686 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/55562639-e816-4faf-8c2b-7124cd156a85-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"55562639-e816-4faf-8c2b-7124cd156a85\") " pod="openstack/nova-metadata-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.874635 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.876808 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.878824 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.883516 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.895913 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.909640 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.922415 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.922630 4971 scope.go:117] "RemoveContainer" containerID="b6d2a31d08d150a164b1452a51573733e32c939ca73b89cb0afe0eba8742b9fc" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.924513 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.927693 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.928988 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.938794 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.973138 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c011972-ac6d-497d-9fd5-b5eed11ef507-logs\") pod \"nova-api-0\" (UID: \"5c011972-ac6d-497d-9fd5-b5eed11ef507\") " pod="openstack/nova-api-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.973263 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55562639-e816-4faf-8c2b-7124cd156a85-logs\") pod \"nova-metadata-0\" (UID: \"55562639-e816-4faf-8c2b-7124cd156a85\") " pod="openstack/nova-metadata-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.973304 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c011972-ac6d-497d-9fd5-b5eed11ef507-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5c011972-ac6d-497d-9fd5-b5eed11ef507\") " pod="openstack/nova-api-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.973620 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55562639-e816-4faf-8c2b-7124cd156a85-config-data\") pod \"nova-metadata-0\" (UID: \"55562639-e816-4faf-8c2b-7124cd156a85\") " pod="openstack/nova-metadata-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.973655 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c011972-ac6d-497d-9fd5-b5eed11ef507-config-data\") pod \"nova-api-0\" (UID: \"5c011972-ac6d-497d-9fd5-b5eed11ef507\") " pod="openstack/nova-api-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.973747 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36b2d44b-b297-4a2a-b166-841b28acf914-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"36b2d44b-b297-4a2a-b166-841b28acf914\") " pod="openstack/nova-scheduler-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.973782 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqfsn\" (UniqueName: \"kubernetes.io/projected/55562639-e816-4faf-8c2b-7124cd156a85-kube-api-access-tqfsn\") pod \"nova-metadata-0\" (UID: \"55562639-e816-4faf-8c2b-7124cd156a85\") " pod="openstack/nova-metadata-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.973797 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55562639-e816-4faf-8c2b-7124cd156a85-logs\") pod \"nova-metadata-0\" (UID: \"55562639-e816-4faf-8c2b-7124cd156a85\") " pod="openstack/nova-metadata-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.973818 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvkj7\" (UniqueName: \"kubernetes.io/projected/5c011972-ac6d-497d-9fd5-b5eed11ef507-kube-api-access-rvkj7\") pod \"nova-api-0\" (UID: \"5c011972-ac6d-497d-9fd5-b5eed11ef507\") " pod="openstack/nova-api-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 
08:56:44.973850 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55562639-e816-4faf-8c2b-7124cd156a85-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"55562639-e816-4faf-8c2b-7124cd156a85\") " pod="openstack/nova-metadata-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.973877 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxznk\" (UniqueName: \"kubernetes.io/projected/36b2d44b-b297-4a2a-b166-841b28acf914-kube-api-access-kxznk\") pod \"nova-scheduler-0\" (UID: \"36b2d44b-b297-4a2a-b166-841b28acf914\") " pod="openstack/nova-scheduler-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.973903 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36b2d44b-b297-4a2a-b166-841b28acf914-config-data\") pod \"nova-scheduler-0\" (UID: \"36b2d44b-b297-4a2a-b166-841b28acf914\") " pod="openstack/nova-scheduler-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.987327 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55562639-e816-4faf-8c2b-7124cd156a85-config-data\") pod \"nova-metadata-0\" (UID: \"55562639-e816-4faf-8c2b-7124cd156a85\") " pod="openstack/nova-metadata-0" Nov 27 08:56:44 crc kubenswrapper[4971]: I1127 08:56:44.996614 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55562639-e816-4faf-8c2b-7124cd156a85-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"55562639-e816-4faf-8c2b-7124cd156a85\") " pod="openstack/nova-metadata-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.005451 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqfsn\" (UniqueName: \"kubernetes.io/projected/55562639-e816-4faf-8c2b-7124cd156a85-kube-api-access-tqfsn\") pod \"nova-metadata-0\" (UID: \"55562639-e816-4faf-8c2b-7124cd156a85\") " pod="openstack/nova-metadata-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.075261 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c119bf30-716a-491e-ae7a-225b2bdf42fb-combined-ca-bundle\") pod \"c119bf30-716a-491e-ae7a-225b2bdf42fb\" (UID: \"c119bf30-716a-491e-ae7a-225b2bdf42fb\") " Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.075312 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c119bf30-716a-491e-ae7a-225b2bdf42fb-config-data\") pod \"c119bf30-716a-491e-ae7a-225b2bdf42fb\" (UID: \"c119bf30-716a-491e-ae7a-225b2bdf42fb\") " Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.075503 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hs8mj\" (UniqueName: \"kubernetes.io/projected/c119bf30-716a-491e-ae7a-225b2bdf42fb-kube-api-access-hs8mj\") pod \"c119bf30-716a-491e-ae7a-225b2bdf42fb\" (UID: \"c119bf30-716a-491e-ae7a-225b2bdf42fb\") " Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.075882 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c011972-ac6d-497d-9fd5-b5eed11ef507-combined-ca-bundle\") pod \"nova-api-0\" (UID: 
\"5c011972-ac6d-497d-9fd5-b5eed11ef507\") " pod="openstack/nova-api-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.075910 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c011972-ac6d-497d-9fd5-b5eed11ef507-config-data\") pod \"nova-api-0\" (UID: \"5c011972-ac6d-497d-9fd5-b5eed11ef507\") " pod="openstack/nova-api-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.075942 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f\") " pod="openstack/nova-cell0-conductor-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.075997 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36b2d44b-b297-4a2a-b166-841b28acf914-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"36b2d44b-b297-4a2a-b166-841b28acf914\") " pod="openstack/nova-scheduler-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.076031 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvkj7\" (UniqueName: \"kubernetes.io/projected/5c011972-ac6d-497d-9fd5-b5eed11ef507-kube-api-access-rvkj7\") pod \"nova-api-0\" (UID: \"5c011972-ac6d-497d-9fd5-b5eed11ef507\") " pod="openstack/nova-api-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.076050 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f\") " pod="openstack/nova-cell0-conductor-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.076078 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxznk\" (UniqueName: \"kubernetes.io/projected/36b2d44b-b297-4a2a-b166-841b28acf914-kube-api-access-kxznk\") pod \"nova-scheduler-0\" (UID: \"36b2d44b-b297-4a2a-b166-841b28acf914\") " pod="openstack/nova-scheduler-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.076097 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36b2d44b-b297-4a2a-b166-841b28acf914-config-data\") pod \"nova-scheduler-0\" (UID: \"36b2d44b-b297-4a2a-b166-841b28acf914\") " pod="openstack/nova-scheduler-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.076131 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmqxr\" (UniqueName: \"kubernetes.io/projected/9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f-kube-api-access-xmqxr\") pod \"nova-cell0-conductor-0\" (UID: \"9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f\") " pod="openstack/nova-cell0-conductor-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.076159 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c011972-ac6d-497d-9fd5-b5eed11ef507-logs\") pod \"nova-api-0\" (UID: \"5c011972-ac6d-497d-9fd5-b5eed11ef507\") " pod="openstack/nova-api-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.076699 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c011972-ac6d-497d-9fd5-b5eed11ef507-logs\") pod \"nova-api-0\" (UID: \"5c011972-ac6d-497d-9fd5-b5eed11ef507\") " pod="openstack/nova-api-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.080473 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36b2d44b-b297-4a2a-b166-841b28acf914-config-data\") pod \"nova-scheduler-0\" (UID: \"36b2d44b-b297-4a2a-b166-841b28acf914\") " pod="openstack/nova-scheduler-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.080958 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c119bf30-716a-491e-ae7a-225b2bdf42fb-kube-api-access-hs8mj" (OuterVolumeSpecName: "kube-api-access-hs8mj") pod "c119bf30-716a-491e-ae7a-225b2bdf42fb" (UID: "c119bf30-716a-491e-ae7a-225b2bdf42fb"). InnerVolumeSpecName "kube-api-access-hs8mj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.082721 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c011972-ac6d-497d-9fd5-b5eed11ef507-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5c011972-ac6d-497d-9fd5-b5eed11ef507\") " pod="openstack/nova-api-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.083452 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36b2d44b-b297-4a2a-b166-841b28acf914-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"36b2d44b-b297-4a2a-b166-841b28acf914\") " pod="openstack/nova-scheduler-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.087333 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c011972-ac6d-497d-9fd5-b5eed11ef507-config-data\") pod \"nova-api-0\" (UID: \"5c011972-ac6d-497d-9fd5-b5eed11ef507\") " pod="openstack/nova-api-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.092251 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxznk\" (UniqueName: \"kubernetes.io/projected/36b2d44b-b297-4a2a-b166-841b28acf914-kube-api-access-kxznk\") pod \"nova-scheduler-0\" (UID: \"36b2d44b-b297-4a2a-b166-841b28acf914\") " pod="openstack/nova-scheduler-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.096846 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvkj7\" (UniqueName: \"kubernetes.io/projected/5c011972-ac6d-497d-9fd5-b5eed11ef507-kube-api-access-rvkj7\") pod \"nova-api-0\" (UID: \"5c011972-ac6d-497d-9fd5-b5eed11ef507\") " pod="openstack/nova-api-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.121129 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.128808 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c119bf30-716a-491e-ae7a-225b2bdf42fb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c119bf30-716a-491e-ae7a-225b2bdf42fb" (UID: "c119bf30-716a-491e-ae7a-225b2bdf42fb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.145012 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c119bf30-716a-491e-ae7a-225b2bdf42fb-config-data" (OuterVolumeSpecName: "config-data") pod "c119bf30-716a-491e-ae7a-225b2bdf42fb" (UID: "c119bf30-716a-491e-ae7a-225b2bdf42fb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.180614 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f\") " pod="openstack/nova-cell0-conductor-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.180788 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f\") " pod="openstack/nova-cell0-conductor-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.180854 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmqxr\" (UniqueName: \"kubernetes.io/projected/9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f-kube-api-access-xmqxr\") pod \"nova-cell0-conductor-0\" (UID: \"9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f\") " pod="openstack/nova-cell0-conductor-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.180958 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c119bf30-716a-491e-ae7a-225b2bdf42fb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.180978 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c119bf30-716a-491e-ae7a-225b2bdf42fb-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.180989 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hs8mj\" (UniqueName: \"kubernetes.io/projected/c119bf30-716a-491e-ae7a-225b2bdf42fb-kube-api-access-hs8mj\") on node \"crc\" DevicePath \"\"" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.192460 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f\") " pod="openstack/nova-cell0-conductor-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.196321 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f\") " pod="openstack/nova-cell0-conductor-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.207389 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmqxr\" (UniqueName: \"kubernetes.io/projected/9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f-kube-api-access-xmqxr\") pod \"nova-cell0-conductor-0\" (UID: \"9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f\") " pod="openstack/nova-cell0-conductor-0" 
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.227072 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.242489 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.256017 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.568254 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c119bf30-716a-491e-ae7a-225b2bdf42fb","Type":"ContainerDied","Data":"9ea4dbcb6c7d34a6c86ab400925ca9228919bbc592ae5e600bf2983b3681660c"}
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.569259 4971 scope.go:117] "RemoveContainer" containerID="7c13bb87dc0a13e3e813b43c0aaf1db573a4ae5141fd66eef87b70d22e57de29"
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.568805 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.629635 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.644317 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.684359 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.686454 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Nov 27 08:56:45 crc kubenswrapper[4971]: E1127 08:56:45.686922 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c119bf30-716a-491e-ae7a-225b2bdf42fb" containerName="nova-cell1-conductor-conductor"
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.686943 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c119bf30-716a-491e-ae7a-225b2bdf42fb" containerName="nova-cell1-conductor-conductor"
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.687125 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c119bf30-716a-491e-ae7a-225b2bdf42fb" containerName="nova-cell1-conductor-conductor"
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.687785 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.690736 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.696183 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.793432 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlhdr\" (UniqueName: \"kubernetes.io/projected/6d59e37d-03b0-47d2-9e61-7976f397d2a8-kube-api-access-dlhdr\") pod \"nova-cell1-conductor-0\" (UID: \"6d59e37d-03b0-47d2-9e61-7976f397d2a8\") " pod="openstack/nova-cell1-conductor-0"
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.793618 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d59e37d-03b0-47d2-9e61-7976f397d2a8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6d59e37d-03b0-47d2-9e61-7976f397d2a8\") " pod="openstack/nova-cell1-conductor-0"
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.795173 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d59e37d-03b0-47d2-9e61-7976f397d2a8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6d59e37d-03b0-47d2-9e61-7976f397d2a8\") " pod="openstack/nova-cell1-conductor-0"
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.827206 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.900810 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d59e37d-03b0-47d2-9e61-7976f397d2a8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6d59e37d-03b0-47d2-9e61-7976f397d2a8\") " pod="openstack/nova-cell1-conductor-0"
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.900958 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlhdr\" (UniqueName: \"kubernetes.io/projected/6d59e37d-03b0-47d2-9e61-7976f397d2a8-kube-api-access-dlhdr\") pod \"nova-cell1-conductor-0\" (UID: \"6d59e37d-03b0-47d2-9e61-7976f397d2a8\") " pod="openstack/nova-cell1-conductor-0"
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.901061 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d59e37d-03b0-47d2-9e61-7976f397d2a8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6d59e37d-03b0-47d2-9e61-7976f397d2a8\") " pod="openstack/nova-cell1-conductor-0"
Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.908776 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d59e37d-03b0-47d2-9e61-7976f397d2a8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6d59e37d-03b0-47d2-9e61-7976f397d2a8\") " pod="openstack/nova-cell1-conductor-0"
\"6d59e37d-03b0-47d2-9e61-7976f397d2a8\") " pod="openstack/nova-cell1-conductor-0" Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.926309 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlhdr\" (UniqueName: \"kubernetes.io/projected/6d59e37d-03b0-47d2-9e61-7976f397d2a8-kube-api-access-dlhdr\") pod \"nova-cell1-conductor-0\" (UID: \"6d59e37d-03b0-47d2-9e61-7976f397d2a8\") " pod="openstack/nova-cell1-conductor-0" Nov 27 08:56:45 crc kubenswrapper[4971]: W1127 08:56:45.980646 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36b2d44b_b297_4a2a_b166_841b28acf914.slice/crio-0b759b6c67f492898f95dce6ce923c589a6f49add133afe599b81ff10fa29a90 WatchSource:0}: Error finding container 0b759b6c67f492898f95dce6ce923c589a6f49add133afe599b81ff10fa29a90: Status 404 returned error can't find the container with id 0b759b6c67f492898f95dce6ce923c589a6f49add133afe599b81ff10fa29a90 Nov 27 08:56:45 crc kubenswrapper[4971]: I1127 08:56:45.982381 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.014745 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.074268 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.080892 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.579910 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69d43348-4b62-4dc4-a920-2f702816dd3c" path="/var/lib/kubelet/pods/69d43348-4b62-4dc4-a920-2f702816dd3c/volumes" Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.581045 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a96dcaa9-3e6d-44ea-a535-3864fa826d2c" path="/var/lib/kubelet/pods/a96dcaa9-3e6d-44ea-a535-3864fa826d2c/volumes" Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.581797 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c119bf30-716a-491e-ae7a-225b2bdf42fb" path="/var/lib/kubelet/pods/c119bf30-716a-491e-ae7a-225b2bdf42fb/volumes" Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.583000 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7b90c66-3028-4daa-8df5-27c0ce1e10e6" path="/var/lib/kubelet/pods/e7b90c66-3028-4daa-8df5-27c0ce1e10e6/volumes" Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.583569 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8288baf-24dc-4fa9-9666-98ac1be3312b" path="/var/lib/kubelet/pods/e8288baf-24dc-4fa9-9666-98ac1be3312b/volumes" Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.584289 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"36b2d44b-b297-4a2a-b166-841b28acf914","Type":"ContainerStarted","Data":"016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253"} Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.584324 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"36b2d44b-b297-4a2a-b166-841b28acf914","Type":"ContainerStarted","Data":"0b759b6c67f492898f95dce6ce923c589a6f49add133afe599b81ff10fa29a90"} Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 
08:56:46.586173 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"55562639-e816-4faf-8c2b-7124cd156a85","Type":"ContainerStarted","Data":"68315fba2e7808c887d15af24f2991b56487f29af6f65f6f9debb64df02b0d37"} Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.586316 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"55562639-e816-4faf-8c2b-7124cd156a85","Type":"ContainerStarted","Data":"1df28c45fcfa92c627f267b10956aa0fae302e93e9a77d39a043fc9873783e9d"} Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.586380 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"55562639-e816-4faf-8c2b-7124cd156a85","Type":"ContainerStarted","Data":"910f58a10b6ff346cf763441827931d43bfe0a5f1973cf8bc559ff97e96a0d6e"} Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.591298 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f","Type":"ContainerStarted","Data":"ad34b8b5148d73ba7a1d4d233cd94e40511831af3da591fa83dd75b18a287a2b"} Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.591358 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f","Type":"ContainerStarted","Data":"27427448fa0d84dc131238024ee11c290a58d391f4c017bfd9effe5fcbc03ccc"} Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.592137 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.605447 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5c011972-ac6d-497d-9fd5-b5eed11ef507","Type":"ContainerStarted","Data":"cda5e5686740453b3094623d07ae2dcf3a09ceba6d1b35aa43b64e96455b225e"} Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.605501 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5c011972-ac6d-497d-9fd5-b5eed11ef507","Type":"ContainerStarted","Data":"bde6d0a98a06bd0e77828dade5f46fd57295440557006a26c3b1d9005be9ab07"} Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.615805 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.61577514 podStartE2EDuration="2.61577514s" podCreationTimestamp="2025-11-27 08:56:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:56:46.605774113 +0000 UTC m=+7444.797818031" watchObservedRunningTime="2025-11-27 08:56:46.61577514 +0000 UTC m=+7444.807819058" Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.631224 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.631192761 podStartE2EDuration="2.631192761s" podCreationTimestamp="2025-11-27 08:56:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:56:46.631121449 +0000 UTC m=+7444.823165367" watchObservedRunningTime="2025-11-27 08:56:46.631192761 +0000 UTC m=+7444.823236679" Nov 27 08:56:46 crc kubenswrapper[4971]: W1127 08:56:46.675726 4971 manager.go:1169] Failed to process watch event {EventType:0 
Nov 27 08:56:46 crc kubenswrapper[4971]: W1127 08:56:46.675726 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d59e37d_03b0_47d2_9e61_7976f397d2a8.slice/crio-f7d585826069730538858b97d3e99c8f7b20d29bb40b2e72efc2964f1182a53a WatchSource:0}: Error finding container f7d585826069730538858b97d3e99c8f7b20d29bb40b2e72efc2964f1182a53a: Status 404 returned error can't find the container with id f7d585826069730538858b97d3e99c8f7b20d29bb40b2e72efc2964f1182a53a
Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.676023 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Nov 27 08:56:46 crc kubenswrapper[4971]: I1127 08:56:46.684492 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.684461626 podStartE2EDuration="2.684461626s" podCreationTimestamp="2025-11-27 08:56:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:56:46.659741649 +0000 UTC m=+7444.851785587" watchObservedRunningTime="2025-11-27 08:56:46.684461626 +0000 UTC m=+7444.876505544"
Nov 27 08:56:47 crc kubenswrapper[4971]: I1127 08:56:47.618498 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5c011972-ac6d-497d-9fd5-b5eed11ef507","Type":"ContainerStarted","Data":"5049d8a6c9ddf9e828beac44153eb7f5791e0a7cb1ebf7c2a3377062621684d3"}
Nov 27 08:56:47 crc kubenswrapper[4971]: I1127 08:56:47.621950 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6d59e37d-03b0-47d2-9e61-7976f397d2a8","Type":"ContainerStarted","Data":"b9495126122a5659954daa77d5bb7bfa0a719ee215ba76a44ce1102e2ca3d70c"}
Nov 27 08:56:47 crc kubenswrapper[4971]: I1127 08:56:47.622021 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6d59e37d-03b0-47d2-9e61-7976f397d2a8","Type":"ContainerStarted","Data":"f7d585826069730538858b97d3e99c8f7b20d29bb40b2e72efc2964f1182a53a"}
Nov 27 08:56:47 crc kubenswrapper[4971]: I1127 08:56:47.622856 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Nov 27 08:56:47 crc kubenswrapper[4971]: I1127 08:56:47.643664 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.6436410219999997 podStartE2EDuration="3.643641022s" podCreationTimestamp="2025-11-27 08:56:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:56:47.639603926 +0000 UTC m=+7445.831647844" watchObservedRunningTime="2025-11-27 08:56:47.643641022 +0000 UTC m=+7445.835684940"
Nov 27 08:56:47 crc kubenswrapper[4971]: I1127 08:56:47.660923 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.660897716 podStartE2EDuration="2.660897716s" podCreationTimestamp="2025-11-27 08:56:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:56:47.660027161 +0000 UTC m=+7445.852071089" watchObservedRunningTime="2025-11-27 08:56:47.660897716 +0000 UTC m=+7445.852941624"
Nov 27 08:56:50 crc kubenswrapper[4971]: I1127 08:56:50.122610 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Nov 27 08:56:50 crc kubenswrapper[4971]: I1127 08:56:50.123176 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Nov 27 08:56:50 crc kubenswrapper[4971]: I1127 08:56:50.227270 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Nov 27 08:56:50 crc kubenswrapper[4971]: I1127 08:56:50.684060 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0"
Nov 27 08:56:50 crc kubenswrapper[4971]: I1127 08:56:50.698025 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0"
Nov 27 08:56:51 crc kubenswrapper[4971]: I1127 08:56:51.041451 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Nov 27 08:56:51 crc kubenswrapper[4971]: I1127 08:56:51.691630 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0"
Nov 27 08:56:53 crc kubenswrapper[4971]: I1127 08:56:53.582575 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gq85d"]
Nov 27 08:56:53 crc kubenswrapper[4971]: I1127 08:56:53.586983 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gq85d"
Nov 27 08:56:53 crc kubenswrapper[4971]: I1127 08:56:53.595069 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gq85d"]
Nov 27 08:56:53 crc kubenswrapper[4971]: I1127 08:56:53.729817 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b70cf8bb-240b-4336-ba16-2010c3ccdbea-catalog-content\") pod \"community-operators-gq85d\" (UID: \"b70cf8bb-240b-4336-ba16-2010c3ccdbea\") " pod="openshift-marketplace/community-operators-gq85d"
Nov 27 08:56:53 crc kubenswrapper[4971]: I1127 08:56:53.729875 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wvgq\" (UniqueName: \"kubernetes.io/projected/b70cf8bb-240b-4336-ba16-2010c3ccdbea-kube-api-access-9wvgq\") pod \"community-operators-gq85d\" (UID: \"b70cf8bb-240b-4336-ba16-2010c3ccdbea\") " pod="openshift-marketplace/community-operators-gq85d"
Nov 27 08:56:53 crc kubenswrapper[4971]: I1127 08:56:53.729967 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b70cf8bb-240b-4336-ba16-2010c3ccdbea-utilities\") pod \"community-operators-gq85d\" (UID: \"b70cf8bb-240b-4336-ba16-2010c3ccdbea\") " pod="openshift-marketplace/community-operators-gq85d"
Nov 27 08:56:53 crc kubenswrapper[4971]: I1127 08:56:53.832263 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b70cf8bb-240b-4336-ba16-2010c3ccdbea-catalog-content\") pod \"community-operators-gq85d\" (UID: \"b70cf8bb-240b-4336-ba16-2010c3ccdbea\") " pod="openshift-marketplace/community-operators-gq85d"
pod="openshift-marketplace/community-operators-gq85d" Nov 27 08:56:53 crc kubenswrapper[4971]: I1127 08:56:53.832806 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b70cf8bb-240b-4336-ba16-2010c3ccdbea-utilities\") pod \"community-operators-gq85d\" (UID: \"b70cf8bb-240b-4336-ba16-2010c3ccdbea\") " pod="openshift-marketplace/community-operators-gq85d" Nov 27 08:56:53 crc kubenswrapper[4971]: I1127 08:56:53.832925 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b70cf8bb-240b-4336-ba16-2010c3ccdbea-catalog-content\") pod \"community-operators-gq85d\" (UID: \"b70cf8bb-240b-4336-ba16-2010c3ccdbea\") " pod="openshift-marketplace/community-operators-gq85d" Nov 27 08:56:53 crc kubenswrapper[4971]: I1127 08:56:53.833167 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b70cf8bb-240b-4336-ba16-2010c3ccdbea-utilities\") pod \"community-operators-gq85d\" (UID: \"b70cf8bb-240b-4336-ba16-2010c3ccdbea\") " pod="openshift-marketplace/community-operators-gq85d" Nov 27 08:56:53 crc kubenswrapper[4971]: I1127 08:56:53.855123 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wvgq\" (UniqueName: \"kubernetes.io/projected/b70cf8bb-240b-4336-ba16-2010c3ccdbea-kube-api-access-9wvgq\") pod \"community-operators-gq85d\" (UID: \"b70cf8bb-240b-4336-ba16-2010c3ccdbea\") " pod="openshift-marketplace/community-operators-gq85d" Nov 27 08:56:53 crc kubenswrapper[4971]: I1127 08:56:53.964092 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gq85d" Nov 27 08:56:54 crc kubenswrapper[4971]: I1127 08:56:54.541751 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gq85d"] Nov 27 08:56:54 crc kubenswrapper[4971]: I1127 08:56:54.727874 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gq85d" event={"ID":"b70cf8bb-240b-4336-ba16-2010c3ccdbea","Type":"ContainerStarted","Data":"ad807f04c0e7facb0714c5df0ae71e7f9b58d71edd7cd85c37c7693150725c9b"} Nov 27 08:56:55 crc kubenswrapper[4971]: I1127 08:56:55.121805 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 27 08:56:55 crc kubenswrapper[4971]: I1127 08:56:55.122152 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 27 08:56:55 crc kubenswrapper[4971]: I1127 08:56:55.227524 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 27 08:56:55 crc kubenswrapper[4971]: I1127 08:56:55.244053 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 27 08:56:55 crc kubenswrapper[4971]: I1127 08:56:55.244120 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 27 08:56:55 crc kubenswrapper[4971]: I1127 08:56:55.262195 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 27 08:56:55 crc kubenswrapper[4971]: I1127 08:56:55.308378 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Nov 27 08:56:55 crc kubenswrapper[4971]: I1127 08:56:55.740199 
4971 generic.go:334] "Generic (PLEG): container finished" podID="b70cf8bb-240b-4336-ba16-2010c3ccdbea" containerID="3f9fc24c22104932fd3ad4a6441598f5300091589b8ab6fe15f0589d69233a94" exitCode=0 Nov 27 08:56:55 crc kubenswrapper[4971]: I1127 08:56:55.741843 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gq85d" event={"ID":"b70cf8bb-240b-4336-ba16-2010c3ccdbea","Type":"ContainerDied","Data":"3f9fc24c22104932fd3ad4a6441598f5300091589b8ab6fe15f0589d69233a94"} Nov 27 08:56:55 crc kubenswrapper[4971]: I1127 08:56:55.782097 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 27 08:56:56 crc kubenswrapper[4971]: I1127 08:56:56.203820 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="55562639-e816-4faf-8c2b-7124cd156a85" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.94:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 08:56:56 crc kubenswrapper[4971]: I1127 08:56:56.203879 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="55562639-e816-4faf-8c2b-7124cd156a85" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.94:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 08:56:56 crc kubenswrapper[4971]: I1127 08:56:56.325834 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="5c011972-ac6d-497d-9fd5-b5eed11ef507" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.96:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 08:56:56 crc kubenswrapper[4971]: I1127 08:56:56.326249 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="5c011972-ac6d-497d-9fd5-b5eed11ef507" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.96:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 08:56:57 crc kubenswrapper[4971]: I1127 08:56:57.768724 4971 generic.go:334] "Generic (PLEG): container finished" podID="b70cf8bb-240b-4336-ba16-2010c3ccdbea" containerID="a350b9a50e3fd4db4edfa2e40a080dbcdea3c359e0243b7717c35c1592a24845" exitCode=0 Nov 27 08:56:57 crc kubenswrapper[4971]: I1127 08:56:57.769204 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gq85d" event={"ID":"b70cf8bb-240b-4336-ba16-2010c3ccdbea","Type":"ContainerDied","Data":"a350b9a50e3fd4db4edfa2e40a080dbcdea3c359e0243b7717c35c1592a24845"} Nov 27 08:56:57 crc kubenswrapper[4971]: I1127 08:56:57.942399 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2dw72"] Nov 27 08:56:57 crc kubenswrapper[4971]: I1127 08:56:57.944459 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2dw72" Nov 27 08:56:57 crc kubenswrapper[4971]: I1127 08:56:57.959081 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2dw72"] Nov 27 08:56:58 crc kubenswrapper[4971]: I1127 08:56:58.041488 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11702b67-c472-4edb-8e00-88a6b36cd1b9-utilities\") pod \"redhat-marketplace-2dw72\" (UID: \"11702b67-c472-4edb-8e00-88a6b36cd1b9\") " pod="openshift-marketplace/redhat-marketplace-2dw72" Nov 27 08:56:58 crc kubenswrapper[4971]: I1127 08:56:58.041657 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11702b67-c472-4edb-8e00-88a6b36cd1b9-catalog-content\") pod \"redhat-marketplace-2dw72\" (UID: \"11702b67-c472-4edb-8e00-88a6b36cd1b9\") " pod="openshift-marketplace/redhat-marketplace-2dw72" Nov 27 08:56:58 crc kubenswrapper[4971]: I1127 08:56:58.041789 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2k2h\" (UniqueName: \"kubernetes.io/projected/11702b67-c472-4edb-8e00-88a6b36cd1b9-kube-api-access-z2k2h\") pod \"redhat-marketplace-2dw72\" (UID: \"11702b67-c472-4edb-8e00-88a6b36cd1b9\") " pod="openshift-marketplace/redhat-marketplace-2dw72" Nov 27 08:56:58 crc kubenswrapper[4971]: I1127 08:56:58.144269 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11702b67-c472-4edb-8e00-88a6b36cd1b9-utilities\") pod \"redhat-marketplace-2dw72\" (UID: \"11702b67-c472-4edb-8e00-88a6b36cd1b9\") " pod="openshift-marketplace/redhat-marketplace-2dw72" Nov 27 08:56:58 crc kubenswrapper[4971]: I1127 08:56:58.144413 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11702b67-c472-4edb-8e00-88a6b36cd1b9-catalog-content\") pod \"redhat-marketplace-2dw72\" (UID: \"11702b67-c472-4edb-8e00-88a6b36cd1b9\") " pod="openshift-marketplace/redhat-marketplace-2dw72" Nov 27 08:56:58 crc kubenswrapper[4971]: I1127 08:56:58.144499 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2k2h\" (UniqueName: \"kubernetes.io/projected/11702b67-c472-4edb-8e00-88a6b36cd1b9-kube-api-access-z2k2h\") pod \"redhat-marketplace-2dw72\" (UID: \"11702b67-c472-4edb-8e00-88a6b36cd1b9\") " pod="openshift-marketplace/redhat-marketplace-2dw72" Nov 27 08:56:58 crc kubenswrapper[4971]: I1127 08:56:58.145110 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11702b67-c472-4edb-8e00-88a6b36cd1b9-utilities\") pod \"redhat-marketplace-2dw72\" (UID: \"11702b67-c472-4edb-8e00-88a6b36cd1b9\") " pod="openshift-marketplace/redhat-marketplace-2dw72" Nov 27 08:56:58 crc kubenswrapper[4971]: I1127 08:56:58.145110 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11702b67-c472-4edb-8e00-88a6b36cd1b9-catalog-content\") pod \"redhat-marketplace-2dw72\" (UID: \"11702b67-c472-4edb-8e00-88a6b36cd1b9\") " pod="openshift-marketplace/redhat-marketplace-2dw72" Nov 27 08:56:58 crc kubenswrapper[4971]: I1127 08:56:58.175382 4971 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-z2k2h\" (UniqueName: \"kubernetes.io/projected/11702b67-c472-4edb-8e00-88a6b36cd1b9-kube-api-access-z2k2h\") pod \"redhat-marketplace-2dw72\" (UID: \"11702b67-c472-4edb-8e00-88a6b36cd1b9\") " pod="openshift-marketplace/redhat-marketplace-2dw72" Nov 27 08:56:58 crc kubenswrapper[4971]: I1127 08:56:58.266943 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2dw72" Nov 27 08:56:58 crc kubenswrapper[4971]: I1127 08:56:58.792865 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gq85d" event={"ID":"b70cf8bb-240b-4336-ba16-2010c3ccdbea","Type":"ContainerStarted","Data":"3205f318ca7da640cf0c64a786fbd18b19fc582c4817f890b9f2ef77fda693ba"} Nov 27 08:56:58 crc kubenswrapper[4971]: I1127 08:56:58.795724 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2dw72"] Nov 27 08:56:58 crc kubenswrapper[4971]: I1127 08:56:58.821302 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gq85d" podStartSLOduration=3.11898756 podStartE2EDuration="5.821268027s" podCreationTimestamp="2025-11-27 08:56:53 +0000 UTC" firstStartedPulling="2025-11-27 08:56:55.742569941 +0000 UTC m=+7453.934613859" lastFinishedPulling="2025-11-27 08:56:58.444850408 +0000 UTC m=+7456.636894326" observedRunningTime="2025-11-27 08:56:58.812234698 +0000 UTC m=+7457.004278616" watchObservedRunningTime="2025-11-27 08:56:58.821268027 +0000 UTC m=+7457.013311945" Nov 27 08:56:59 crc kubenswrapper[4971]: I1127 08:56:59.806592 4971 generic.go:334] "Generic (PLEG): container finished" podID="11702b67-c472-4edb-8e00-88a6b36cd1b9" containerID="bb52ce97afde6e77a771dad4f1f77a4ba90bf9194a83d11f8fd7974d23593bbd" exitCode=0 Nov 27 08:56:59 crc kubenswrapper[4971]: I1127 08:56:59.808311 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2dw72" event={"ID":"11702b67-c472-4edb-8e00-88a6b36cd1b9","Type":"ContainerDied","Data":"bb52ce97afde6e77a771dad4f1f77a4ba90bf9194a83d11f8fd7974d23593bbd"} Nov 27 08:56:59 crc kubenswrapper[4971]: I1127 08:56:59.808346 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2dw72" event={"ID":"11702b67-c472-4edb-8e00-88a6b36cd1b9","Type":"ContainerStarted","Data":"2a7681a9fd212240c13d50545679bbd9f0b1b72c51ec7327b25473ed4f42d80f"} Nov 27 08:57:01 crc kubenswrapper[4971]: I1127 08:57:01.841558 4971 generic.go:334] "Generic (PLEG): container finished" podID="11702b67-c472-4edb-8e00-88a6b36cd1b9" containerID="fa4e8935b8336cad3d53c0d8c312bacbf5020c9b14ffeb785888710c5fe8c27f" exitCode=0 Nov 27 08:57:01 crc kubenswrapper[4971]: I1127 08:57:01.841653 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2dw72" event={"ID":"11702b67-c472-4edb-8e00-88a6b36cd1b9","Type":"ContainerDied","Data":"fa4e8935b8336cad3d53c0d8c312bacbf5020c9b14ffeb785888710c5fe8c27f"} Nov 27 08:57:03 crc kubenswrapper[4971]: I1127 08:57:03.867160 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2dw72" event={"ID":"11702b67-c472-4edb-8e00-88a6b36cd1b9","Type":"ContainerStarted","Data":"66ef438949b3b47aa8977cb824a2fa80b00ccc172c0695088633cca72057f76d"} Nov 27 08:57:03 crc kubenswrapper[4971]: I1127 08:57:03.897690 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-marketplace-2dw72" podStartSLOduration=3.542439701 podStartE2EDuration="6.897663575s" podCreationTimestamp="2025-11-27 08:56:57 +0000 UTC" firstStartedPulling="2025-11-27 08:56:59.810449222 +0000 UTC m=+7458.002493150" lastFinishedPulling="2025-11-27 08:57:03.165673106 +0000 UTC m=+7461.357717024" observedRunningTime="2025-11-27 08:57:03.886694201 +0000 UTC m=+7462.078738119" watchObservedRunningTime="2025-11-27 08:57:03.897663575 +0000 UTC m=+7462.089707503" Nov 27 08:57:03 crc kubenswrapper[4971]: I1127 08:57:03.964340 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gq85d" Nov 27 08:57:03 crc kubenswrapper[4971]: I1127 08:57:03.964399 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gq85d" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.019551 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gq85d" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.592280 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.594261 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.597044 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.624094 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.677250 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-scripts\") pod \"cinder-scheduler-0\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.677331 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.677387 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4a80336f-fdf0-4b01-878a-af81d4c5f310-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.677524 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.677596 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7wlh\" (UniqueName: \"kubernetes.io/projected/4a80336f-fdf0-4b01-878a-af81d4c5f310-kube-api-access-q7wlh\") 
pod \"cinder-scheduler-0\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.677676 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-config-data\") pod \"cinder-scheduler-0\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.779938 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.780012 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7wlh\" (UniqueName: \"kubernetes.io/projected/4a80336f-fdf0-4b01-878a-af81d4c5f310-kube-api-access-q7wlh\") pod \"cinder-scheduler-0\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.780070 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-config-data\") pod \"cinder-scheduler-0\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.780103 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-scripts\") pod \"cinder-scheduler-0\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.780123 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.780159 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4a80336f-fdf0-4b01-878a-af81d4c5f310-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.780295 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4a80336f-fdf0-4b01-878a-af81d4c5f310-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.786043 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-scripts\") pod \"cinder-scheduler-0\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.786389 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-config-data\") pod \"cinder-scheduler-0\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.787169 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.794842 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.798057 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7wlh\" (UniqueName: \"kubernetes.io/projected/4a80336f-fdf0-4b01-878a-af81d4c5f310-kube-api-access-q7wlh\") pod \"cinder-scheduler-0\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.925694 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gq85d" Nov 27 08:57:04 crc kubenswrapper[4971]: I1127 08:57:04.934513 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 27 08:57:05 crc kubenswrapper[4971]: I1127 08:57:05.127686 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 27 08:57:05 crc kubenswrapper[4971]: I1127 08:57:05.128095 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 27 08:57:05 crc kubenswrapper[4971]: I1127 08:57:05.130935 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 27 08:57:05 crc kubenswrapper[4971]: I1127 08:57:05.131037 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 27 08:57:05 crc kubenswrapper[4971]: I1127 08:57:05.246865 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 27 08:57:05 crc kubenswrapper[4971]: I1127 08:57:05.248081 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 27 08:57:05 crc kubenswrapper[4971]: I1127 08:57:05.248420 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 27 08:57:05 crc kubenswrapper[4971]: I1127 08:57:05.250842 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 27 08:57:05 crc kubenswrapper[4971]: W1127 08:57:05.396127 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a80336f_fdf0_4b01_878a_af81d4c5f310.slice/crio-7a908779fcf640d268ca773ea0ff158c16327c67d57e06e748d670df10d9491f WatchSource:0}: Error finding container 7a908779fcf640d268ca773ea0ff158c16327c67d57e06e748d670df10d9491f: Status 404 returned error can't find the container with id 
7a908779fcf640d268ca773ea0ff158c16327c67d57e06e748d670df10d9491f Nov 27 08:57:05 crc kubenswrapper[4971]: I1127 08:57:05.398893 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 27 08:57:05 crc kubenswrapper[4971]: I1127 08:57:05.885715 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4a80336f-fdf0-4b01-878a-af81d4c5f310","Type":"ContainerStarted","Data":"7a908779fcf640d268ca773ea0ff158c16327c67d57e06e748d670df10d9491f"} Nov 27 08:57:05 crc kubenswrapper[4971]: I1127 08:57:05.886713 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 27 08:57:05 crc kubenswrapper[4971]: I1127 08:57:05.897011 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 27 08:57:06 crc kubenswrapper[4971]: I1127 08:57:06.134957 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gq85d"] Nov 27 08:57:06 crc kubenswrapper[4971]: I1127 08:57:06.444773 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 27 08:57:06 crc kubenswrapper[4971]: I1127 08:57:06.445222 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="0d609d64-7197-43f6-820c-cdfafd7bcfe8" containerName="cinder-api-log" containerID="cri-o://692e20e108bbc1595c1ecee5d8d9a890a48fb05164378b3dab73527d66185071" gracePeriod=30 Nov 27 08:57:06 crc kubenswrapper[4971]: I1127 08:57:06.445806 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="0d609d64-7197-43f6-820c-cdfafd7bcfe8" containerName="cinder-api" containerID="cri-o://fa836b024b0c8aaddf48007e345a15e9fb10f7606b0222c3bdeaea669bc620ab" gracePeriod=30 Nov 27 08:57:06 crc kubenswrapper[4971]: I1127 08:57:06.917204 4971 generic.go:334] "Generic (PLEG): container finished" podID="0d609d64-7197-43f6-820c-cdfafd7bcfe8" containerID="692e20e108bbc1595c1ecee5d8d9a890a48fb05164378b3dab73527d66185071" exitCode=143 Nov 27 08:57:06 crc kubenswrapper[4971]: I1127 08:57:06.917640 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0d609d64-7197-43f6-820c-cdfafd7bcfe8","Type":"ContainerDied","Data":"692e20e108bbc1595c1ecee5d8d9a890a48fb05164378b3dab73527d66185071"} Nov 27 08:57:06 crc kubenswrapper[4971]: I1127 08:57:06.930308 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gq85d" podUID="b70cf8bb-240b-4336-ba16-2010c3ccdbea" containerName="registry-server" containerID="cri-o://3205f318ca7da640cf0c64a786fbd18b19fc582c4817f890b9f2ef77fda693ba" gracePeriod=2 Nov 27 08:57:06 crc kubenswrapper[4971]: I1127 08:57:06.930602 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4a80336f-fdf0-4b01-878a-af81d4c5f310","Type":"ContainerStarted","Data":"0d0364a476dcd68601131461e1cd0c874bdc8bb09adddcf0801b2e258db73f13"} Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.147768 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.149292 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.153007 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.164414 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.267038 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.267357 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/25a18d82-9dba-46e3-b37d-da25ab109122-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.267391 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.267427 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-dev\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.267458 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25a18d82-9dba-46e3-b37d-da25ab109122-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.267473 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.267489 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25a18d82-9dba-46e3-b37d-da25ab109122-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.267507 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc 
kubenswrapper[4971]: I1127 08:57:07.267528 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25a18d82-9dba-46e3-b37d-da25ab109122-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.267579 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncnrh\" (UniqueName: \"kubernetes.io/projected/25a18d82-9dba-46e3-b37d-da25ab109122-kube-api-access-ncnrh\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.267629 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/25a18d82-9dba-46e3-b37d-da25ab109122-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.267645 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-run\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.267662 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-sys\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.267679 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.267697 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.267721 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.369308 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 
08:57:07.369383 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/25a18d82-9dba-46e3-b37d-da25ab109122-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.369431 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.369481 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-dev\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.369524 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25a18d82-9dba-46e3-b37d-da25ab109122-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.369565 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.369590 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25a18d82-9dba-46e3-b37d-da25ab109122-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.369615 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.369642 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25a18d82-9dba-46e3-b37d-da25ab109122-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.369665 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncnrh\" (UniqueName: \"kubernetes.io/projected/25a18d82-9dba-46e3-b37d-da25ab109122-kube-api-access-ncnrh\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.369731 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/projected/25a18d82-9dba-46e3-b37d-da25ab109122-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.369753 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-run\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.369783 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-sys\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.369807 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.369828 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.369854 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.370185 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.370200 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.370430 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.370484 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-run\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 
08:57:07.371116 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.371247 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.371668 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.371696 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.371745 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-sys\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.371779 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/25a18d82-9dba-46e3-b37d-da25ab109122-dev\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.376091 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/25a18d82-9dba-46e3-b37d-da25ab109122-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.376725 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25a18d82-9dba-46e3-b37d-da25ab109122-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.376829 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25a18d82-9dba-46e3-b37d-da25ab109122-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.381978 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/25a18d82-9dba-46e3-b37d-da25ab109122-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " 
pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.386527 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25a18d82-9dba-46e3-b37d-da25ab109122-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.396350 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncnrh\" (UniqueName: \"kubernetes.io/projected/25a18d82-9dba-46e3-b37d-da25ab109122-kube-api-access-ncnrh\") pod \"cinder-volume-volume1-0\" (UID: \"25a18d82-9dba-46e3-b37d-da25ab109122\") " pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.479917 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.545570 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gq85d" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.676900 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b70cf8bb-240b-4336-ba16-2010c3ccdbea-catalog-content\") pod \"b70cf8bb-240b-4336-ba16-2010c3ccdbea\" (UID: \"b70cf8bb-240b-4336-ba16-2010c3ccdbea\") " Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.677333 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b70cf8bb-240b-4336-ba16-2010c3ccdbea-utilities\") pod \"b70cf8bb-240b-4336-ba16-2010c3ccdbea\" (UID: \"b70cf8bb-240b-4336-ba16-2010c3ccdbea\") " Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.677514 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9wvgq\" (UniqueName: \"kubernetes.io/projected/b70cf8bb-240b-4336-ba16-2010c3ccdbea-kube-api-access-9wvgq\") pod \"b70cf8bb-240b-4336-ba16-2010c3ccdbea\" (UID: \"b70cf8bb-240b-4336-ba16-2010c3ccdbea\") " Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.681282 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b70cf8bb-240b-4336-ba16-2010c3ccdbea-utilities" (OuterVolumeSpecName: "utilities") pod "b70cf8bb-240b-4336-ba16-2010c3ccdbea" (UID: "b70cf8bb-240b-4336-ba16-2010c3ccdbea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.690932 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b70cf8bb-240b-4336-ba16-2010c3ccdbea-kube-api-access-9wvgq" (OuterVolumeSpecName: "kube-api-access-9wvgq") pod "b70cf8bb-240b-4336-ba16-2010c3ccdbea" (UID: "b70cf8bb-240b-4336-ba16-2010c3ccdbea"). InnerVolumeSpecName "kube-api-access-9wvgq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.742918 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b70cf8bb-240b-4336-ba16-2010c3ccdbea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b70cf8bb-240b-4336-ba16-2010c3ccdbea" (UID: "b70cf8bb-240b-4336-ba16-2010c3ccdbea"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.780092 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9wvgq\" (UniqueName: \"kubernetes.io/projected/b70cf8bb-240b-4336-ba16-2010c3ccdbea-kube-api-access-9wvgq\") on node \"crc\" DevicePath \"\"" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.780140 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b70cf8bb-240b-4336-ba16-2010c3ccdbea-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.780154 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b70cf8bb-240b-4336-ba16-2010c3ccdbea-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.926269 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Nov 27 08:57:07 crc kubenswrapper[4971]: E1127 08:57:07.926797 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b70cf8bb-240b-4336-ba16-2010c3ccdbea" containerName="extract-utilities" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.926818 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b70cf8bb-240b-4336-ba16-2010c3ccdbea" containerName="extract-utilities" Nov 27 08:57:07 crc kubenswrapper[4971]: E1127 08:57:07.926847 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b70cf8bb-240b-4336-ba16-2010c3ccdbea" containerName="extract-content" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.926858 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b70cf8bb-240b-4336-ba16-2010c3ccdbea" containerName="extract-content" Nov 27 08:57:07 crc kubenswrapper[4971]: E1127 08:57:07.926900 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b70cf8bb-240b-4336-ba16-2010c3ccdbea" containerName="registry-server" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.926908 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b70cf8bb-240b-4336-ba16-2010c3ccdbea" containerName="registry-server" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.927135 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b70cf8bb-240b-4336-ba16-2010c3ccdbea" containerName="registry-server" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.928325 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.931090 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.946281 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.947932 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4a80336f-fdf0-4b01-878a-af81d4c5f310","Type":"ContainerStarted","Data":"292271dd559ae909c51963e1d0118c9452258c4e9237775dc4029634b66a7285"} Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.958832 4971 generic.go:334] "Generic (PLEG): container finished" podID="b70cf8bb-240b-4336-ba16-2010c3ccdbea" containerID="3205f318ca7da640cf0c64a786fbd18b19fc582c4817f890b9f2ef77fda693ba" exitCode=0 Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.958930 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gq85d" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.958989 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gq85d" event={"ID":"b70cf8bb-240b-4336-ba16-2010c3ccdbea","Type":"ContainerDied","Data":"3205f318ca7da640cf0c64a786fbd18b19fc582c4817f890b9f2ef77fda693ba"} Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.959030 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gq85d" event={"ID":"b70cf8bb-240b-4336-ba16-2010c3ccdbea","Type":"ContainerDied","Data":"ad807f04c0e7facb0714c5df0ae71e7f9b58d71edd7cd85c37c7693150725c9b"} Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.959054 4971 scope.go:117] "RemoveContainer" containerID="3205f318ca7da640cf0c64a786fbd18b19fc582c4817f890b9f2ef77fda693ba" Nov 27 08:57:07 crc kubenswrapper[4971]: I1127 08:57:07.995173 4971 scope.go:117] "RemoveContainer" containerID="a350b9a50e3fd4db4edfa2e40a080dbcdea3c359e0243b7717c35c1592a24845" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.012385 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.688535435 podStartE2EDuration="4.012361777s" podCreationTimestamp="2025-11-27 08:57:04 +0000 UTC" firstStartedPulling="2025-11-27 08:57:05.398699437 +0000 UTC m=+7463.590743355" lastFinishedPulling="2025-11-27 08:57:05.722525779 +0000 UTC m=+7463.914569697" observedRunningTime="2025-11-27 08:57:08.005889851 +0000 UTC m=+7466.197933769" watchObservedRunningTime="2025-11-27 08:57:08.012361777 +0000 UTC m=+7466.204405695" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.049204 4971 scope.go:117] "RemoveContainer" containerID="3f9fc24c22104932fd3ad4a6441598f5300091589b8ab6fe15f0589d69233a94" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.049444 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gq85d"] Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.064522 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gq85d"] Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.083921 4971 scope.go:117] "RemoveContainer" containerID="3205f318ca7da640cf0c64a786fbd18b19fc582c4817f890b9f2ef77fda693ba" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.085095 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-lib-modules\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.085174 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.085198 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-etc-nvme\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.085218 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.085258 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.085284 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-config-data-custom\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.085313 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-dev\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.085338 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.085370 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-ceph\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.085456 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.085496 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-run\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.085517 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-sys\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.085558 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qw9cd\" (UniqueName: \"kubernetes.io/projected/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-kube-api-access-qw9cd\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.085589 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-config-data\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.085625 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-scripts\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.085661 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: E1127 08:57:08.087930 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3205f318ca7da640cf0c64a786fbd18b19fc582c4817f890b9f2ef77fda693ba\": container with ID starting with 3205f318ca7da640cf0c64a786fbd18b19fc582c4817f890b9f2ef77fda693ba not found: ID does not exist" containerID="3205f318ca7da640cf0c64a786fbd18b19fc582c4817f890b9f2ef77fda693ba" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.087967 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3205f318ca7da640cf0c64a786fbd18b19fc582c4817f890b9f2ef77fda693ba"} err="failed to get container status \"3205f318ca7da640cf0c64a786fbd18b19fc582c4817f890b9f2ef77fda693ba\": rpc error: code = NotFound desc = could not find container \"3205f318ca7da640cf0c64a786fbd18b19fc582c4817f890b9f2ef77fda693ba\": container with ID starting with 3205f318ca7da640cf0c64a786fbd18b19fc582c4817f890b9f2ef77fda693ba not found: ID does not exist" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.087991 4971 
scope.go:117] "RemoveContainer" containerID="a350b9a50e3fd4db4edfa2e40a080dbcdea3c359e0243b7717c35c1592a24845" Nov 27 08:57:08 crc kubenswrapper[4971]: E1127 08:57:08.088446 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a350b9a50e3fd4db4edfa2e40a080dbcdea3c359e0243b7717c35c1592a24845\": container with ID starting with a350b9a50e3fd4db4edfa2e40a080dbcdea3c359e0243b7717c35c1592a24845 not found: ID does not exist" containerID="a350b9a50e3fd4db4edfa2e40a080dbcdea3c359e0243b7717c35c1592a24845" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.088474 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a350b9a50e3fd4db4edfa2e40a080dbcdea3c359e0243b7717c35c1592a24845"} err="failed to get container status \"a350b9a50e3fd4db4edfa2e40a080dbcdea3c359e0243b7717c35c1592a24845\": rpc error: code = NotFound desc = could not find container \"a350b9a50e3fd4db4edfa2e40a080dbcdea3c359e0243b7717c35c1592a24845\": container with ID starting with a350b9a50e3fd4db4edfa2e40a080dbcdea3c359e0243b7717c35c1592a24845 not found: ID does not exist" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.088489 4971 scope.go:117] "RemoveContainer" containerID="3f9fc24c22104932fd3ad4a6441598f5300091589b8ab6fe15f0589d69233a94" Nov 27 08:57:08 crc kubenswrapper[4971]: E1127 08:57:08.088947 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f9fc24c22104932fd3ad4a6441598f5300091589b8ab6fe15f0589d69233a94\": container with ID starting with 3f9fc24c22104932fd3ad4a6441598f5300091589b8ab6fe15f0589d69233a94 not found: ID does not exist" containerID="3f9fc24c22104932fd3ad4a6441598f5300091589b8ab6fe15f0589d69233a94" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.089039 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f9fc24c22104932fd3ad4a6441598f5300091589b8ab6fe15f0589d69233a94"} err="failed to get container status \"3f9fc24c22104932fd3ad4a6441598f5300091589b8ab6fe15f0589d69233a94\": rpc error: code = NotFound desc = could not find container \"3f9fc24c22104932fd3ad4a6441598f5300091589b8ab6fe15f0589d69233a94\": container with ID starting with 3f9fc24c22104932fd3ad4a6441598f5300091589b8ab6fe15f0589d69233a94 not found: ID does not exist" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.132453 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.187388 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-ceph\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.188513 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.188673 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-run\") pod \"cinder-backup-0\" (UID: 
\"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.188752 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-sys\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.188832 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qw9cd\" (UniqueName: \"kubernetes.io/projected/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-kube-api-access-qw9cd\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.188911 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-config-data\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.188996 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-scripts\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.189127 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-run\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.189127 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-sys\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.189298 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.189080 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.190230 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-lib-modules\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.190343 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-lib-modules\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") 
" pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.190358 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.190430 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-etc-nvme\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.190449 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.190523 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.190592 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-config-data-custom\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.190636 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-dev\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.190671 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.190840 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.190905 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.190860 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-var-lib-cinder\") pod \"cinder-backup-0\" (UID: 
\"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.190879 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-dev\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.190848 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-etc-nvme\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.191121 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.193677 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-config-data\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.194505 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-scripts\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.194560 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-ceph\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.199018 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-config-data-custom\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.199084 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.206413 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qw9cd\" (UniqueName: \"kubernetes.io/projected/3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6-kube-api-access-qw9cd\") pod \"cinder-backup-0\" (UID: \"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6\") " pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.250190 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.267282 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2dw72" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.267321 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2dw72" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.334116 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2dw72" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.587973 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b70cf8bb-240b-4336-ba16-2010c3ccdbea" path="/var/lib/kubelet/pods/b70cf8bb-240b-4336-ba16-2010c3ccdbea/volumes" Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.897096 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.971343 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6","Type":"ContainerStarted","Data":"bcacc82f4771ccb85978f957530c2de92b2a85578b4177e0f2826df1c650c70a"} Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.975514 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"25a18d82-9dba-46e3-b37d-da25ab109122","Type":"ContainerStarted","Data":"c34aeef4af069cf51cfefbff6ccecba01ad0c8677314c228e6b417a179c9d316"} Nov 27 08:57:08 crc kubenswrapper[4971]: I1127 08:57:08.975595 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"25a18d82-9dba-46e3-b37d-da25ab109122","Type":"ContainerStarted","Data":"d361c476fadbbdba7544fc0e4f80c5a314f6ff36b42abe585476afc33a31fd43"} Nov 27 08:57:09 crc kubenswrapper[4971]: I1127 08:57:09.033207 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2dw72" Nov 27 08:57:09 crc kubenswrapper[4971]: I1127 08:57:09.625728 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="0d609d64-7197-43f6-820c-cdfafd7bcfe8" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.1.92:8776/healthcheck\": read tcp 10.217.0.2:49560->10.217.1.92:8776: read: connection reset by peer" Nov 27 08:57:09 crc kubenswrapper[4971]: I1127 08:57:09.935276 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Nov 27 08:57:09 crc kubenswrapper[4971]: I1127 08:57:09.995443 4971 generic.go:334] "Generic (PLEG): container finished" podID="0d609d64-7197-43f6-820c-cdfafd7bcfe8" containerID="fa836b024b0c8aaddf48007e345a15e9fb10f7606b0222c3bdeaea669bc620ab" exitCode=0 Nov 27 08:57:09 crc kubenswrapper[4971]: I1127 08:57:09.995525 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0d609d64-7197-43f6-820c-cdfafd7bcfe8","Type":"ContainerDied","Data":"fa836b024b0c8aaddf48007e345a15e9fb10f7606b0222c3bdeaea669bc620ab"} Nov 27 08:57:09 crc kubenswrapper[4971]: I1127 08:57:09.999321 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"25a18d82-9dba-46e3-b37d-da25ab109122","Type":"ContainerStarted","Data":"c6a2aec4961b6a30f91847a21e148b0665201a98599aba85ca631b1835f0b7ee"} Nov 27 
08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.002658 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6","Type":"ContainerStarted","Data":"6e52799ca2e4139c0f65195723096062a9e2175ad9d7dec7dd2d82e4496e37a6"} Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.002701 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6","Type":"ContainerStarted","Data":"bcbb6dd25de224ba769a93b366446978468142a1b75fb432df7d8b3e32d87c8b"} Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.021076 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=2.558185411 podStartE2EDuration="3.021058635s" podCreationTimestamp="2025-11-27 08:57:07 +0000 UTC" firstStartedPulling="2025-11-27 08:57:08.131172179 +0000 UTC m=+7466.323216097" lastFinishedPulling="2025-11-27 08:57:08.594045403 +0000 UTC m=+7466.786089321" observedRunningTime="2025-11-27 08:57:10.01880757 +0000 UTC m=+7468.210851488" watchObservedRunningTime="2025-11-27 08:57:10.021058635 +0000 UTC m=+7468.213102553" Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.056451 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=2.691485428 podStartE2EDuration="3.056433158s" podCreationTimestamp="2025-11-27 08:57:07 +0000 UTC" firstStartedPulling="2025-11-27 08:57:08.905768689 +0000 UTC m=+7467.097812607" lastFinishedPulling="2025-11-27 08:57:09.270716419 +0000 UTC m=+7467.462760337" observedRunningTime="2025-11-27 08:57:10.056275203 +0000 UTC m=+7468.248319131" watchObservedRunningTime="2025-11-27 08:57:10.056433158 +0000 UTC m=+7468.248477076" Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.134044 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.245603 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d609d64-7197-43f6-820c-cdfafd7bcfe8-logs\") pod \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.245716 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-config-data\") pod \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.245970 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-config-data-custom\") pod \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.246035 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-scripts\") pod \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.246071 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vdrj\" (UniqueName: \"kubernetes.io/projected/0d609d64-7197-43f6-820c-cdfafd7bcfe8-kube-api-access-4vdrj\") pod \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.246113 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-combined-ca-bundle\") pod \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.246173 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d609d64-7197-43f6-820c-cdfafd7bcfe8-etc-machine-id\") pod \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\" (UID: \"0d609d64-7197-43f6-820c-cdfafd7bcfe8\") " Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.246697 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d609d64-7197-43f6-820c-cdfafd7bcfe8-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "0d609d64-7197-43f6-820c-cdfafd7bcfe8" (UID: "0d609d64-7197-43f6-820c-cdfafd7bcfe8"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.247180 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d609d64-7197-43f6-820c-cdfafd7bcfe8-logs" (OuterVolumeSpecName: "logs") pod "0d609d64-7197-43f6-820c-cdfafd7bcfe8" (UID: "0d609d64-7197-43f6-820c-cdfafd7bcfe8"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.254687 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-scripts" (OuterVolumeSpecName: "scripts") pod "0d609d64-7197-43f6-820c-cdfafd7bcfe8" (UID: "0d609d64-7197-43f6-820c-cdfafd7bcfe8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.257720 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d609d64-7197-43f6-820c-cdfafd7bcfe8-kube-api-access-4vdrj" (OuterVolumeSpecName: "kube-api-access-4vdrj") pod "0d609d64-7197-43f6-820c-cdfafd7bcfe8" (UID: "0d609d64-7197-43f6-820c-cdfafd7bcfe8"). InnerVolumeSpecName "kube-api-access-4vdrj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.259395 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0d609d64-7197-43f6-820c-cdfafd7bcfe8" (UID: "0d609d64-7197-43f6-820c-cdfafd7bcfe8"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.293362 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0d609d64-7197-43f6-820c-cdfafd7bcfe8" (UID: "0d609d64-7197-43f6-820c-cdfafd7bcfe8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.338498 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-config-data" (OuterVolumeSpecName: "config-data") pod "0d609d64-7197-43f6-820c-cdfafd7bcfe8" (UID: "0d609d64-7197-43f6-820c-cdfafd7bcfe8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.353444 4971 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.353486 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.353498 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vdrj\" (UniqueName: \"kubernetes.io/projected/0d609d64-7197-43f6-820c-cdfafd7bcfe8-kube-api-access-4vdrj\") on node \"crc\" DevicePath \"\"" Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.353509 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.353520 4971 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d609d64-7197-43f6-820c-cdfafd7bcfe8-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.353529 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d609d64-7197-43f6-820c-cdfafd7bcfe8-logs\") on node \"crc\" DevicePath \"\"" Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.353557 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d609d64-7197-43f6-820c-cdfafd7bcfe8-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:57:10 crc kubenswrapper[4971]: I1127 08:57:10.532114 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2dw72"] Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.017612 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0d609d64-7197-43f6-820c-cdfafd7bcfe8","Type":"ContainerDied","Data":"99e7a4178fa4e35b16f09acc3a4e4eaba0a53acfda61f4f373c844d439862901"} Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.018040 4971 scope.go:117] "RemoveContainer" containerID="fa836b024b0c8aaddf48007e345a15e9fb10f7606b0222c3bdeaea669bc620ab" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.017932 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.018078 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2dw72" podUID="11702b67-c472-4edb-8e00-88a6b36cd1b9" containerName="registry-server" containerID="cri-o://66ef438949b3b47aa8977cb824a2fa80b00ccc172c0695088633cca72057f76d" gracePeriod=2 Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.060687 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.073508 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.080659 4971 scope.go:117] "RemoveContainer" containerID="692e20e108bbc1595c1ecee5d8d9a890a48fb05164378b3dab73527d66185071" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.088496 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Nov 27 08:57:11 crc kubenswrapper[4971]: E1127 08:57:11.088976 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d609d64-7197-43f6-820c-cdfafd7bcfe8" containerName="cinder-api" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.088998 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d609d64-7197-43f6-820c-cdfafd7bcfe8" containerName="cinder-api" Nov 27 08:57:11 crc kubenswrapper[4971]: E1127 08:57:11.089014 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d609d64-7197-43f6-820c-cdfafd7bcfe8" containerName="cinder-api-log" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.089020 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d609d64-7197-43f6-820c-cdfafd7bcfe8" containerName="cinder-api-log" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.089680 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d609d64-7197-43f6-820c-cdfafd7bcfe8" containerName="cinder-api" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.090963 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d609d64-7197-43f6-820c-cdfafd7bcfe8" containerName="cinder-api-log" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.092249 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.094991 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.149505 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.170481 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9382e66-cf98-4705-8a0d-475e3f29ddea-logs\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.170578 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9382e66-cf98-4705-8a0d-475e3f29ddea-scripts\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.170692 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9382e66-cf98-4705-8a0d-475e3f29ddea-config-data\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.170725 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b9382e66-cf98-4705-8a0d-475e3f29ddea-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.170766 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9382e66-cf98-4705-8a0d-475e3f29ddea-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.170889 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2c5b7\" (UniqueName: \"kubernetes.io/projected/b9382e66-cf98-4705-8a0d-475e3f29ddea-kube-api-access-2c5b7\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.170993 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b9382e66-cf98-4705-8a0d-475e3f29ddea-config-data-custom\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.272590 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9382e66-cf98-4705-8a0d-475e3f29ddea-scripts\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.272675 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/b9382e66-cf98-4705-8a0d-475e3f29ddea-config-data\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.272710 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b9382e66-cf98-4705-8a0d-475e3f29ddea-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.272729 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9382e66-cf98-4705-8a0d-475e3f29ddea-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.272769 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2c5b7\" (UniqueName: \"kubernetes.io/projected/b9382e66-cf98-4705-8a0d-475e3f29ddea-kube-api-access-2c5b7\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.272827 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b9382e66-cf98-4705-8a0d-475e3f29ddea-config-data-custom\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.272864 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9382e66-cf98-4705-8a0d-475e3f29ddea-logs\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.273260 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9382e66-cf98-4705-8a0d-475e3f29ddea-logs\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.281342 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9382e66-cf98-4705-8a0d-475e3f29ddea-scripts\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.283901 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9382e66-cf98-4705-8a0d-475e3f29ddea-config-data\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.283945 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b9382e66-cf98-4705-8a0d-475e3f29ddea-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.291961 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/b9382e66-cf98-4705-8a0d-475e3f29ddea-config-data-custom\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.293739 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9382e66-cf98-4705-8a0d-475e3f29ddea-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.310289 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2c5b7\" (UniqueName: \"kubernetes.io/projected/b9382e66-cf98-4705-8a0d-475e3f29ddea-kube-api-access-2c5b7\") pod \"cinder-api-0\" (UID: \"b9382e66-cf98-4705-8a0d-475e3f29ddea\") " pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.428909 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.581582 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2dw72" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.685442 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11702b67-c472-4edb-8e00-88a6b36cd1b9-catalog-content\") pod \"11702b67-c472-4edb-8e00-88a6b36cd1b9\" (UID: \"11702b67-c472-4edb-8e00-88a6b36cd1b9\") " Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.685509 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11702b67-c472-4edb-8e00-88a6b36cd1b9-utilities\") pod \"11702b67-c472-4edb-8e00-88a6b36cd1b9\" (UID: \"11702b67-c472-4edb-8e00-88a6b36cd1b9\") " Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.685737 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2k2h\" (UniqueName: \"kubernetes.io/projected/11702b67-c472-4edb-8e00-88a6b36cd1b9-kube-api-access-z2k2h\") pod \"11702b67-c472-4edb-8e00-88a6b36cd1b9\" (UID: \"11702b67-c472-4edb-8e00-88a6b36cd1b9\") " Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.688095 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11702b67-c472-4edb-8e00-88a6b36cd1b9-utilities" (OuterVolumeSpecName: "utilities") pod "11702b67-c472-4edb-8e00-88a6b36cd1b9" (UID: "11702b67-c472-4edb-8e00-88a6b36cd1b9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.698757 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11702b67-c472-4edb-8e00-88a6b36cd1b9-kube-api-access-z2k2h" (OuterVolumeSpecName: "kube-api-access-z2k2h") pod "11702b67-c472-4edb-8e00-88a6b36cd1b9" (UID: "11702b67-c472-4edb-8e00-88a6b36cd1b9"). InnerVolumeSpecName "kube-api-access-z2k2h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.710981 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11702b67-c472-4edb-8e00-88a6b36cd1b9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "11702b67-c472-4edb-8e00-88a6b36cd1b9" (UID: "11702b67-c472-4edb-8e00-88a6b36cd1b9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.789077 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11702b67-c472-4edb-8e00-88a6b36cd1b9-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.789129 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11702b67-c472-4edb-8e00-88a6b36cd1b9-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.789143 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2k2h\" (UniqueName: \"kubernetes.io/projected/11702b67-c472-4edb-8e00-88a6b36cd1b9-kube-api-access-z2k2h\") on node \"crc\" DevicePath \"\"" Nov 27 08:57:11 crc kubenswrapper[4971]: I1127 08:57:11.965878 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 27 08:57:12 crc kubenswrapper[4971]: I1127 08:57:12.055137 4971 generic.go:334] "Generic (PLEG): container finished" podID="11702b67-c472-4edb-8e00-88a6b36cd1b9" containerID="66ef438949b3b47aa8977cb824a2fa80b00ccc172c0695088633cca72057f76d" exitCode=0 Nov 27 08:57:12 crc kubenswrapper[4971]: I1127 08:57:12.055210 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2dw72" event={"ID":"11702b67-c472-4edb-8e00-88a6b36cd1b9","Type":"ContainerDied","Data":"66ef438949b3b47aa8977cb824a2fa80b00ccc172c0695088633cca72057f76d"} Nov 27 08:57:12 crc kubenswrapper[4971]: I1127 08:57:12.055250 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2dw72" event={"ID":"11702b67-c472-4edb-8e00-88a6b36cd1b9","Type":"ContainerDied","Data":"2a7681a9fd212240c13d50545679bbd9f0b1b72c51ec7327b25473ed4f42d80f"} Nov 27 08:57:12 crc kubenswrapper[4971]: I1127 08:57:12.055270 4971 scope.go:117] "RemoveContainer" containerID="66ef438949b3b47aa8977cb824a2fa80b00ccc172c0695088633cca72057f76d" Nov 27 08:57:12 crc kubenswrapper[4971]: I1127 08:57:12.055464 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2dw72" Nov 27 08:57:12 crc kubenswrapper[4971]: I1127 08:57:12.067977 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b9382e66-cf98-4705-8a0d-475e3f29ddea","Type":"ContainerStarted","Data":"968f7cab378a7e0eadc811c76a006804ea3f3a0808248ffe8920f02630687373"} Nov 27 08:57:12 crc kubenswrapper[4971]: I1127 08:57:12.098942 4971 scope.go:117] "RemoveContainer" containerID="fa4e8935b8336cad3d53c0d8c312bacbf5020c9b14ffeb785888710c5fe8c27f" Nov 27 08:57:12 crc kubenswrapper[4971]: I1127 08:57:12.104309 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2dw72"] Nov 27 08:57:12 crc kubenswrapper[4971]: I1127 08:57:12.116968 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2dw72"] Nov 27 08:57:12 crc kubenswrapper[4971]: I1127 08:57:12.126583 4971 scope.go:117] "RemoveContainer" containerID="bb52ce97afde6e77a771dad4f1f77a4ba90bf9194a83d11f8fd7974d23593bbd" Nov 27 08:57:12 crc kubenswrapper[4971]: I1127 08:57:12.171187 4971 scope.go:117] "RemoveContainer" containerID="66ef438949b3b47aa8977cb824a2fa80b00ccc172c0695088633cca72057f76d" Nov 27 08:57:12 crc kubenswrapper[4971]: E1127 08:57:12.172235 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66ef438949b3b47aa8977cb824a2fa80b00ccc172c0695088633cca72057f76d\": container with ID starting with 66ef438949b3b47aa8977cb824a2fa80b00ccc172c0695088633cca72057f76d not found: ID does not exist" containerID="66ef438949b3b47aa8977cb824a2fa80b00ccc172c0695088633cca72057f76d" Nov 27 08:57:12 crc kubenswrapper[4971]: I1127 08:57:12.172293 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66ef438949b3b47aa8977cb824a2fa80b00ccc172c0695088633cca72057f76d"} err="failed to get container status \"66ef438949b3b47aa8977cb824a2fa80b00ccc172c0695088633cca72057f76d\": rpc error: code = NotFound desc = could not find container \"66ef438949b3b47aa8977cb824a2fa80b00ccc172c0695088633cca72057f76d\": container with ID starting with 66ef438949b3b47aa8977cb824a2fa80b00ccc172c0695088633cca72057f76d not found: ID does not exist" Nov 27 08:57:12 crc kubenswrapper[4971]: I1127 08:57:12.172332 4971 scope.go:117] "RemoveContainer" containerID="fa4e8935b8336cad3d53c0d8c312bacbf5020c9b14ffeb785888710c5fe8c27f" Nov 27 08:57:12 crc kubenswrapper[4971]: E1127 08:57:12.172843 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa4e8935b8336cad3d53c0d8c312bacbf5020c9b14ffeb785888710c5fe8c27f\": container with ID starting with fa4e8935b8336cad3d53c0d8c312bacbf5020c9b14ffeb785888710c5fe8c27f not found: ID does not exist" containerID="fa4e8935b8336cad3d53c0d8c312bacbf5020c9b14ffeb785888710c5fe8c27f" Nov 27 08:57:12 crc kubenswrapper[4971]: I1127 08:57:12.172916 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa4e8935b8336cad3d53c0d8c312bacbf5020c9b14ffeb785888710c5fe8c27f"} err="failed to get container status \"fa4e8935b8336cad3d53c0d8c312bacbf5020c9b14ffeb785888710c5fe8c27f\": rpc error: code = NotFound desc = could not find container \"fa4e8935b8336cad3d53c0d8c312bacbf5020c9b14ffeb785888710c5fe8c27f\": container with ID starting with fa4e8935b8336cad3d53c0d8c312bacbf5020c9b14ffeb785888710c5fe8c27f not found: ID does not exist" 
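The sequence just above, where RemoveContainer on 66ef4389... is followed by "ContainerStatus from runtime service failed ... code = NotFound" and a "DeleteContainer returned error", is a benign double-delete: the container was already gone from CRI-O by the time the kubelet re-checked it, so the error is logged and ignored. A minimal sketch that separates these NotFound cases from removal errors worth investigating; the log path and the exact message text are assumptions based on this log.

#!/usr/bin/env python3
"""Sketch: classify kubelet DeleteContainer errors as benign NotFound vs. other."""
import re
import sys

# Matches the container ID inside lines like the pod_container_deletor entries above.
DELETE_ERR = re.compile(r'"DeleteContainer returned error".*?"ID":"([0-9a-f]+)"')

def classify(path):
    benign, suspect = [], []
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            m = DELETE_ERR.search(line)
            if not m:
                continue
            # NotFound means the runtime already removed the container.
            (benign if "code = NotFound" in line else suspect).append(m.group(1))
    return benign, suspect

if __name__ == "__main__":
    benign, suspect = classify(sys.argv[1] if len(sys.argv) > 1 else "kubelet.log")
    print(f"{len(benign)} benign NotFound deletions; {len(suspect)} to investigate")
    for cid in suspect:
        print("investigate:", cid[:13])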
Nov 27 08:57:12 crc kubenswrapper[4971]: I1127 08:57:12.172966 4971 scope.go:117] "RemoveContainer" containerID="bb52ce97afde6e77a771dad4f1f77a4ba90bf9194a83d11f8fd7974d23593bbd" Nov 27 08:57:12 crc kubenswrapper[4971]: E1127 08:57:12.173475 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb52ce97afde6e77a771dad4f1f77a4ba90bf9194a83d11f8fd7974d23593bbd\": container with ID starting with bb52ce97afde6e77a771dad4f1f77a4ba90bf9194a83d11f8fd7974d23593bbd not found: ID does not exist" containerID="bb52ce97afde6e77a771dad4f1f77a4ba90bf9194a83d11f8fd7974d23593bbd" Nov 27 08:57:12 crc kubenswrapper[4971]: I1127 08:57:12.173523 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb52ce97afde6e77a771dad4f1f77a4ba90bf9194a83d11f8fd7974d23593bbd"} err="failed to get container status \"bb52ce97afde6e77a771dad4f1f77a4ba90bf9194a83d11f8fd7974d23593bbd\": rpc error: code = NotFound desc = could not find container \"bb52ce97afde6e77a771dad4f1f77a4ba90bf9194a83d11f8fd7974d23593bbd\": container with ID starting with bb52ce97afde6e77a771dad4f1f77a4ba90bf9194a83d11f8fd7974d23593bbd not found: ID does not exist" Nov 27 08:57:12 crc kubenswrapper[4971]: I1127 08:57:12.480809 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:12 crc kubenswrapper[4971]: I1127 08:57:12.562486 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d609d64-7197-43f6-820c-cdfafd7bcfe8" path="/var/lib/kubelet/pods/0d609d64-7197-43f6-820c-cdfafd7bcfe8/volumes" Nov 27 08:57:12 crc kubenswrapper[4971]: I1127 08:57:12.563947 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11702b67-c472-4edb-8e00-88a6b36cd1b9" path="/var/lib/kubelet/pods/11702b67-c472-4edb-8e00-88a6b36cd1b9/volumes" Nov 27 08:57:13 crc kubenswrapper[4971]: I1127 08:57:13.091706 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b9382e66-cf98-4705-8a0d-475e3f29ddea","Type":"ContainerStarted","Data":"021722b5ecb1cb55486d835976adac37b316947fec781160a4450ee10b0db5e7"} Nov 27 08:57:13 crc kubenswrapper[4971]: I1127 08:57:13.250407 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Nov 27 08:57:14 crc kubenswrapper[4971]: I1127 08:57:14.110689 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b9382e66-cf98-4705-8a0d-475e3f29ddea","Type":"ContainerStarted","Data":"cea30fb681ad4c508528e512836961fe58b8309ffb74db677cca8e1ff544cf7b"} Nov 27 08:57:14 crc kubenswrapper[4971]: I1127 08:57:14.110892 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Nov 27 08:57:14 crc kubenswrapper[4971]: I1127 08:57:14.138606 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.138583557 podStartE2EDuration="3.138583557s" podCreationTimestamp="2025-11-27 08:57:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:57:14.129352473 +0000 UTC m=+7472.321396401" watchObservedRunningTime="2025-11-27 08:57:14.138583557 +0000 UTC m=+7472.330627475" Nov 27 08:57:15 crc kubenswrapper[4971]: I1127 08:57:15.164638 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/cinder-scheduler-0" Nov 27 08:57:15 crc kubenswrapper[4971]: I1127 08:57:15.254360 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 27 08:57:16 crc kubenswrapper[4971]: I1127 08:57:16.148597 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="4a80336f-fdf0-4b01-878a-af81d4c5f310" containerName="cinder-scheduler" containerID="cri-o://0d0364a476dcd68601131461e1cd0c874bdc8bb09adddcf0801b2e258db73f13" gracePeriod=30 Nov 27 08:57:16 crc kubenswrapper[4971]: I1127 08:57:16.148676 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="4a80336f-fdf0-4b01-878a-af81d4c5f310" containerName="probe" containerID="cri-o://292271dd559ae909c51963e1d0118c9452258c4e9237775dc4029634b66a7285" gracePeriod=30 Nov 27 08:57:16 crc kubenswrapper[4971]: I1127 08:57:16.493322 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nwtx2"] Nov 27 08:57:16 crc kubenswrapper[4971]: E1127 08:57:16.498008 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11702b67-c472-4edb-8e00-88a6b36cd1b9" containerName="extract-content" Nov 27 08:57:16 crc kubenswrapper[4971]: I1127 08:57:16.498044 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="11702b67-c472-4edb-8e00-88a6b36cd1b9" containerName="extract-content" Nov 27 08:57:16 crc kubenswrapper[4971]: E1127 08:57:16.498113 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11702b67-c472-4edb-8e00-88a6b36cd1b9" containerName="extract-utilities" Nov 27 08:57:16 crc kubenswrapper[4971]: I1127 08:57:16.498124 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="11702b67-c472-4edb-8e00-88a6b36cd1b9" containerName="extract-utilities" Nov 27 08:57:16 crc kubenswrapper[4971]: E1127 08:57:16.498138 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11702b67-c472-4edb-8e00-88a6b36cd1b9" containerName="registry-server" Nov 27 08:57:16 crc kubenswrapper[4971]: I1127 08:57:16.498147 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="11702b67-c472-4edb-8e00-88a6b36cd1b9" containerName="registry-server" Nov 27 08:57:16 crc kubenswrapper[4971]: I1127 08:57:16.498358 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="11702b67-c472-4edb-8e00-88a6b36cd1b9" containerName="registry-server" Nov 27 08:57:16 crc kubenswrapper[4971]: I1127 08:57:16.499879 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nwtx2" Nov 27 08:57:16 crc kubenswrapper[4971]: I1127 08:57:16.516650 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nwtx2"] Nov 27 08:57:16 crc kubenswrapper[4971]: I1127 08:57:16.594977 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f9813c-4df5-458c-a044-f8488e44e16f-utilities\") pod \"certified-operators-nwtx2\" (UID: \"81f9813c-4df5-458c-a044-f8488e44e16f\") " pod="openshift-marketplace/certified-operators-nwtx2" Nov 27 08:57:16 crc kubenswrapper[4971]: I1127 08:57:16.595051 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgjns\" (UniqueName: \"kubernetes.io/projected/81f9813c-4df5-458c-a044-f8488e44e16f-kube-api-access-bgjns\") pod \"certified-operators-nwtx2\" (UID: \"81f9813c-4df5-458c-a044-f8488e44e16f\") " pod="openshift-marketplace/certified-operators-nwtx2" Nov 27 08:57:16 crc kubenswrapper[4971]: I1127 08:57:16.595158 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f9813c-4df5-458c-a044-f8488e44e16f-catalog-content\") pod \"certified-operators-nwtx2\" (UID: \"81f9813c-4df5-458c-a044-f8488e44e16f\") " pod="openshift-marketplace/certified-operators-nwtx2" Nov 27 08:57:16 crc kubenswrapper[4971]: I1127 08:57:16.696984 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f9813c-4df5-458c-a044-f8488e44e16f-utilities\") pod \"certified-operators-nwtx2\" (UID: \"81f9813c-4df5-458c-a044-f8488e44e16f\") " pod="openshift-marketplace/certified-operators-nwtx2" Nov 27 08:57:16 crc kubenswrapper[4971]: I1127 08:57:16.697086 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgjns\" (UniqueName: \"kubernetes.io/projected/81f9813c-4df5-458c-a044-f8488e44e16f-kube-api-access-bgjns\") pod \"certified-operators-nwtx2\" (UID: \"81f9813c-4df5-458c-a044-f8488e44e16f\") " pod="openshift-marketplace/certified-operators-nwtx2" Nov 27 08:57:16 crc kubenswrapper[4971]: I1127 08:57:16.697159 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f9813c-4df5-458c-a044-f8488e44e16f-catalog-content\") pod \"certified-operators-nwtx2\" (UID: \"81f9813c-4df5-458c-a044-f8488e44e16f\") " pod="openshift-marketplace/certified-operators-nwtx2" Nov 27 08:57:16 crc kubenswrapper[4971]: I1127 08:57:16.697464 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f9813c-4df5-458c-a044-f8488e44e16f-utilities\") pod \"certified-operators-nwtx2\" (UID: \"81f9813c-4df5-458c-a044-f8488e44e16f\") " pod="openshift-marketplace/certified-operators-nwtx2" Nov 27 08:57:16 crc kubenswrapper[4971]: I1127 08:57:16.697739 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f9813c-4df5-458c-a044-f8488e44e16f-catalog-content\") pod \"certified-operators-nwtx2\" (UID: \"81f9813c-4df5-458c-a044-f8488e44e16f\") " pod="openshift-marketplace/certified-operators-nwtx2" Nov 27 08:57:16 crc kubenswrapper[4971]: I1127 08:57:16.719634 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-bgjns\" (UniqueName: \"kubernetes.io/projected/81f9813c-4df5-458c-a044-f8488e44e16f-kube-api-access-bgjns\") pod \"certified-operators-nwtx2\" (UID: \"81f9813c-4df5-458c-a044-f8488e44e16f\") " pod="openshift-marketplace/certified-operators-nwtx2" Nov 27 08:57:16 crc kubenswrapper[4971]: I1127 08:57:16.830559 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nwtx2" Nov 27 08:57:17 crc kubenswrapper[4971]: I1127 08:57:17.186269 4971 generic.go:334] "Generic (PLEG): container finished" podID="4a80336f-fdf0-4b01-878a-af81d4c5f310" containerID="292271dd559ae909c51963e1d0118c9452258c4e9237775dc4029634b66a7285" exitCode=0 Nov 27 08:57:17 crc kubenswrapper[4971]: I1127 08:57:17.186492 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4a80336f-fdf0-4b01-878a-af81d4c5f310","Type":"ContainerDied","Data":"292271dd559ae909c51963e1d0118c9452258c4e9237775dc4029634b66a7285"} Nov 27 08:57:17 crc kubenswrapper[4971]: W1127 08:57:17.440806 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81f9813c_4df5_458c_a044_f8488e44e16f.slice/crio-796d5cebfd5acee53af75a42fb1828deb86074edef6f146c41852df18db2591a WatchSource:0}: Error finding container 796d5cebfd5acee53af75a42fb1828deb86074edef6f146c41852df18db2591a: Status 404 returned error can't find the container with id 796d5cebfd5acee53af75a42fb1828deb86074edef6f146c41852df18db2591a Nov 27 08:57:17 crc kubenswrapper[4971]: I1127 08:57:17.443072 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nwtx2"] Nov 27 08:57:17 crc kubenswrapper[4971]: I1127 08:57:17.743018 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.210105 4971 generic.go:334] "Generic (PLEG): container finished" podID="81f9813c-4df5-458c-a044-f8488e44e16f" containerID="72b5ac953e778adb62d0a84e18f96e661077e3cbd9d5a1d6cb28cdeba7857e2b" exitCode=0 Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.210161 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwtx2" event={"ID":"81f9813c-4df5-458c-a044-f8488e44e16f","Type":"ContainerDied","Data":"72b5ac953e778adb62d0a84e18f96e661077e3cbd9d5a1d6cb28cdeba7857e2b"} Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.210211 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwtx2" event={"ID":"81f9813c-4df5-458c-a044-f8488e44e16f","Type":"ContainerStarted","Data":"796d5cebfd5acee53af75a42fb1828deb86074edef6f146c41852df18db2591a"} Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.214756 4971 generic.go:334] "Generic (PLEG): container finished" podID="4a80336f-fdf0-4b01-878a-af81d4c5f310" containerID="0d0364a476dcd68601131461e1cd0c874bdc8bb09adddcf0801b2e258db73f13" exitCode=0 Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.214819 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4a80336f-fdf0-4b01-878a-af81d4c5f310","Type":"ContainerDied","Data":"0d0364a476dcd68601131461e1cd0c874bdc8bb09adddcf0801b2e258db73f13"} Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.466189 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.531300 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.543478 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-config-data-custom\") pod \"4a80336f-fdf0-4b01-878a-af81d4c5f310\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.543669 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-combined-ca-bundle\") pod \"4a80336f-fdf0-4b01-878a-af81d4c5f310\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.543750 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-scripts\") pod \"4a80336f-fdf0-4b01-878a-af81d4c5f310\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.543806 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-config-data\") pod \"4a80336f-fdf0-4b01-878a-af81d4c5f310\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.544213 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7wlh\" (UniqueName: \"kubernetes.io/projected/4a80336f-fdf0-4b01-878a-af81d4c5f310-kube-api-access-q7wlh\") pod \"4a80336f-fdf0-4b01-878a-af81d4c5f310\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.544278 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4a80336f-fdf0-4b01-878a-af81d4c5f310-etc-machine-id\") pod \"4a80336f-fdf0-4b01-878a-af81d4c5f310\" (UID: \"4a80336f-fdf0-4b01-878a-af81d4c5f310\") " Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.544923 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4a80336f-fdf0-4b01-878a-af81d4c5f310-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "4a80336f-fdf0-4b01-878a-af81d4c5f310" (UID: "4a80336f-fdf0-4b01-878a-af81d4c5f310"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.551933 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-scripts" (OuterVolumeSpecName: "scripts") pod "4a80336f-fdf0-4b01-878a-af81d4c5f310" (UID: "4a80336f-fdf0-4b01-878a-af81d4c5f310"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.558945 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a80336f-fdf0-4b01-878a-af81d4c5f310-kube-api-access-q7wlh" (OuterVolumeSpecName: "kube-api-access-q7wlh") pod "4a80336f-fdf0-4b01-878a-af81d4c5f310" (UID: "4a80336f-fdf0-4b01-878a-af81d4c5f310"). InnerVolumeSpecName "kube-api-access-q7wlh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.559795 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4a80336f-fdf0-4b01-878a-af81d4c5f310" (UID: "4a80336f-fdf0-4b01-878a-af81d4c5f310"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.647620 4971 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.647661 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.647674 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7wlh\" (UniqueName: \"kubernetes.io/projected/4a80336f-fdf0-4b01-878a-af81d4c5f310-kube-api-access-q7wlh\") on node \"crc\" DevicePath \"\"" Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.647688 4971 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4a80336f-fdf0-4b01-878a-af81d4c5f310-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.666765 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4a80336f-fdf0-4b01-878a-af81d4c5f310" (UID: "4a80336f-fdf0-4b01-878a-af81d4c5f310"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.674316 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-config-data" (OuterVolumeSpecName: "config-data") pod "4a80336f-fdf0-4b01-878a-af81d4c5f310" (UID: "4a80336f-fdf0-4b01-878a-af81d4c5f310"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.752262 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:57:18 crc kubenswrapper[4971]: I1127 08:57:18.752332 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a80336f-fdf0-4b01-878a-af81d4c5f310-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.232184 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4a80336f-fdf0-4b01-878a-af81d4c5f310","Type":"ContainerDied","Data":"7a908779fcf640d268ca773ea0ff158c16327c67d57e06e748d670df10d9491f"} Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.232299 4971 scope.go:117] "RemoveContainer" containerID="292271dd559ae909c51963e1d0118c9452258c4e9237775dc4029634b66a7285" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.232472 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.274008 4971 scope.go:117] "RemoveContainer" containerID="0d0364a476dcd68601131461e1cd0c874bdc8bb09adddcf0801b2e258db73f13" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.278889 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.298636 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.310338 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Nov 27 08:57:19 crc kubenswrapper[4971]: E1127 08:57:19.310803 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a80336f-fdf0-4b01-878a-af81d4c5f310" containerName="probe" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.310831 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a80336f-fdf0-4b01-878a-af81d4c5f310" containerName="probe" Nov 27 08:57:19 crc kubenswrapper[4971]: E1127 08:57:19.310865 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a80336f-fdf0-4b01-878a-af81d4c5f310" containerName="cinder-scheduler" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.310874 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a80336f-fdf0-4b01-878a-af81d4c5f310" containerName="cinder-scheduler" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.311106 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a80336f-fdf0-4b01-878a-af81d4c5f310" containerName="cinder-scheduler" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.311138 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a80336f-fdf0-4b01-878a-af81d4c5f310" containerName="probe" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.312701 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.320830 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.339270 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.466401 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/de667c8e-c3dc-48f7-9f4a-4330a5a4416d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"de667c8e-c3dc-48f7-9f4a-4330a5a4416d\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.466515 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/de667c8e-c3dc-48f7-9f4a-4330a5a4416d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"de667c8e-c3dc-48f7-9f4a-4330a5a4416d\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.466615 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qpwj\" (UniqueName: \"kubernetes.io/projected/de667c8e-c3dc-48f7-9f4a-4330a5a4416d-kube-api-access-5qpwj\") pod \"cinder-scheduler-0\" (UID: \"de667c8e-c3dc-48f7-9f4a-4330a5a4416d\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.466668 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de667c8e-c3dc-48f7-9f4a-4330a5a4416d-scripts\") pod \"cinder-scheduler-0\" (UID: \"de667c8e-c3dc-48f7-9f4a-4330a5a4416d\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.466725 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de667c8e-c3dc-48f7-9f4a-4330a5a4416d-config-data\") pod \"cinder-scheduler-0\" (UID: \"de667c8e-c3dc-48f7-9f4a-4330a5a4416d\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.467384 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de667c8e-c3dc-48f7-9f4a-4330a5a4416d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"de667c8e-c3dc-48f7-9f4a-4330a5a4416d\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.569810 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de667c8e-c3dc-48f7-9f4a-4330a5a4416d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"de667c8e-c3dc-48f7-9f4a-4330a5a4416d\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.569903 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/de667c8e-c3dc-48f7-9f4a-4330a5a4416d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"de667c8e-c3dc-48f7-9f4a-4330a5a4416d\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.569988 4971 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/de667c8e-c3dc-48f7-9f4a-4330a5a4416d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"de667c8e-c3dc-48f7-9f4a-4330a5a4416d\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.570017 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qpwj\" (UniqueName: \"kubernetes.io/projected/de667c8e-c3dc-48f7-9f4a-4330a5a4416d-kube-api-access-5qpwj\") pod \"cinder-scheduler-0\" (UID: \"de667c8e-c3dc-48f7-9f4a-4330a5a4416d\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.570053 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de667c8e-c3dc-48f7-9f4a-4330a5a4416d-scripts\") pod \"cinder-scheduler-0\" (UID: \"de667c8e-c3dc-48f7-9f4a-4330a5a4416d\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.570083 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de667c8e-c3dc-48f7-9f4a-4330a5a4416d-config-data\") pod \"cinder-scheduler-0\" (UID: \"de667c8e-c3dc-48f7-9f4a-4330a5a4416d\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.570841 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/de667c8e-c3dc-48f7-9f4a-4330a5a4416d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"de667c8e-c3dc-48f7-9f4a-4330a5a4416d\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.574387 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de667c8e-c3dc-48f7-9f4a-4330a5a4416d-scripts\") pod \"cinder-scheduler-0\" (UID: \"de667c8e-c3dc-48f7-9f4a-4330a5a4416d\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.577128 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de667c8e-c3dc-48f7-9f4a-4330a5a4416d-config-data\") pod \"cinder-scheduler-0\" (UID: \"de667c8e-c3dc-48f7-9f4a-4330a5a4416d\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.578096 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de667c8e-c3dc-48f7-9f4a-4330a5a4416d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"de667c8e-c3dc-48f7-9f4a-4330a5a4416d\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.583002 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/de667c8e-c3dc-48f7-9f4a-4330a5a4416d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"de667c8e-c3dc-48f7-9f4a-4330a5a4416d\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 crc kubenswrapper[4971]: I1127 08:57:19.590081 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qpwj\" (UniqueName: \"kubernetes.io/projected/de667c8e-c3dc-48f7-9f4a-4330a5a4416d-kube-api-access-5qpwj\") pod \"cinder-scheduler-0\" (UID: \"de667c8e-c3dc-48f7-9f4a-4330a5a4416d\") " pod="openstack/cinder-scheduler-0" Nov 27 08:57:19 
crc kubenswrapper[4971]: I1127 08:57:19.643653 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Nov 27 08:57:20 crc kubenswrapper[4971]: I1127 08:57:20.082312 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Nov 27 08:57:20 crc kubenswrapper[4971]: W1127 08:57:20.084176 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde667c8e_c3dc_48f7_9f4a_4330a5a4416d.slice/crio-c735f4c2625dcf2374033ea2a5ba392d4206c68c7df15948deb6f9b4d8b241ff WatchSource:0}: Error finding container c735f4c2625dcf2374033ea2a5ba392d4206c68c7df15948deb6f9b4d8b241ff: Status 404 returned error can't find the container with id c735f4c2625dcf2374033ea2a5ba392d4206c68c7df15948deb6f9b4d8b241ff
Nov 27 08:57:20 crc kubenswrapper[4971]: I1127 08:57:20.250068 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"de667c8e-c3dc-48f7-9f4a-4330a5a4416d","Type":"ContainerStarted","Data":"c735f4c2625dcf2374033ea2a5ba392d4206c68c7df15948deb6f9b4d8b241ff"}
Nov 27 08:57:20 crc kubenswrapper[4971]: I1127 08:57:20.565896 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a80336f-fdf0-4b01-878a-af81d4c5f310" path="/var/lib/kubelet/pods/4a80336f-fdf0-4b01-878a-af81d4c5f310/volumes"
Nov 27 08:57:21 crc kubenswrapper[4971]: I1127 08:57:21.267159 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"de667c8e-c3dc-48f7-9f4a-4330a5a4416d","Type":"ContainerStarted","Data":"333e2bd634751c43a765aabd1d60183781d278fa1a9b79288a79d81fa878378e"}
Nov 27 08:57:23 crc kubenswrapper[4971]: I1127 08:57:23.291829 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"de667c8e-c3dc-48f7-9f4a-4330a5a4416d","Type":"ContainerStarted","Data":"775d07975ec1b5ab562b30af0af401b5b99c6632d35640398c279dab1c89240f"}
Nov 27 08:57:23 crc kubenswrapper[4971]: I1127 08:57:23.296098 4971 generic.go:334] "Generic (PLEG): container finished" podID="81f9813c-4df5-458c-a044-f8488e44e16f" containerID="304e3420b6798a57b063de909ae9fce58feca84fd196a4eafe01d7590b61b083" exitCode=0
Nov 27 08:57:23 crc kubenswrapper[4971]: I1127 08:57:23.296133 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwtx2" event={"ID":"81f9813c-4df5-458c-a044-f8488e44e16f","Type":"ContainerDied","Data":"304e3420b6798a57b063de909ae9fce58feca84fd196a4eafe01d7590b61b083"}
Nov 27 08:57:23 crc kubenswrapper[4971]: I1127 08:57:23.298756 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 27 08:57:23 crc kubenswrapper[4971]: I1127 08:57:23.326991 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.32697045 podStartE2EDuration="4.32697045s" podCreationTimestamp="2025-11-27 08:57:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:57:23.315281605 +0000 UTC m=+7481.507325533" watchObservedRunningTime="2025-11-27 08:57:23.32697045 +0000 UTC m=+7481.519014368"
Nov 27 08:57:23 crc kubenswrapper[4971]: I1127 08:57:23.523523 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0"
Nov 27 08:57:24 crc kubenswrapper[4971]: I1127 08:57:24.311507 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwtx2" event={"ID":"81f9813c-4df5-458c-a044-f8488e44e16f","Type":"ContainerStarted","Data":"1196480d555dc950bd1dfc5c6b297ce11aae65757903317837611a0f40020b70"}
Nov 27 08:57:24 crc kubenswrapper[4971]: I1127 08:57:24.336063 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nwtx2" podStartSLOduration=2.754066888 podStartE2EDuration="8.336043894s" podCreationTimestamp="2025-11-27 08:57:16 +0000 UTC" firstStartedPulling="2025-11-27 08:57:18.212940903 +0000 UTC m=+7476.404984811" lastFinishedPulling="2025-11-27 08:57:23.794917869 +0000 UTC m=+7481.986961817" observedRunningTime="2025-11-27 08:57:24.329384414 +0000 UTC m=+7482.521428332" watchObservedRunningTime="2025-11-27 08:57:24.336043894 +0000 UTC m=+7482.528087812"
Nov 27 08:57:24 crc kubenswrapper[4971]: I1127 08:57:24.644224 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Nov 27 08:57:26 crc kubenswrapper[4971]: I1127 08:57:26.831454 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nwtx2"
Nov 27 08:57:26 crc kubenswrapper[4971]: I1127 08:57:26.832087 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nwtx2"
Nov 27 08:57:26 crc kubenswrapper[4971]: I1127 08:57:26.880438 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nwtx2"
Nov 27 08:57:28 crc kubenswrapper[4971]: I1127 08:57:28.446391 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nwtx2"
Nov 27 08:57:28 crc kubenswrapper[4971]: I1127 08:57:28.535048 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nwtx2"]
Nov 27 08:57:28 crc kubenswrapper[4971]: I1127 08:57:28.594296 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-45m2w"]
Nov 27 08:57:28 crc kubenswrapper[4971]: I1127 08:57:28.595028 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-45m2w" podUID="f33d3322-d126-414a-b601-9eedbc225211" containerName="registry-server" containerID="cri-o://a50d0ded044bca6a2e073bd1aae4ed13eebbb7cad6d1c314f242f2e763be07a4" gracePeriod=2
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.168181 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-45m2w"
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.189524 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f33d3322-d126-414a-b601-9eedbc225211-utilities\") pod \"f33d3322-d126-414a-b601-9eedbc225211\" (UID: \"f33d3322-d126-414a-b601-9eedbc225211\") "
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.189722 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gp4bf\" (UniqueName: \"kubernetes.io/projected/f33d3322-d126-414a-b601-9eedbc225211-kube-api-access-gp4bf\") pod \"f33d3322-d126-414a-b601-9eedbc225211\" (UID: \"f33d3322-d126-414a-b601-9eedbc225211\") "
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.189845 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f33d3322-d126-414a-b601-9eedbc225211-catalog-content\") pod \"f33d3322-d126-414a-b601-9eedbc225211\" (UID: \"f33d3322-d126-414a-b601-9eedbc225211\") "
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.192918 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f33d3322-d126-414a-b601-9eedbc225211-utilities" (OuterVolumeSpecName: "utilities") pod "f33d3322-d126-414a-b601-9eedbc225211" (UID: "f33d3322-d126-414a-b601-9eedbc225211"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.209669 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f33d3322-d126-414a-b601-9eedbc225211-kube-api-access-gp4bf" (OuterVolumeSpecName: "kube-api-access-gp4bf") pod "f33d3322-d126-414a-b601-9eedbc225211" (UID: "f33d3322-d126-414a-b601-9eedbc225211"). InnerVolumeSpecName "kube-api-access-gp4bf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.251905 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f33d3322-d126-414a-b601-9eedbc225211-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f33d3322-d126-414a-b601-9eedbc225211" (UID: "f33d3322-d126-414a-b601-9eedbc225211"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.292154 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gp4bf\" (UniqueName: \"kubernetes.io/projected/f33d3322-d126-414a-b601-9eedbc225211-kube-api-access-gp4bf\") on node \"crc\" DevicePath \"\""
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.292200 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f33d3322-d126-414a-b601-9eedbc225211-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.292211 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f33d3322-d126-414a-b601-9eedbc225211-utilities\") on node \"crc\" DevicePath \"\""
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.374946 4971 generic.go:334] "Generic (PLEG): container finished" podID="f33d3322-d126-414a-b601-9eedbc225211" containerID="a50d0ded044bca6a2e073bd1aae4ed13eebbb7cad6d1c314f242f2e763be07a4" exitCode=0
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.375014 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-45m2w" event={"ID":"f33d3322-d126-414a-b601-9eedbc225211","Type":"ContainerDied","Data":"a50d0ded044bca6a2e073bd1aae4ed13eebbb7cad6d1c314f242f2e763be07a4"}
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.375026 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-45m2w"
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.375081 4971 scope.go:117] "RemoveContainer" containerID="a50d0ded044bca6a2e073bd1aae4ed13eebbb7cad6d1c314f242f2e763be07a4"
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.375069 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-45m2w" event={"ID":"f33d3322-d126-414a-b601-9eedbc225211","Type":"ContainerDied","Data":"cfef44444a729051eb6738185e8770537b4fe51ff8b58816ad81137e3d463c59"}
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.409172 4971 scope.go:117] "RemoveContainer" containerID="40b3161a4ae3603b6df43e280a034ca34c9b425c2fcddb30d676d9c26df08d4a"
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.420782 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-45m2w"]
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.439583 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-45m2w"]
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.440131 4971 scope.go:117] "RemoveContainer" containerID="5f9a047e8556cac8e5f23e167512a605300175feb3c6a92f6f088989e359f4a1"
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.487399 4971 scope.go:117] "RemoveContainer" containerID="a50d0ded044bca6a2e073bd1aae4ed13eebbb7cad6d1c314f242f2e763be07a4"
Nov 27 08:57:29 crc kubenswrapper[4971]: E1127 08:57:29.488070 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a50d0ded044bca6a2e073bd1aae4ed13eebbb7cad6d1c314f242f2e763be07a4\": container with ID starting with a50d0ded044bca6a2e073bd1aae4ed13eebbb7cad6d1c314f242f2e763be07a4 not found: ID does not exist" containerID="a50d0ded044bca6a2e073bd1aae4ed13eebbb7cad6d1c314f242f2e763be07a4"
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.488111 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a50d0ded044bca6a2e073bd1aae4ed13eebbb7cad6d1c314f242f2e763be07a4"} err="failed to get container status \"a50d0ded044bca6a2e073bd1aae4ed13eebbb7cad6d1c314f242f2e763be07a4\": rpc error: code = NotFound desc = could not find container \"a50d0ded044bca6a2e073bd1aae4ed13eebbb7cad6d1c314f242f2e763be07a4\": container with ID starting with a50d0ded044bca6a2e073bd1aae4ed13eebbb7cad6d1c314f242f2e763be07a4 not found: ID does not exist"
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.488139 4971 scope.go:117] "RemoveContainer" containerID="40b3161a4ae3603b6df43e280a034ca34c9b425c2fcddb30d676d9c26df08d4a"
Nov 27 08:57:29 crc kubenswrapper[4971]: E1127 08:57:29.488764 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40b3161a4ae3603b6df43e280a034ca34c9b425c2fcddb30d676d9c26df08d4a\": container with ID starting with 40b3161a4ae3603b6df43e280a034ca34c9b425c2fcddb30d676d9c26df08d4a not found: ID does not exist" containerID="40b3161a4ae3603b6df43e280a034ca34c9b425c2fcddb30d676d9c26df08d4a"
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.488810 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40b3161a4ae3603b6df43e280a034ca34c9b425c2fcddb30d676d9c26df08d4a"} err="failed to get container status \"40b3161a4ae3603b6df43e280a034ca34c9b425c2fcddb30d676d9c26df08d4a\": rpc error: code = NotFound desc = could not find container \"40b3161a4ae3603b6df43e280a034ca34c9b425c2fcddb30d676d9c26df08d4a\": container with ID starting with 40b3161a4ae3603b6df43e280a034ca34c9b425c2fcddb30d676d9c26df08d4a not found: ID does not exist"
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.488841 4971 scope.go:117] "RemoveContainer" containerID="5f9a047e8556cac8e5f23e167512a605300175feb3c6a92f6f088989e359f4a1"
Nov 27 08:57:29 crc kubenswrapper[4971]: E1127 08:57:29.489195 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f9a047e8556cac8e5f23e167512a605300175feb3c6a92f6f088989e359f4a1\": container with ID starting with 5f9a047e8556cac8e5f23e167512a605300175feb3c6a92f6f088989e359f4a1 not found: ID does not exist" containerID="5f9a047e8556cac8e5f23e167512a605300175feb3c6a92f6f088989e359f4a1"
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.489224 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f9a047e8556cac8e5f23e167512a605300175feb3c6a92f6f088989e359f4a1"} err="failed to get container status \"5f9a047e8556cac8e5f23e167512a605300175feb3c6a92f6f088989e359f4a1\": rpc error: code = NotFound desc = could not find container \"5f9a047e8556cac8e5f23e167512a605300175feb3c6a92f6f088989e359f4a1\": container with ID starting with 5f9a047e8556cac8e5f23e167512a605300175feb3c6a92f6f088989e359f4a1 not found: ID does not exist"
Nov 27 08:57:29 crc kubenswrapper[4971]: I1127 08:57:29.938690 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Nov 27 08:57:30 crc kubenswrapper[4971]: I1127 08:57:30.561981 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f33d3322-d126-414a-b601-9eedbc225211" path="/var/lib/kubelet/pods/f33d3322-d126-414a-b601-9eedbc225211/volumes"
Nov 27 08:57:56 crc kubenswrapper[4971]: I1127 08:57:56.413002 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 08:57:56 crc kubenswrapper[4971]: I1127 08:57:56.413875 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 08:58:03 crc kubenswrapper[4971]: I1127 08:58:03.073922 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-rvxt8"]
Nov 27 08:58:03 crc kubenswrapper[4971]: I1127 08:58:03.087178 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-74f3-account-create-update-mlqzn"]
Nov 27 08:58:03 crc kubenswrapper[4971]: I1127 08:58:03.109832 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-rvxt8"]
Nov 27 08:58:03 crc kubenswrapper[4971]: I1127 08:58:03.122391 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-74f3-account-create-update-mlqzn"]
Nov 27 08:58:04 crc kubenswrapper[4971]: I1127 08:58:04.567628 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c132b00-609c-45e7-bbd1-648bc1ec2152" path="/var/lib/kubelet/pods/0c132b00-609c-45e7-bbd1-648bc1ec2152/volumes"
Nov 27 08:58:04 crc kubenswrapper[4971]: I1127 08:58:04.569068 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e7b3ea9-343b-4e78-b126-da324d6a1143" path="/var/lib/kubelet/pods/5e7b3ea9-343b-4e78-b126-da324d6a1143/volumes"
Nov 27 08:58:18 crc kubenswrapper[4971]: I1127 08:58:18.040429 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-lnfbm"]
Nov 27 08:58:18 crc kubenswrapper[4971]: I1127 08:58:18.053714 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-lnfbm"]
Nov 27 08:58:18 crc kubenswrapper[4971]: I1127 08:58:18.560525 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ca80319-5e50-45ff-9bd3-3ad98244e9d8" path="/var/lib/kubelet/pods/9ca80319-5e50-45ff-9bd3-3ad98244e9d8/volumes"
Nov 27 08:58:26 crc kubenswrapper[4971]: I1127 08:58:26.412992 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 08:58:26 crc kubenswrapper[4971]: I1127 08:58:26.413818 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 08:58:33 crc kubenswrapper[4971]: I1127 08:58:33.104078 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-tgqvj"]
Nov 27 08:58:33 crc kubenswrapper[4971]: I1127 08:58:33.114011 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-tgqvj"]
Nov 27 08:58:34 crc kubenswrapper[4971]: I1127 08:58:34.565949 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b83d4ed-92d4-41a2-9dff-071a1b6932ad" path="/var/lib/kubelet/pods/6b83d4ed-92d4-41a2-9dff-071a1b6932ad/volumes"
Nov 27 08:58:34 crc kubenswrapper[4971]: I1127 08:58:34.977697 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Nov 27 08:58:54 crc kubenswrapper[4971]: I1127 08:58:54.435909 4971 scope.go:117] "RemoveContainer" containerID="a3295fbfde623d2924ea84b3ba0a09ed9ce0017b32e5be386a1d7c05c0e6b051"
Nov 27 08:58:54 crc kubenswrapper[4971]: I1127 08:58:54.501668 4971 scope.go:117] "RemoveContainer" containerID="07b0b323836e053194f4181c0e2fd5342fb88b99eb800b718b27bf8b2034975e"
Nov 27 08:58:54 crc kubenswrapper[4971]: I1127 08:58:54.543747 4971 scope.go:117] "RemoveContainer" containerID="bbc3bedf1f403abcee9431bc34f94ad114433bf2766f43eae161613770d54fdf"
Nov 27 08:58:54 crc kubenswrapper[4971]: I1127 08:58:54.580410 4971 scope.go:117] "RemoveContainer" containerID="fbbe32cf5da101b90379c83fba83dc224585a390ff16e0b98b39062d1d4cf6e5"
Nov 27 08:58:56 crc kubenswrapper[4971]: I1127 08:58:56.414024 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 08:58:56 crc kubenswrapper[4971]: I1127 08:58:56.414793 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 08:58:56 crc kubenswrapper[4971]: I1127 08:58:56.414900 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h"
Nov 27 08:58:56 crc kubenswrapper[4971]: I1127 08:58:56.416396 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4de37cdeabf174e17fecf55eb37e927f401aeb831db4b1fc4ee5997316920a31"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 27 08:58:56 crc kubenswrapper[4971]: I1127 08:58:56.416510 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://4de37cdeabf174e17fecf55eb37e927f401aeb831db4b1fc4ee5997316920a31" gracePeriod=600
Nov 27 08:58:57 crc kubenswrapper[4971]: I1127 08:58:57.319921 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="4de37cdeabf174e17fecf55eb37e927f401aeb831db4b1fc4ee5997316920a31" exitCode=0
Nov 27 08:58:57 crc kubenswrapper[4971]: I1127 08:58:57.319975 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"4de37cdeabf174e17fecf55eb37e927f401aeb831db4b1fc4ee5997316920a31"}
Nov 27 08:58:57 crc kubenswrapper[4971]: I1127 08:58:57.320654 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168"}
Nov 27 08:58:57 crc kubenswrapper[4971]: I1127 08:58:57.320684 4971 scope.go:117] "RemoveContainer" containerID="503fed002d2daf941b71890d178a428a5c3cffb1b48a2221858dc92e28178dc4"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.603492 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7b85956fb5-qh59m"]
Nov 27 08:59:02 crc kubenswrapper[4971]: E1127 08:59:02.608246 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f33d3322-d126-414a-b601-9eedbc225211" containerName="registry-server"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.608685 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f33d3322-d126-414a-b601-9eedbc225211" containerName="registry-server"
Nov 27 08:59:02 crc kubenswrapper[4971]: E1127 08:59:02.608791 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f33d3322-d126-414a-b601-9eedbc225211" containerName="extract-content"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.608868 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f33d3322-d126-414a-b601-9eedbc225211" containerName="extract-content"
Nov 27 08:59:02 crc kubenswrapper[4971]: E1127 08:59:02.608956 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f33d3322-d126-414a-b601-9eedbc225211" containerName="extract-utilities"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.609027 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f33d3322-d126-414a-b601-9eedbc225211" containerName="extract-utilities"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.609353 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f33d3322-d126-414a-b601-9eedbc225211" containerName="registry-server"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.610924 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b85956fb5-qh59m"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.619052 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.619419 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.619854 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-68qnf"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.620568 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.626796 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7b85956fb5-qh59m"]
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.702703 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gnhr\" (UniqueName: \"kubernetes.io/projected/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-kube-api-access-6gnhr\") pod \"horizon-7b85956fb5-qh59m\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") " pod="openstack/horizon-7b85956fb5-qh59m"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.703147 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-horizon-secret-key\") pod \"horizon-7b85956fb5-qh59m\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") " pod="openstack/horizon-7b85956fb5-qh59m"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.703229 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-config-data\") pod \"horizon-7b85956fb5-qh59m\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") " pod="openstack/horizon-7b85956fb5-qh59m"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.703271 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-logs\") pod \"horizon-7b85956fb5-qh59m\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") " pod="openstack/horizon-7b85956fb5-qh59m"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.703467 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-scripts\") pod \"horizon-7b85956fb5-qh59m\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") " pod="openstack/horizon-7b85956fb5-qh59m"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.704635 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.704886 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="f4e3a631-76b8-4ab7-9836-b73e8ade5bac" containerName="glance-log" containerID="cri-o://cd2fea27a85791a8405b4cc7497d181643216527344449c18ae97b29f58abd8c" gracePeriod=30
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.704955 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="f4e3a631-76b8-4ab7-9836-b73e8ade5bac" containerName="glance-httpd" containerID="cri-o://8874bc68655bb4edb830936197a59ecf4a7b58cc0437cdc96485d9007a4d500e" gracePeriod=30
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.772902 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7f8c46cf7c-qfvm8"]
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.774644 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.783286 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.783566 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="21546bf5-5b71-49ae-ba26-865ec87cda59" containerName="glance-log" containerID="cri-o://b9b5c435cc8b1b7f346bd8d7784742969317d3ad45f77756a85745dd3e261be8" gracePeriod=30
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.783604 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="21546bf5-5b71-49ae-ba26-865ec87cda59" containerName="glance-httpd" containerID="cri-o://ccfde958dcaf43405b3c3ea5cf14dde53978c3a46b16171a3e8ce688894c0bb4" gracePeriod=30
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.803391 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7f8c46cf7c-qfvm8"]
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.805268 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gnhr\" (UniqueName: \"kubernetes.io/projected/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-kube-api-access-6gnhr\") pod \"horizon-7b85956fb5-qh59m\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") " pod="openstack/horizon-7b85956fb5-qh59m"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.805358 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/440b3490-db25-483b-af49-43a5db3530ec-scripts\") pod \"horizon-7f8c46cf7c-qfvm8\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") " pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.805414 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/440b3490-db25-483b-af49-43a5db3530ec-config-data\") pod \"horizon-7f8c46cf7c-qfvm8\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") " pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.805446 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/440b3490-db25-483b-af49-43a5db3530ec-horizon-secret-key\") pod \"horizon-7f8c46cf7c-qfvm8\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") " pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.805476 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/440b3490-db25-483b-af49-43a5db3530ec-logs\") pod \"horizon-7f8c46cf7c-qfvm8\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") " pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.805521 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-horizon-secret-key\") pod \"horizon-7b85956fb5-qh59m\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") " pod="openstack/horizon-7b85956fb5-qh59m"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.805569 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-config-data\") pod \"horizon-7b85956fb5-qh59m\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") " pod="openstack/horizon-7b85956fb5-qh59m"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.805606 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-logs\") pod \"horizon-7b85956fb5-qh59m\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") " pod="openstack/horizon-7b85956fb5-qh59m"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.805635 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzsr9\" (UniqueName: \"kubernetes.io/projected/440b3490-db25-483b-af49-43a5db3530ec-kube-api-access-dzsr9\") pod \"horizon-7f8c46cf7c-qfvm8\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") " pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.805697 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-scripts\") pod \"horizon-7b85956fb5-qh59m\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") " pod="openstack/horizon-7b85956fb5-qh59m"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.806688 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-scripts\") pod \"horizon-7b85956fb5-qh59m\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") " pod="openstack/horizon-7b85956fb5-qh59m"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.808054 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-config-data\") pod \"horizon-7b85956fb5-qh59m\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") " pod="openstack/horizon-7b85956fb5-qh59m"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.808390 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-logs\") pod \"horizon-7b85956fb5-qh59m\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") " pod="openstack/horizon-7b85956fb5-qh59m"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.820071 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-horizon-secret-key\") pod \"horizon-7b85956fb5-qh59m\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") " pod="openstack/horizon-7b85956fb5-qh59m"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.838591 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gnhr\" (UniqueName: \"kubernetes.io/projected/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-kube-api-access-6gnhr\") pod \"horizon-7b85956fb5-qh59m\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") " pod="openstack/horizon-7b85956fb5-qh59m"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.907897 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/440b3490-db25-483b-af49-43a5db3530ec-scripts\") pod \"horizon-7f8c46cf7c-qfvm8\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") " pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.907998 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/440b3490-db25-483b-af49-43a5db3530ec-config-data\") pod \"horizon-7f8c46cf7c-qfvm8\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") " pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.908036 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/440b3490-db25-483b-af49-43a5db3530ec-horizon-secret-key\") pod \"horizon-7f8c46cf7c-qfvm8\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") " pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.908067 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/440b3490-db25-483b-af49-43a5db3530ec-logs\") pod \"horizon-7f8c46cf7c-qfvm8\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") " pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.908123 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzsr9\" (UniqueName: \"kubernetes.io/projected/440b3490-db25-483b-af49-43a5db3530ec-kube-api-access-dzsr9\") pod \"horizon-7f8c46cf7c-qfvm8\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") " pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.908927 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/440b3490-db25-483b-af49-43a5db3530ec-logs\") pod \"horizon-7f8c46cf7c-qfvm8\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") " pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.909583 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/440b3490-db25-483b-af49-43a5db3530ec-config-data\") pod \"horizon-7f8c46cf7c-qfvm8\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") " pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.910036 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/440b3490-db25-483b-af49-43a5db3530ec-scripts\") pod \"horizon-7f8c46cf7c-qfvm8\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") " pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.915607 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/440b3490-db25-483b-af49-43a5db3530ec-horizon-secret-key\") pod \"horizon-7f8c46cf7c-qfvm8\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") " pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.931803 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b85956fb5-qh59m"
Nov 27 08:59:02 crc kubenswrapper[4971]: I1127 08:59:02.931956 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzsr9\" (UniqueName: \"kubernetes.io/projected/440b3490-db25-483b-af49-43a5db3530ec-kube-api-access-dzsr9\") pod \"horizon-7f8c46cf7c-qfvm8\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") " pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.097946 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.349118 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7b85956fb5-qh59m"]
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.387716 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5dbbdfc9bf-94jbx"]
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.389852 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5dbbdfc9bf-94jbx"
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.409922 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5dbbdfc9bf-94jbx"]
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.419687 4971 generic.go:334] "Generic (PLEG): container finished" podID="21546bf5-5b71-49ae-ba26-865ec87cda59" containerID="b9b5c435cc8b1b7f346bd8d7784742969317d3ad45f77756a85745dd3e261be8" exitCode=143
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.419731 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"21546bf5-5b71-49ae-ba26-865ec87cda59","Type":"ContainerDied","Data":"b9b5c435cc8b1b7f346bd8d7784742969317d3ad45f77756a85745dd3e261be8"}
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.422045 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3243de9-39c3-4511-9e89-131659e2179a-logs\") pod \"horizon-5dbbdfc9bf-94jbx\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " pod="openstack/horizon-5dbbdfc9bf-94jbx"
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.422100 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d3243de9-39c3-4511-9e89-131659e2179a-config-data\") pod \"horizon-5dbbdfc9bf-94jbx\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " pod="openstack/horizon-5dbbdfc9bf-94jbx"
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.422166 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqf8b\" (UniqueName: \"kubernetes.io/projected/d3243de9-39c3-4511-9e89-131659e2179a-kube-api-access-qqf8b\") pod \"horizon-5dbbdfc9bf-94jbx\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " pod="openstack/horizon-5dbbdfc9bf-94jbx"
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.422224 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d3243de9-39c3-4511-9e89-131659e2179a-scripts\") pod \"horizon-5dbbdfc9bf-94jbx\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " pod="openstack/horizon-5dbbdfc9bf-94jbx"
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.422254 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d3243de9-39c3-4511-9e89-131659e2179a-horizon-secret-key\") pod \"horizon-5dbbdfc9bf-94jbx\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " pod="openstack/horizon-5dbbdfc9bf-94jbx"
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.430096 4971 generic.go:334] "Generic (PLEG): container finished" podID="f4e3a631-76b8-4ab7-9836-b73e8ade5bac" containerID="cd2fea27a85791a8405b4cc7497d181643216527344449c18ae97b29f58abd8c" exitCode=143
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.430149 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f4e3a631-76b8-4ab7-9836-b73e8ade5bac","Type":"ContainerDied","Data":"cd2fea27a85791a8405b4cc7497d181643216527344449c18ae97b29f58abd8c"}
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.474140 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7b85956fb5-qh59m"]
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.524076 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d3243de9-39c3-4511-9e89-131659e2179a-config-data\") pod \"horizon-5dbbdfc9bf-94jbx\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " pod="openstack/horizon-5dbbdfc9bf-94jbx"
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.524217 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqf8b\" (UniqueName: \"kubernetes.io/projected/d3243de9-39c3-4511-9e89-131659e2179a-kube-api-access-qqf8b\") pod \"horizon-5dbbdfc9bf-94jbx\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " pod="openstack/horizon-5dbbdfc9bf-94jbx"
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.524309 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d3243de9-39c3-4511-9e89-131659e2179a-scripts\") pod \"horizon-5dbbdfc9bf-94jbx\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " pod="openstack/horizon-5dbbdfc9bf-94jbx"
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.524373 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d3243de9-39c3-4511-9e89-131659e2179a-horizon-secret-key\") pod \"horizon-5dbbdfc9bf-94jbx\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " pod="openstack/horizon-5dbbdfc9bf-94jbx"
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.524450 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3243de9-39c3-4511-9e89-131659e2179a-logs\") pod \"horizon-5dbbdfc9bf-94jbx\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " pod="openstack/horizon-5dbbdfc9bf-94jbx"
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.525690 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d3243de9-39c3-4511-9e89-131659e2179a-scripts\") pod \"horizon-5dbbdfc9bf-94jbx\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " pod="openstack/horizon-5dbbdfc9bf-94jbx"
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.526423 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3243de9-39c3-4511-9e89-131659e2179a-logs\") pod \"horizon-5dbbdfc9bf-94jbx\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " pod="openstack/horizon-5dbbdfc9bf-94jbx"
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.527022 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d3243de9-39c3-4511-9e89-131659e2179a-config-data\") pod \"horizon-5dbbdfc9bf-94jbx\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " pod="openstack/horizon-5dbbdfc9bf-94jbx"
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.531411 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d3243de9-39c3-4511-9e89-131659e2179a-horizon-secret-key\") pod \"horizon-5dbbdfc9bf-94jbx\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " pod="openstack/horizon-5dbbdfc9bf-94jbx"
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.541664 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqf8b\" (UniqueName: \"kubernetes.io/projected/d3243de9-39c3-4511-9e89-131659e2179a-kube-api-access-qqf8b\") pod \"horizon-5dbbdfc9bf-94jbx\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " pod="openstack/horizon-5dbbdfc9bf-94jbx"
Nov 27 08:59:03 crc kubenswrapper[4971]: W1127 08:59:03.607853 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod440b3490_db25_483b_af49_43a5db3530ec.slice/crio-feebe1cad6aa7ceaa0ff83bcc3f5cc86f289be27a8e2233f1e40c17c6489085d WatchSource:0}: Error finding container feebe1cad6aa7ceaa0ff83bcc3f5cc86f289be27a8e2233f1e40c17c6489085d: Status 404 returned error can't find the container with id feebe1cad6aa7ceaa0ff83bcc3f5cc86f289be27a8e2233f1e40c17c6489085d
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.608583 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7f8c46cf7c-qfvm8"]
Nov 27 08:59:03 crc kubenswrapper[4971]: I1127 08:59:03.719110 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5dbbdfc9bf-94jbx"
Nov 27 08:59:04 crc kubenswrapper[4971]: I1127 08:59:04.295999 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5dbbdfc9bf-94jbx"]
Nov 27 08:59:04 crc kubenswrapper[4971]: W1127 08:59:04.306283 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3243de9_39c3_4511_9e89_131659e2179a.slice/crio-ce02a92d5d0f116031f97f0b6cb98f532280a725ac29cbf6b4697e49498198f1 WatchSource:0}: Error finding container ce02a92d5d0f116031f97f0b6cb98f532280a725ac29cbf6b4697e49498198f1: Status 404 returned error can't find the container with id ce02a92d5d0f116031f97f0b6cb98f532280a725ac29cbf6b4697e49498198f1
Nov 27 08:59:04 crc kubenswrapper[4971]: I1127 08:59:04.443989 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5dbbdfc9bf-94jbx" event={"ID":"d3243de9-39c3-4511-9e89-131659e2179a","Type":"ContainerStarted","Data":"ce02a92d5d0f116031f97f0b6cb98f532280a725ac29cbf6b4697e49498198f1"}
Nov 27 08:59:04 crc kubenswrapper[4971]: I1127 08:59:04.448062 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7f8c46cf7c-qfvm8" event={"ID":"440b3490-db25-483b-af49-43a5db3530ec","Type":"ContainerStarted","Data":"feebe1cad6aa7ceaa0ff83bcc3f5cc86f289be27a8e2233f1e40c17c6489085d"}
Nov 27 08:59:04 crc kubenswrapper[4971]: I1127 08:59:04.449879 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b85956fb5-qh59m" event={"ID":"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe","Type":"ContainerStarted","Data":"0f6028eb1fd45c6c6f998f3adc269fb68e9015dcf02e3e3fde3cb86baba45e4d"}
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.503311 4971 generic.go:334] "Generic (PLEG): container finished" podID="f4e3a631-76b8-4ab7-9836-b73e8ade5bac" containerID="8874bc68655bb4edb830936197a59ecf4a7b58cc0437cdc96485d9007a4d500e" exitCode=0
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.503898 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f4e3a631-76b8-4ab7-9836-b73e8ade5bac","Type":"ContainerDied","Data":"8874bc68655bb4edb830936197a59ecf4a7b58cc0437cdc96485d9007a4d500e"}
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.512802 4971 generic.go:334] "Generic (PLEG): container finished" podID="21546bf5-5b71-49ae-ba26-865ec87cda59" containerID="ccfde958dcaf43405b3c3ea5cf14dde53978c3a46b16171a3e8ce688894c0bb4" exitCode=0
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.512855 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"21546bf5-5b71-49ae-ba26-865ec87cda59","Type":"ContainerDied","Data":"ccfde958dcaf43405b3c3ea5cf14dde53978c3a46b16171a3e8ce688894c0bb4"}
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.644359 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.735997 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21546bf5-5b71-49ae-ba26-865ec87cda59-scripts\") pod \"21546bf5-5b71-49ae-ba26-865ec87cda59\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") "
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.736043 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/21546bf5-5b71-49ae-ba26-865ec87cda59-ceph\") pod \"21546bf5-5b71-49ae-ba26-865ec87cda59\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") "
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.736094 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21546bf5-5b71-49ae-ba26-865ec87cda59-combined-ca-bundle\") pod \"21546bf5-5b71-49ae-ba26-865ec87cda59\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") "
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.736125 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21546bf5-5b71-49ae-ba26-865ec87cda59-logs\") pod \"21546bf5-5b71-49ae-ba26-865ec87cda59\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") "
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.736194 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21546bf5-5b71-49ae-ba26-865ec87cda59-httpd-run\") pod \"21546bf5-5b71-49ae-ba26-865ec87cda59\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") "
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.736295 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hkgsq\" (UniqueName: \"kubernetes.io/projected/21546bf5-5b71-49ae-ba26-865ec87cda59-kube-api-access-hkgsq\") pod \"21546bf5-5b71-49ae-ba26-865ec87cda59\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") "
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.736329 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21546bf5-5b71-49ae-ba26-865ec87cda59-config-data\") pod \"21546bf5-5b71-49ae-ba26-865ec87cda59\" (UID: \"21546bf5-5b71-49ae-ba26-865ec87cda59\") "
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.737558 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21546bf5-5b71-49ae-ba26-865ec87cda59-logs" (OuterVolumeSpecName: "logs") pod "21546bf5-5b71-49ae-ba26-865ec87cda59" (UID: "21546bf5-5b71-49ae-ba26-865ec87cda59"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.738758 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21546bf5-5b71-49ae-ba26-865ec87cda59-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "21546bf5-5b71-49ae-ba26-865ec87cda59" (UID: "21546bf5-5b71-49ae-ba26-865ec87cda59"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.745029 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21546bf5-5b71-49ae-ba26-865ec87cda59-scripts" (OuterVolumeSpecName: "scripts") pod "21546bf5-5b71-49ae-ba26-865ec87cda59" (UID: "21546bf5-5b71-49ae-ba26-865ec87cda59"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.745254 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21546bf5-5b71-49ae-ba26-865ec87cda59-kube-api-access-hkgsq" (OuterVolumeSpecName: "kube-api-access-hkgsq") pod "21546bf5-5b71-49ae-ba26-865ec87cda59" (UID: "21546bf5-5b71-49ae-ba26-865ec87cda59"). InnerVolumeSpecName "kube-api-access-hkgsq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.765797 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21546bf5-5b71-49ae-ba26-865ec87cda59-ceph" (OuterVolumeSpecName: "ceph") pod "21546bf5-5b71-49ae-ba26-865ec87cda59" (UID: "21546bf5-5b71-49ae-ba26-865ec87cda59"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.791181 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21546bf5-5b71-49ae-ba26-865ec87cda59-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "21546bf5-5b71-49ae-ba26-865ec87cda59" (UID: "21546bf5-5b71-49ae-ba26-865ec87cda59"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.820074 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21546bf5-5b71-49ae-ba26-865ec87cda59-config-data" (OuterVolumeSpecName: "config-data") pod "21546bf5-5b71-49ae-ba26-865ec87cda59" (UID: "21546bf5-5b71-49ae-ba26-865ec87cda59"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.838975 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21546bf5-5b71-49ae-ba26-865ec87cda59-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.838990 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.839006 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21546bf5-5b71-49ae-ba26-865ec87cda59-logs\") on node \"crc\" DevicePath \"\""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.839017 4971 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21546bf5-5b71-49ae-ba26-865ec87cda59-httpd-run\") on node \"crc\" DevicePath \"\""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.839027 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hkgsq\" (UniqueName: \"kubernetes.io/projected/21546bf5-5b71-49ae-ba26-865ec87cda59-kube-api-access-hkgsq\") on node \"crc\" DevicePath \"\""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.839040 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21546bf5-5b71-49ae-ba26-865ec87cda59-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.839049 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21546bf5-5b71-49ae-ba26-865ec87cda59-scripts\") on node \"crc\" DevicePath \"\""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.839058 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/21546bf5-5b71-49ae-ba26-865ec87cda59-ceph\") on node \"crc\" DevicePath \"\""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.939828 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-logs\") pod \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") "
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.939935 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-combined-ca-bundle\") pod \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") "
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.939971 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-scripts\") pod \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") "
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.941022 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-httpd-run\") pod \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") "
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.941057 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-ceph\") pod \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") "
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.941101 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-config-data\") pod \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") "
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.941138 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lqp8\" (UniqueName: \"kubernetes.io/projected/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-kube-api-access-5lqp8\") pod \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\" (UID: \"f4e3a631-76b8-4ab7-9836-b73e8ade5bac\") "
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.941271 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-logs" (OuterVolumeSpecName: "logs") pod "f4e3a631-76b8-4ab7-9836-b73e8ade5bac" (UID: "f4e3a631-76b8-4ab7-9836-b73e8ade5bac"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.941971 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-logs\") on node \"crc\" DevicePath \"\""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.944586 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f4e3a631-76b8-4ab7-9836-b73e8ade5bac" (UID: "f4e3a631-76b8-4ab7-9836-b73e8ade5bac"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.948212 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-ceph" (OuterVolumeSpecName: "ceph") pod "f4e3a631-76b8-4ab7-9836-b73e8ade5bac" (UID: "f4e3a631-76b8-4ab7-9836-b73e8ade5bac"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.965390 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-kube-api-access-5lqp8" (OuterVolumeSpecName: "kube-api-access-5lqp8") pod "f4e3a631-76b8-4ab7-9836-b73e8ade5bac" (UID: "f4e3a631-76b8-4ab7-9836-b73e8ade5bac"). InnerVolumeSpecName "kube-api-access-5lqp8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.965638 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-scripts" (OuterVolumeSpecName: "scripts") pod "f4e3a631-76b8-4ab7-9836-b73e8ade5bac" (UID: "f4e3a631-76b8-4ab7-9836-b73e8ade5bac"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 08:59:06 crc kubenswrapper[4971]: I1127 08:59:06.988833 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f4e3a631-76b8-4ab7-9836-b73e8ade5bac" (UID: "f4e3a631-76b8-4ab7-9836-b73e8ade5bac"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.011988 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-config-data" (OuterVolumeSpecName: "config-data") pod "f4e3a631-76b8-4ab7-9836-b73e8ade5bac" (UID: "f4e3a631-76b8-4ab7-9836-b73e8ade5bac"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.044914 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lqp8\" (UniqueName: \"kubernetes.io/projected/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-kube-api-access-5lqp8\") on node \"crc\" DevicePath \"\""
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.044955 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.044968 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-scripts\") on node \"crc\" DevicePath \"\""
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.044979 4971 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-httpd-run\") on node \"crc\" DevicePath \"\""
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.044989 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-ceph\") on node \"crc\" DevicePath \"\""
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.044999 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4e3a631-76b8-4ab7-9836-b73e8ade5bac-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.533264 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.533465 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f4e3a631-76b8-4ab7-9836-b73e8ade5bac","Type":"ContainerDied","Data":"62d4f3b69fa57d19b28989ab022404002f2ef7b5f1970c532a8df2d1eba1871f"}
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.534367 4971 scope.go:117] "RemoveContainer" containerID="8874bc68655bb4edb830936197a59ecf4a7b58cc0437cdc96485d9007a4d500e"
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.540167 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"21546bf5-5b71-49ae-ba26-865ec87cda59","Type":"ContainerDied","Data":"09746a7fb951bfe924f654241dc5d324a76d2079fdd80935541f3b88a27e9878"}
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.540211 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.588960 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.604888 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.622141 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.638706 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.657811 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Nov 27 08:59:07 crc kubenswrapper[4971]: E1127 08:59:07.658409 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21546bf5-5b71-49ae-ba26-865ec87cda59" containerName="glance-log"
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.658426 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="21546bf5-5b71-49ae-ba26-865ec87cda59" containerName="glance-log"
Nov 27 08:59:07 crc kubenswrapper[4971]: E1127 08:59:07.658453 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21546bf5-5b71-49ae-ba26-865ec87cda59" containerName="glance-httpd"
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.658460 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="21546bf5-5b71-49ae-ba26-865ec87cda59" containerName="glance-httpd"
Nov 27 08:59:07 crc kubenswrapper[4971]: E1127 08:59:07.658486 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4e3a631-76b8-4ab7-9836-b73e8ade5bac" containerName="glance-httpd"
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.658493 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4e3a631-76b8-4ab7-9836-b73e8ade5bac" containerName="glance-httpd"
Nov 27 08:59:07 crc kubenswrapper[4971]: E1127 08:59:07.658510 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4e3a631-76b8-4ab7-9836-b73e8ade5bac" containerName="glance-log"
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.658516 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4e3a631-76b8-4ab7-9836-b73e8ade5bac" containerName="glance-log"
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.658882 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4e3a631-76b8-4ab7-9836-b73e8ade5bac" containerName="glance-httpd"
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.658896 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="21546bf5-5b71-49ae-ba26-865ec87cda59" containerName="glance-httpd"
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.658914 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="21546bf5-5b71-49ae-ba26-865ec87cda59" containerName="glance-log"
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.658925 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4e3a631-76b8-4ab7-9836-b73e8ade5bac" containerName="glance-log"
Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.660197 4971 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.664571 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.664799 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-whfqh" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.666105 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.678093 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.691281 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.710144 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.721994 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.741962 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.761173 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e554a98e-ca6f-407c-be33-a634a8057430-ceph\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.761251 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vw4b\" (UniqueName: \"kubernetes.io/projected/9b1eda3c-bd39-46d3-9e13-c330b23c286b-kube-api-access-8vw4b\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.761288 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9b1eda3c-bd39-46d3-9e13-c330b23c286b-ceph\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.761334 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4k9r\" (UniqueName: \"kubernetes.io/projected/e554a98e-ca6f-407c-be33-a634a8057430-kube-api-access-z4k9r\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.761769 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e554a98e-ca6f-407c-be33-a634a8057430-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.761988 4971 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e554a98e-ca6f-407c-be33-a634a8057430-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.762038 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e554a98e-ca6f-407c-be33-a634a8057430-logs\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.762064 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b1eda3c-bd39-46d3-9e13-c330b23c286b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.762318 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e554a98e-ca6f-407c-be33-a634a8057430-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.762406 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b1eda3c-bd39-46d3-9e13-c330b23c286b-config-data\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.762845 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e554a98e-ca6f-407c-be33-a634a8057430-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.771197 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b1eda3c-bd39-46d3-9e13-c330b23c286b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.771627 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b1eda3c-bd39-46d3-9e13-c330b23c286b-logs\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.771840 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b1eda3c-bd39-46d3-9e13-c330b23c286b-scripts\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc 
kubenswrapper[4971]: I1127 08:59:07.873705 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b1eda3c-bd39-46d3-9e13-c330b23c286b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.873791 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b1eda3c-bd39-46d3-9e13-c330b23c286b-logs\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.873855 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b1eda3c-bd39-46d3-9e13-c330b23c286b-scripts\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.873900 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e554a98e-ca6f-407c-be33-a634a8057430-ceph\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.875050 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vw4b\" (UniqueName: \"kubernetes.io/projected/9b1eda3c-bd39-46d3-9e13-c330b23c286b-kube-api-access-8vw4b\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.875106 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9b1eda3c-bd39-46d3-9e13-c330b23c286b-ceph\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.875156 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4k9r\" (UniqueName: \"kubernetes.io/projected/e554a98e-ca6f-407c-be33-a634a8057430-kube-api-access-z4k9r\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.875182 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e554a98e-ca6f-407c-be33-a634a8057430-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.875228 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e554a98e-ca6f-407c-be33-a634a8057430-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.875251 4971 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e554a98e-ca6f-407c-be33-a634a8057430-logs\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.875267 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b1eda3c-bd39-46d3-9e13-c330b23c286b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.875308 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e554a98e-ca6f-407c-be33-a634a8057430-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.875340 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b1eda3c-bd39-46d3-9e13-c330b23c286b-config-data\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.875377 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e554a98e-ca6f-407c-be33-a634a8057430-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.875475 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b1eda3c-bd39-46d3-9e13-c330b23c286b-logs\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.875971 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e554a98e-ca6f-407c-be33-a634a8057430-logs\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.876879 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e554a98e-ca6f-407c-be33-a634a8057430-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.877028 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b1eda3c-bd39-46d3-9e13-c330b23c286b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.882252 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e554a98e-ca6f-407c-be33-a634a8057430-ceph\") pod \"glance-default-internal-api-0\" (UID: 
\"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.882929 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e554a98e-ca6f-407c-be33-a634a8057430-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.884132 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b1eda3c-bd39-46d3-9e13-c330b23c286b-config-data\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.884337 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b1eda3c-bd39-46d3-9e13-c330b23c286b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.885454 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e554a98e-ca6f-407c-be33-a634a8057430-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.887978 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b1eda3c-bd39-46d3-9e13-c330b23c286b-scripts\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.894769 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9b1eda3c-bd39-46d3-9e13-c330b23c286b-ceph\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.896165 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vw4b\" (UniqueName: \"kubernetes.io/projected/9b1eda3c-bd39-46d3-9e13-c330b23c286b-kube-api-access-8vw4b\") pod \"glance-default-external-api-0\" (UID: \"9b1eda3c-bd39-46d3-9e13-c330b23c286b\") " pod="openstack/glance-default-external-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.901040 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4k9r\" (UniqueName: \"kubernetes.io/projected/e554a98e-ca6f-407c-be33-a634a8057430-kube-api-access-z4k9r\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 08:59:07.906746 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e554a98e-ca6f-407c-be33-a634a8057430-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e554a98e-ca6f-407c-be33-a634a8057430\") " pod="openstack/glance-default-internal-api-0" Nov 27 08:59:07 crc kubenswrapper[4971]: I1127 
08:59:07.985405 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 27 08:59:08 crc kubenswrapper[4971]: I1127 08:59:08.050335 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 27 08:59:08 crc kubenswrapper[4971]: I1127 08:59:08.566746 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21546bf5-5b71-49ae-ba26-865ec87cda59" path="/var/lib/kubelet/pods/21546bf5-5b71-49ae-ba26-865ec87cda59/volumes" Nov 27 08:59:08 crc kubenswrapper[4971]: I1127 08:59:08.568094 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4e3a631-76b8-4ab7-9836-b73e8ade5bac" path="/var/lib/kubelet/pods/f4e3a631-76b8-4ab7-9836-b73e8ade5bac/volumes" Nov 27 08:59:12 crc kubenswrapper[4971]: I1127 08:59:12.049387 4971 scope.go:117] "RemoveContainer" containerID="cd2fea27a85791a8405b4cc7497d181643216527344449c18ae97b29f58abd8c" Nov 27 08:59:12 crc kubenswrapper[4971]: I1127 08:59:12.096130 4971 scope.go:117] "RemoveContainer" containerID="ccfde958dcaf43405b3c3ea5cf14dde53978c3a46b16171a3e8ce688894c0bb4" Nov 27 08:59:12 crc kubenswrapper[4971]: I1127 08:59:12.188014 4971 scope.go:117] "RemoveContainer" containerID="b9b5c435cc8b1b7f346bd8d7784742969317d3ad45f77756a85745dd3e261be8" Nov 27 08:59:12 crc kubenswrapper[4971]: I1127 08:59:12.639994 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5dbbdfc9bf-94jbx" event={"ID":"d3243de9-39c3-4511-9e89-131659e2179a","Type":"ContainerStarted","Data":"0e05a2f0c75fe999e426ce0eb0775cfc00e011e3ef4ed7fe1873919354d956aa"} Nov 27 08:59:12 crc kubenswrapper[4971]: I1127 08:59:12.671083 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b85956fb5-qh59m" event={"ID":"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe","Type":"ContainerStarted","Data":"e842f2ce0f923c6341eb67d260fd16876582af19ecbe87c4db886e8fda5a6d84"} Nov 27 08:59:12 crc kubenswrapper[4971]: I1127 08:59:12.679178 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7f8c46cf7c-qfvm8" event={"ID":"440b3490-db25-483b-af49-43a5db3530ec","Type":"ContainerStarted","Data":"95c5457291cada2eb4c410ff6b81ff9b19eac730b04348ee2a1e7e60c4aef082"} Nov 27 08:59:12 crc kubenswrapper[4971]: I1127 08:59:12.788049 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 27 08:59:12 crc kubenswrapper[4971]: W1127 08:59:12.809786 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b1eda3c_bd39_46d3_9e13_c330b23c286b.slice/crio-3211ea29eff72ac3fae8ded872da6a687710c8fb3be9a96c16d1e8a45d57275f WatchSource:0}: Error finding container 3211ea29eff72ac3fae8ded872da6a687710c8fb3be9a96c16d1e8a45d57275f: Status 404 returned error can't find the container with id 3211ea29eff72ac3fae8ded872da6a687710c8fb3be9a96c16d1e8a45d57275f Nov 27 08:59:12 crc kubenswrapper[4971]: I1127 08:59:12.889905 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 27 08:59:12 crc kubenswrapper[4971]: W1127 08:59:12.896814 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode554a98e_ca6f_407c_be33_a634a8057430.slice/crio-484e7630fb02ba62dd1479c4764be3252ee497420b8ffa8de7b61815f3ccf63a WatchSource:0}: Error finding container 
Nov 27 08:59:13 crc kubenswrapper[4971]: I1127 08:59:13.733180 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7f8c46cf7c-qfvm8" event={"ID":"440b3490-db25-483b-af49-43a5db3530ec","Type":"ContainerStarted","Data":"47503c28fed2774d18f524614f9f31f9c58db58a509057b8aba5ab54bb60aefd"}
Nov 27 08:59:13 crc kubenswrapper[4971]: I1127 08:59:13.775330 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7f8c46cf7c-qfvm8" podStartSLOduration=3.119494687 podStartE2EDuration="11.775305761s" podCreationTimestamp="2025-11-27 08:59:02 +0000 UTC" firstStartedPulling="2025-11-27 08:59:03.611147396 +0000 UTC m=+7581.803191314" lastFinishedPulling="2025-11-27 08:59:12.26695847 +0000 UTC m=+7590.459002388" observedRunningTime="2025-11-27 08:59:13.766797377 +0000 UTC m=+7591.958841305" watchObservedRunningTime="2025-11-27 08:59:13.775305761 +0000 UTC m=+7591.967349669"
Nov 27 08:59:13 crc kubenswrapper[4971]: I1127 08:59:13.803810 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b85956fb5-qh59m" event={"ID":"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe","Type":"ContainerStarted","Data":"1419e8c3813c3f1f54d513cf61c79e8c86406d1c6fdfc6d27d1ad512294f5e94"}
Nov 27 08:59:13 crc kubenswrapper[4971]: I1127 08:59:13.804015 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7b85956fb5-qh59m" podUID="f4f6af29-f0c5-4b9e-9c60-bad44050a7fe" containerName="horizon-log" containerID="cri-o://e842f2ce0f923c6341eb67d260fd16876582af19ecbe87c4db886e8fda5a6d84" gracePeriod=30
Nov 27 08:59:13 crc kubenswrapper[4971]: I1127 08:59:13.804669 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7b85956fb5-qh59m" podUID="f4f6af29-f0c5-4b9e-9c60-bad44050a7fe" containerName="horizon" containerID="cri-o://1419e8c3813c3f1f54d513cf61c79e8c86406d1c6fdfc6d27d1ad512294f5e94" gracePeriod=30
Nov 27 08:59:13 crc kubenswrapper[4971]: I1127 08:59:13.831086 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9b1eda3c-bd39-46d3-9e13-c330b23c286b","Type":"ContainerStarted","Data":"3c52d82837d19f6676e0a3362eff610af25981c6705f0290c1b30981e1a245f6"}
Nov 27 08:59:13 crc kubenswrapper[4971]: I1127 08:59:13.831143 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9b1eda3c-bd39-46d3-9e13-c330b23c286b","Type":"ContainerStarted","Data":"3211ea29eff72ac3fae8ded872da6a687710c8fb3be9a96c16d1e8a45d57275f"}
Nov 27 08:59:13 crc kubenswrapper[4971]: I1127 08:59:13.847009 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7b85956fb5-qh59m" podStartSLOduration=3.120074713 podStartE2EDuration="11.846984155s" podCreationTimestamp="2025-11-27 08:59:02 +0000 UTC" firstStartedPulling="2025-11-27 08:59:03.476794365 +0000 UTC m=+7581.668838273" lastFinishedPulling="2025-11-27 08:59:12.203703787 +0000 UTC m=+7590.395747715" observedRunningTime="2025-11-27 08:59:13.842146066 +0000 UTC m=+7592.034189984" watchObservedRunningTime="2025-11-27 08:59:13.846984155 +0000 UTC m=+7592.039028063"
Nov 27 08:59:13 crc kubenswrapper[4971]: I1127 08:59:13.876343 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5dbbdfc9bf-94jbx" event={"ID":"d3243de9-39c3-4511-9e89-131659e2179a","Type":"ContainerStarted","Data":"9074e2006c1a346a0d34c275b536f69c2b2351cf01413463b6854cf83232ce6f"}
Nov 27 08:59:13 crc kubenswrapper[4971]: I1127 08:59:13.912133 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e554a98e-ca6f-407c-be33-a634a8057430","Type":"ContainerStarted","Data":"04eadd96c0b30d41dfa3a9131344bb52308075a16437574c78907793b6c25939"}
Nov 27 08:59:13 crc kubenswrapper[4971]: I1127 08:59:13.912203 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e554a98e-ca6f-407c-be33-a634a8057430","Type":"ContainerStarted","Data":"484e7630fb02ba62dd1479c4764be3252ee497420b8ffa8de7b61815f3ccf63a"}
Nov 27 08:59:13 crc kubenswrapper[4971]: I1127 08:59:13.926986 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5dbbdfc9bf-94jbx" podStartSLOduration=2.969691274 podStartE2EDuration="10.926964967s" podCreationTimestamp="2025-11-27 08:59:03 +0000 UTC" firstStartedPulling="2025-11-27 08:59:04.310386997 +0000 UTC m=+7582.502430915" lastFinishedPulling="2025-11-27 08:59:12.26766068 +0000 UTC m=+7590.459704608" observedRunningTime="2025-11-27 08:59:13.926140394 +0000 UTC m=+7592.118184332" watchObservedRunningTime="2025-11-27 08:59:13.926964967 +0000 UTC m=+7592.119008895"
Nov 27 08:59:14 crc kubenswrapper[4971]: I1127 08:59:14.925834 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9b1eda3c-bd39-46d3-9e13-c330b23c286b","Type":"ContainerStarted","Data":"19442fd8b69bf4412548609b7dc8cd3a149d00a51daacda20b95ff6e08189bee"}
Nov 27 08:59:14 crc kubenswrapper[4971]: I1127 08:59:14.928302 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e554a98e-ca6f-407c-be33-a634a8057430","Type":"ContainerStarted","Data":"386c0ba58fe42c37c6c1b846ca025dd4c4c313a85dd00c0ddffb0be2e16101c1"}
Nov 27 08:59:14 crc kubenswrapper[4971]: I1127 08:59:14.957842 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.9578117729999995 podStartE2EDuration="7.957811773s" podCreationTimestamp="2025-11-27 08:59:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:59:14.948198547 +0000 UTC m=+7593.140242465" watchObservedRunningTime="2025-11-27 08:59:14.957811773 +0000 UTC m=+7593.149855691"
Nov 27 08:59:14 crc kubenswrapper[4971]: I1127 08:59:14.981362 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=7.981336017 podStartE2EDuration="7.981336017s" podCreationTimestamp="2025-11-27 08:59:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 08:59:14.971004851 +0000 UTC m=+7593.163048779" watchObservedRunningTime="2025-11-27 08:59:14.981336017 +0000 UTC m=+7593.173379935"
Nov 27 08:59:17 crc kubenswrapper[4971]: I1127 08:59:17.985758 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Nov 27 08:59:17 crc kubenswrapper[4971]: I1127 08:59:17.990096 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Nov 27 08:59:18 crc kubenswrapper[4971]: I1127 08:59:18.027833 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Nov 27 08:59:18 crc kubenswrapper[4971]: I1127 08:59:18.037371 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Nov 27 08:59:18 crc kubenswrapper[4971]: I1127 08:59:18.050890 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Nov 27 08:59:18 crc kubenswrapper[4971]: I1127 08:59:18.056393 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Nov 27 08:59:18 crc kubenswrapper[4971]: I1127 08:59:18.097161 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Nov 27 08:59:18 crc kubenswrapper[4971]: I1127 08:59:18.099777 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Nov 27 08:59:18 crc kubenswrapper[4971]: I1127 08:59:18.967594 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Nov 27 08:59:18 crc kubenswrapper[4971]: I1127 08:59:18.967636 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Nov 27 08:59:18 crc kubenswrapper[4971]: I1127 08:59:18.967754 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Nov 27 08:59:18 crc kubenswrapper[4971]: I1127 08:59:18.967767 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Nov 27 08:59:20 crc kubenswrapper[4971]: I1127 08:59:20.987930 4971 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Nov 27 08:59:21 crc kubenswrapper[4971]: I1127 08:59:21.234402 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Nov 27 08:59:21 crc kubenswrapper[4971]: I1127 08:59:21.247924 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Nov 27 08:59:21 crc kubenswrapper[4971]: I1127 08:59:21.343458 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Nov 27 08:59:21 crc kubenswrapper[4971]: I1127 08:59:21.343565 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Nov 27 08:59:22 crc kubenswrapper[4971]: I1127 08:59:22.932971 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7b85956fb5-qh59m"
Nov 27 08:59:23 crc kubenswrapper[4971]: I1127 08:59:23.098353 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 08:59:23 crc kubenswrapper[4971]: I1127 08:59:23.098415 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 08:59:23 crc kubenswrapper[4971]: I1127 08:59:23.099693 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7f8c46cf7c-qfvm8" podUID="440b3490-db25-483b-af49-43a5db3530ec" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.108:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.108:8080: connect: connection refused"
Nov 27 08:59:23 crc kubenswrapper[4971]: I1127 08:59:23.720292 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5dbbdfc9bf-94jbx"
Nov 27 08:59:23 crc kubenswrapper[4971]: I1127 08:59:23.720474 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5dbbdfc9bf-94jbx"
Nov 27 08:59:23 crc kubenswrapper[4971]: I1127 08:59:23.723920 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5dbbdfc9bf-94jbx" podUID="d3243de9-39c3-4511-9e89-131659e2179a" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.109:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.109:8080: connect: connection refused"
Nov 27 08:59:33 crc kubenswrapper[4971]: I1127 08:59:33.111292 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7f8c46cf7c-qfvm8" podUID="440b3490-db25-483b-af49-43a5db3530ec" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.108:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.108:8080: connect: connection refused"
Nov 27 08:59:33 crc kubenswrapper[4971]: I1127 08:59:33.720066 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5dbbdfc9bf-94jbx" podUID="d3243de9-39c3-4511-9e89-131659e2179a" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.109:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.109:8080: connect: connection refused"
Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.263816 4971 generic.go:334] "Generic (PLEG): container finished" podID="f4f6af29-f0c5-4b9e-9c60-bad44050a7fe" containerID="1419e8c3813c3f1f54d513cf61c79e8c86406d1c6fdfc6d27d1ad512294f5e94" exitCode=137
Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.264803 4971 generic.go:334] "Generic (PLEG): container finished" podID="f4f6af29-f0c5-4b9e-9c60-bad44050a7fe" containerID="e842f2ce0f923c6341eb67d260fd16876582af19ecbe87c4db886e8fda5a6d84" exitCode=137
Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.263895 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b85956fb5-qh59m" event={"ID":"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe","Type":"ContainerDied","Data":"1419e8c3813c3f1f54d513cf61c79e8c86406d1c6fdfc6d27d1ad512294f5e94"}
Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.264872 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b85956fb5-qh59m" event={"ID":"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe","Type":"ContainerDied","Data":"e842f2ce0f923c6341eb67d260fd16876582af19ecbe87c4db886e8fda5a6d84"}
Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.264904 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b85956fb5-qh59m" event={"ID":"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe","Type":"ContainerDied","Data":"0f6028eb1fd45c6c6f998f3adc269fb68e9015dcf02e3e3fde3cb86baba45e4d"}
Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.264922 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f6028eb1fd45c6c6f998f3adc269fb68e9015dcf02e3e3fde3cb86baba45e4d"
Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.330218 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b85956fb5-qh59m"
Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.432555 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-config-data\") pod \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") "
Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.432805 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-horizon-secret-key\") pod \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") "
Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.432855 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6gnhr\" (UniqueName: \"kubernetes.io/projected/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-kube-api-access-6gnhr\") pod \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") "
Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.433044 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-scripts\") pod \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") "
Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.433103 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-logs\") pod \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\" (UID: \"f4f6af29-f0c5-4b9e-9c60-bad44050a7fe\") "
Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.434069 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-logs" (OuterVolumeSpecName: "logs") pod "f4f6af29-f0c5-4b9e-9c60-bad44050a7fe" (UID: "f4f6af29-f0c5-4b9e-9c60-bad44050a7fe"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.441317 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-kube-api-access-6gnhr" (OuterVolumeSpecName: "kube-api-access-6gnhr") pod "f4f6af29-f0c5-4b9e-9c60-bad44050a7fe" (UID: "f4f6af29-f0c5-4b9e-9c60-bad44050a7fe"). InnerVolumeSpecName "kube-api-access-6gnhr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.442588 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "f4f6af29-f0c5-4b9e-9c60-bad44050a7fe" (UID: "f4f6af29-f0c5-4b9e-9c60-bad44050a7fe"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.460836 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-scripts" (OuterVolumeSpecName: "scripts") pod "f4f6af29-f0c5-4b9e-9c60-bad44050a7fe" (UID: "f4f6af29-f0c5-4b9e-9c60-bad44050a7fe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.470293 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-config-data" (OuterVolumeSpecName: "config-data") pod "f4f6af29-f0c5-4b9e-9c60-bad44050a7fe" (UID: "f4f6af29-f0c5-4b9e-9c60-bad44050a7fe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.535841 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.535893 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-logs\") on node \"crc\" DevicePath \"\"" Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.535903 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.535915 4971 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Nov 27 08:59:44 crc kubenswrapper[4971]: I1127 08:59:44.535929 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6gnhr\" (UniqueName: \"kubernetes.io/projected/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe-kube-api-access-6gnhr\") on node \"crc\" DevicePath \"\"" Nov 27 08:59:45 crc kubenswrapper[4971]: I1127 08:59:45.286702 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7b85956fb5-qh59m" Nov 27 08:59:45 crc kubenswrapper[4971]: I1127 08:59:45.323879 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7b85956fb5-qh59m"] Nov 27 08:59:45 crc kubenswrapper[4971]: I1127 08:59:45.333601 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7b85956fb5-qh59m"] Nov 27 08:59:45 crc kubenswrapper[4971]: I1127 08:59:45.484309 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7f8c46cf7c-qfvm8" Nov 27 08:59:45 crc kubenswrapper[4971]: I1127 08:59:45.756475 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5dbbdfc9bf-94jbx" Nov 27 08:59:46 crc kubenswrapper[4971]: I1127 08:59:46.561963 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4f6af29-f0c5-4b9e-9c60-bad44050a7fe" path="/var/lib/kubelet/pods/f4f6af29-f0c5-4b9e-9c60-bad44050a7fe/volumes" Nov 27 08:59:47 crc kubenswrapper[4971]: I1127 08:59:47.193598 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7f8c46cf7c-qfvm8" Nov 27 08:59:47 crc kubenswrapper[4971]: I1127 08:59:47.571927 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5dbbdfc9bf-94jbx" Nov 27 08:59:47 crc kubenswrapper[4971]: I1127 08:59:47.660270 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7f8c46cf7c-qfvm8"] Nov 27 08:59:47 crc kubenswrapper[4971]: I1127 08:59:47.660997 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7f8c46cf7c-qfvm8" podUID="440b3490-db25-483b-af49-43a5db3530ec" containerName="horizon-log" containerID="cri-o://95c5457291cada2eb4c410ff6b81ff9b19eac730b04348ee2a1e7e60c4aef082" gracePeriod=30 Nov 27 08:59:47 crc kubenswrapper[4971]: I1127 08:59:47.661191 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7f8c46cf7c-qfvm8" podUID="440b3490-db25-483b-af49-43a5db3530ec" containerName="horizon" containerID="cri-o://47503c28fed2774d18f524614f9f31f9c58db58a509057b8aba5ab54bb60aefd" gracePeriod=30 Nov 27 08:59:51 crc kubenswrapper[4971]: I1127 08:59:51.364976 4971 generic.go:334] "Generic (PLEG): container finished" podID="440b3490-db25-483b-af49-43a5db3530ec" containerID="47503c28fed2774d18f524614f9f31f9c58db58a509057b8aba5ab54bb60aefd" exitCode=0 Nov 27 08:59:51 crc kubenswrapper[4971]: I1127 08:59:51.365060 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7f8c46cf7c-qfvm8" event={"ID":"440b3490-db25-483b-af49-43a5db3530ec","Type":"ContainerDied","Data":"47503c28fed2774d18f524614f9f31f9c58db58a509057b8aba5ab54bb60aefd"} Nov 27 08:59:53 crc kubenswrapper[4971]: I1127 08:59:53.099221 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7f8c46cf7c-qfvm8" podUID="440b3490-db25-483b-af49-43a5db3530ec" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.108:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.108:8080: connect: connection refused" Nov 27 08:59:54 crc kubenswrapper[4971]: I1127 08:59:54.966922 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-fd46b4db5-sp4l9"] Nov 27 08:59:54 crc kubenswrapper[4971]: E1127 08:59:54.967619 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4f6af29-f0c5-4b9e-9c60-bad44050a7fe" containerName="horizon" Nov 27 08:59:54 crc 
kubenswrapper[4971]: I1127 08:59:54.967642 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4f6af29-f0c5-4b9e-9c60-bad44050a7fe" containerName="horizon" Nov 27 08:59:54 crc kubenswrapper[4971]: E1127 08:59:54.967678 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4f6af29-f0c5-4b9e-9c60-bad44050a7fe" containerName="horizon-log" Nov 27 08:59:54 crc kubenswrapper[4971]: I1127 08:59:54.967691 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4f6af29-f0c5-4b9e-9c60-bad44050a7fe" containerName="horizon-log" Nov 27 08:59:54 crc kubenswrapper[4971]: I1127 08:59:54.969314 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4f6af29-f0c5-4b9e-9c60-bad44050a7fe" containerName="horizon-log" Nov 27 08:59:54 crc kubenswrapper[4971]: I1127 08:59:54.969377 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4f6af29-f0c5-4b9e-9c60-bad44050a7fe" containerName="horizon" Nov 27 08:59:54 crc kubenswrapper[4971]: I1127 08:59:54.971065 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 08:59:54 crc kubenswrapper[4971]: I1127 08:59:54.993828 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-fd46b4db5-sp4l9"] Nov 27 08:59:55 crc kubenswrapper[4971]: I1127 08:59:55.090852 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e03d8c76-8c6c-4945-8fd3-5b3709e91963-config-data\") pod \"horizon-fd46b4db5-sp4l9\" (UID: \"e03d8c76-8c6c-4945-8fd3-5b3709e91963\") " pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 08:59:55 crc kubenswrapper[4971]: I1127 08:59:55.091202 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e03d8c76-8c6c-4945-8fd3-5b3709e91963-scripts\") pod \"horizon-fd46b4db5-sp4l9\" (UID: \"e03d8c76-8c6c-4945-8fd3-5b3709e91963\") " pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 08:59:55 crc kubenswrapper[4971]: I1127 08:59:55.091225 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e03d8c76-8c6c-4945-8fd3-5b3709e91963-logs\") pod \"horizon-fd46b4db5-sp4l9\" (UID: \"e03d8c76-8c6c-4945-8fd3-5b3709e91963\") " pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 08:59:55 crc kubenswrapper[4971]: I1127 08:59:55.091428 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e03d8c76-8c6c-4945-8fd3-5b3709e91963-horizon-secret-key\") pod \"horizon-fd46b4db5-sp4l9\" (UID: \"e03d8c76-8c6c-4945-8fd3-5b3709e91963\") " pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 08:59:55 crc kubenswrapper[4971]: I1127 08:59:55.091747 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4hww\" (UniqueName: \"kubernetes.io/projected/e03d8c76-8c6c-4945-8fd3-5b3709e91963-kube-api-access-m4hww\") pod \"horizon-fd46b4db5-sp4l9\" (UID: \"e03d8c76-8c6c-4945-8fd3-5b3709e91963\") " pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 08:59:55 crc kubenswrapper[4971]: I1127 08:59:55.195264 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e03d8c76-8c6c-4945-8fd3-5b3709e91963-config-data\") pod \"horizon-fd46b4db5-sp4l9\" (UID: 
\"e03d8c76-8c6c-4945-8fd3-5b3709e91963\") " pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 08:59:55 crc kubenswrapper[4971]: I1127 08:59:55.195349 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e03d8c76-8c6c-4945-8fd3-5b3709e91963-scripts\") pod \"horizon-fd46b4db5-sp4l9\" (UID: \"e03d8c76-8c6c-4945-8fd3-5b3709e91963\") " pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 08:59:55 crc kubenswrapper[4971]: I1127 08:59:55.195388 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e03d8c76-8c6c-4945-8fd3-5b3709e91963-logs\") pod \"horizon-fd46b4db5-sp4l9\" (UID: \"e03d8c76-8c6c-4945-8fd3-5b3709e91963\") " pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 08:59:55 crc kubenswrapper[4971]: I1127 08:59:55.195441 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e03d8c76-8c6c-4945-8fd3-5b3709e91963-horizon-secret-key\") pod \"horizon-fd46b4db5-sp4l9\" (UID: \"e03d8c76-8c6c-4945-8fd3-5b3709e91963\") " pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 08:59:55 crc kubenswrapper[4971]: I1127 08:59:55.195518 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4hww\" (UniqueName: \"kubernetes.io/projected/e03d8c76-8c6c-4945-8fd3-5b3709e91963-kube-api-access-m4hww\") pod \"horizon-fd46b4db5-sp4l9\" (UID: \"e03d8c76-8c6c-4945-8fd3-5b3709e91963\") " pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 08:59:55 crc kubenswrapper[4971]: I1127 08:59:55.196205 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e03d8c76-8c6c-4945-8fd3-5b3709e91963-logs\") pod \"horizon-fd46b4db5-sp4l9\" (UID: \"e03d8c76-8c6c-4945-8fd3-5b3709e91963\") " pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 08:59:55 crc kubenswrapper[4971]: I1127 08:59:55.196394 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e03d8c76-8c6c-4945-8fd3-5b3709e91963-scripts\") pod \"horizon-fd46b4db5-sp4l9\" (UID: \"e03d8c76-8c6c-4945-8fd3-5b3709e91963\") " pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 08:59:55 crc kubenswrapper[4971]: I1127 08:59:55.197235 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e03d8c76-8c6c-4945-8fd3-5b3709e91963-config-data\") pod \"horizon-fd46b4db5-sp4l9\" (UID: \"e03d8c76-8c6c-4945-8fd3-5b3709e91963\") " pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 08:59:55 crc kubenswrapper[4971]: I1127 08:59:55.204945 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e03d8c76-8c6c-4945-8fd3-5b3709e91963-horizon-secret-key\") pod \"horizon-fd46b4db5-sp4l9\" (UID: \"e03d8c76-8c6c-4945-8fd3-5b3709e91963\") " pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 08:59:55 crc kubenswrapper[4971]: I1127 08:59:55.220303 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4hww\" (UniqueName: \"kubernetes.io/projected/e03d8c76-8c6c-4945-8fd3-5b3709e91963-kube-api-access-m4hww\") pod \"horizon-fd46b4db5-sp4l9\" (UID: \"e03d8c76-8c6c-4945-8fd3-5b3709e91963\") " pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 08:59:55 crc kubenswrapper[4971]: I1127 08:59:55.297843 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 08:59:55 crc kubenswrapper[4971]: I1127 08:59:55.800313 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-fd46b4db5-sp4l9"] Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.436618 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-js7nf"] Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.439887 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-js7nf" Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.478403 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-5376-account-create-update-cg684"] Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.514431 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-fd46b4db5-sp4l9" event={"ID":"e03d8c76-8c6c-4945-8fd3-5b3709e91963","Type":"ContainerStarted","Data":"537e0340ab05fb4364b6637807a18359286380ba96627424ff72268dc527d7c4"} Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.514527 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-fd46b4db5-sp4l9" event={"ID":"e03d8c76-8c6c-4945-8fd3-5b3709e91963","Type":"ContainerStarted","Data":"76bc2f55e79b99f22ae16a67981e00d140d2c83193a802ab3256c0045562d6ae"} Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.514557 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-fd46b4db5-sp4l9" event={"ID":"e03d8c76-8c6c-4945-8fd3-5b3709e91963","Type":"ContainerStarted","Data":"68976b5591fe86d39e7103f157c0156d046699ab5f99cf6763d5baaa51125c36"} Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.514739 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-5376-account-create-update-cg684" Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.520560 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-5376-account-create-update-cg684"] Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.528010 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret" Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.528848 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bjtb\" (UniqueName: \"kubernetes.io/projected/cbf1aa5f-efca-4f73-a186-3f02978ce830-kube-api-access-8bjtb\") pod \"heat-db-create-js7nf\" (UID: \"cbf1aa5f-efca-4f73-a186-3f02978ce830\") " pod="openstack/heat-db-create-js7nf" Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.532633 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbf1aa5f-efca-4f73-a186-3f02978ce830-operator-scripts\") pod \"heat-db-create-js7nf\" (UID: \"cbf1aa5f-efca-4f73-a186-3f02978ce830\") " pod="openstack/heat-db-create-js7nf" Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.540458 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-js7nf"] Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.576126 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-fd46b4db5-sp4l9" podStartSLOduration=2.576096982 podStartE2EDuration="2.576096982s" podCreationTimestamp="2025-11-27 08:59:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-11-27 08:59:56.530465594 +0000 UTC m=+7634.722509532" watchObservedRunningTime="2025-11-27 08:59:56.576096982 +0000 UTC m=+7634.768140900" Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.635418 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssnpn\" (UniqueName: \"kubernetes.io/projected/26816754-07f0-4343-aade-0fa746b266f5-kube-api-access-ssnpn\") pod \"heat-5376-account-create-update-cg684\" (UID: \"26816754-07f0-4343-aade-0fa746b266f5\") " pod="openstack/heat-5376-account-create-update-cg684" Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.635571 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bjtb\" (UniqueName: \"kubernetes.io/projected/cbf1aa5f-efca-4f73-a186-3f02978ce830-kube-api-access-8bjtb\") pod \"heat-db-create-js7nf\" (UID: \"cbf1aa5f-efca-4f73-a186-3f02978ce830\") " pod="openstack/heat-db-create-js7nf" Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.635710 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbf1aa5f-efca-4f73-a186-3f02978ce830-operator-scripts\") pod \"heat-db-create-js7nf\" (UID: \"cbf1aa5f-efca-4f73-a186-3f02978ce830\") " pod="openstack/heat-db-create-js7nf" Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.635732 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26816754-07f0-4343-aade-0fa746b266f5-operator-scripts\") pod \"heat-5376-account-create-update-cg684\" (UID: \"26816754-07f0-4343-aade-0fa746b266f5\") " pod="openstack/heat-5376-account-create-update-cg684" Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.637168 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbf1aa5f-efca-4f73-a186-3f02978ce830-operator-scripts\") pod \"heat-db-create-js7nf\" (UID: \"cbf1aa5f-efca-4f73-a186-3f02978ce830\") " pod="openstack/heat-db-create-js7nf" Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.654710 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bjtb\" (UniqueName: \"kubernetes.io/projected/cbf1aa5f-efca-4f73-a186-3f02978ce830-kube-api-access-8bjtb\") pod \"heat-db-create-js7nf\" (UID: \"cbf1aa5f-efca-4f73-a186-3f02978ce830\") " pod="openstack/heat-db-create-js7nf" Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.737577 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssnpn\" (UniqueName: \"kubernetes.io/projected/26816754-07f0-4343-aade-0fa746b266f5-kube-api-access-ssnpn\") pod \"heat-5376-account-create-update-cg684\" (UID: \"26816754-07f0-4343-aade-0fa746b266f5\") " pod="openstack/heat-5376-account-create-update-cg684" Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.738150 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26816754-07f0-4343-aade-0fa746b266f5-operator-scripts\") pod \"heat-5376-account-create-update-cg684\" (UID: \"26816754-07f0-4343-aade-0fa746b266f5\") " pod="openstack/heat-5376-account-create-update-cg684" Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.739302 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/26816754-07f0-4343-aade-0fa746b266f5-operator-scripts\") pod \"heat-5376-account-create-update-cg684\" (UID: \"26816754-07f0-4343-aade-0fa746b266f5\") " pod="openstack/heat-5376-account-create-update-cg684" Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.759205 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssnpn\" (UniqueName: \"kubernetes.io/projected/26816754-07f0-4343-aade-0fa746b266f5-kube-api-access-ssnpn\") pod \"heat-5376-account-create-update-cg684\" (UID: \"26816754-07f0-4343-aade-0fa746b266f5\") " pod="openstack/heat-5376-account-create-update-cg684" Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.778059 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-js7nf" Nov 27 08:59:56 crc kubenswrapper[4971]: I1127 08:59:56.848873 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-5376-account-create-update-cg684" Nov 27 08:59:57 crc kubenswrapper[4971]: I1127 08:59:57.317162 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-js7nf"] Nov 27 08:59:57 crc kubenswrapper[4971]: W1127 08:59:57.345930 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcbf1aa5f_efca_4f73_a186_3f02978ce830.slice/crio-d293c90e07f6706a39bfee7f27750525fbc51eaa1f1f69ebce3fa79e61ef5e00 WatchSource:0}: Error finding container d293c90e07f6706a39bfee7f27750525fbc51eaa1f1f69ebce3fa79e61ef5e00: Status 404 returned error can't find the container with id d293c90e07f6706a39bfee7f27750525fbc51eaa1f1f69ebce3fa79e61ef5e00 Nov 27 08:59:57 crc kubenswrapper[4971]: W1127 08:59:57.409005 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26816754_07f0_4343_aade_0fa746b266f5.slice/crio-a36b4a795cb8d434999bcfeee96c2e19c821bdd4da4c19b8185849ea0a415983 WatchSource:0}: Error finding container a36b4a795cb8d434999bcfeee96c2e19c821bdd4da4c19b8185849ea0a415983: Status 404 returned error can't find the container with id a36b4a795cb8d434999bcfeee96c2e19c821bdd4da4c19b8185849ea0a415983 Nov 27 08:59:57 crc kubenswrapper[4971]: I1127 08:59:57.418181 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-5376-account-create-update-cg684"] Nov 27 08:59:57 crc kubenswrapper[4971]: I1127 08:59:57.506051 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-js7nf" event={"ID":"cbf1aa5f-efca-4f73-a186-3f02978ce830","Type":"ContainerStarted","Data":"d293c90e07f6706a39bfee7f27750525fbc51eaa1f1f69ebce3fa79e61ef5e00"} Nov 27 08:59:57 crc kubenswrapper[4971]: I1127 08:59:57.507762 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-5376-account-create-update-cg684" event={"ID":"26816754-07f0-4343-aade-0fa746b266f5","Type":"ContainerStarted","Data":"a36b4a795cb8d434999bcfeee96c2e19c821bdd4da4c19b8185849ea0a415983"} Nov 27 08:59:58 crc kubenswrapper[4971]: E1127 08:59:58.085275 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcbf1aa5f_efca_4f73_a186_3f02978ce830.slice/crio-aa10c907aab40ea2c7653e0033c009d4ba39daf586efcf20be088ba5896c59b7.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcbf1aa5f_efca_4f73_a186_3f02978ce830.slice/crio-conmon-aa10c907aab40ea2c7653e0033c009d4ba39daf586efcf20be088ba5896c59b7.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26816754_07f0_4343_aade_0fa746b266f5.slice/crio-ae9393d475c10e39cddc53af063afbfafcacad911f9526112f0f0f8efd3f32b9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26816754_07f0_4343_aade_0fa746b266f5.slice/crio-conmon-ae9393d475c10e39cddc53af063afbfafcacad911f9526112f0f0f8efd3f32b9.scope\": RecentStats: unable to find data in memory cache]" Nov 27 08:59:58 crc kubenswrapper[4971]: I1127 08:59:58.519855 4971 generic.go:334] "Generic (PLEG): container finished" podID="cbf1aa5f-efca-4f73-a186-3f02978ce830" containerID="aa10c907aab40ea2c7653e0033c009d4ba39daf586efcf20be088ba5896c59b7" exitCode=0 Nov 27 08:59:58 crc kubenswrapper[4971]: I1127 08:59:58.519951 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-js7nf" event={"ID":"cbf1aa5f-efca-4f73-a186-3f02978ce830","Type":"ContainerDied","Data":"aa10c907aab40ea2c7653e0033c009d4ba39daf586efcf20be088ba5896c59b7"} Nov 27 08:59:58 crc kubenswrapper[4971]: I1127 08:59:58.521992 4971 generic.go:334] "Generic (PLEG): container finished" podID="26816754-07f0-4343-aade-0fa746b266f5" containerID="ae9393d475c10e39cddc53af063afbfafcacad911f9526112f0f0f8efd3f32b9" exitCode=0 Nov 27 08:59:58 crc kubenswrapper[4971]: I1127 08:59:58.522039 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-5376-account-create-update-cg684" event={"ID":"26816754-07f0-4343-aade-0fa746b266f5","Type":"ContainerDied","Data":"ae9393d475c10e39cddc53af063afbfafcacad911f9526112f0f0f8efd3f32b9"} Nov 27 08:59:59 crc kubenswrapper[4971]: I1127 08:59:59.993402 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-5376-account-create-update-cg684" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.000710 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-js7nf" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.117365 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26816754-07f0-4343-aade-0fa746b266f5-operator-scripts\") pod \"26816754-07f0-4343-aade-0fa746b266f5\" (UID: \"26816754-07f0-4343-aade-0fa746b266f5\") " Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.117571 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbf1aa5f-efca-4f73-a186-3f02978ce830-operator-scripts\") pod \"cbf1aa5f-efca-4f73-a186-3f02978ce830\" (UID: \"cbf1aa5f-efca-4f73-a186-3f02978ce830\") " Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.117923 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bjtb\" (UniqueName: \"kubernetes.io/projected/cbf1aa5f-efca-4f73-a186-3f02978ce830-kube-api-access-8bjtb\") pod \"cbf1aa5f-efca-4f73-a186-3f02978ce830\" (UID: \"cbf1aa5f-efca-4f73-a186-3f02978ce830\") " Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.117988 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssnpn\" (UniqueName: \"kubernetes.io/projected/26816754-07f0-4343-aade-0fa746b266f5-kube-api-access-ssnpn\") pod \"26816754-07f0-4343-aade-0fa746b266f5\" (UID: \"26816754-07f0-4343-aade-0fa746b266f5\") " Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.120264 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cbf1aa5f-efca-4f73-a186-3f02978ce830-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cbf1aa5f-efca-4f73-a186-3f02978ce830" (UID: "cbf1aa5f-efca-4f73-a186-3f02978ce830"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.120907 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26816754-07f0-4343-aade-0fa746b266f5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "26816754-07f0-4343-aade-0fa746b266f5" (UID: "26816754-07f0-4343-aade-0fa746b266f5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.128181 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbf1aa5f-efca-4f73-a186-3f02978ce830-kube-api-access-8bjtb" (OuterVolumeSpecName: "kube-api-access-8bjtb") pod "cbf1aa5f-efca-4f73-a186-3f02978ce830" (UID: "cbf1aa5f-efca-4f73-a186-3f02978ce830"). InnerVolumeSpecName "kube-api-access-8bjtb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.128804 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26816754-07f0-4343-aade-0fa746b266f5-kube-api-access-ssnpn" (OuterVolumeSpecName: "kube-api-access-ssnpn") pod "26816754-07f0-4343-aade-0fa746b266f5" (UID: "26816754-07f0-4343-aade-0fa746b266f5"). InnerVolumeSpecName "kube-api-access-ssnpn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.190903 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b"] Nov 27 09:00:00 crc kubenswrapper[4971]: E1127 09:00:00.191947 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbf1aa5f-efca-4f73-a186-3f02978ce830" containerName="mariadb-database-create" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.191964 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbf1aa5f-efca-4f73-a186-3f02978ce830" containerName="mariadb-database-create" Nov 27 09:00:00 crc kubenswrapper[4971]: E1127 09:00:00.191976 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26816754-07f0-4343-aade-0fa746b266f5" containerName="mariadb-account-create-update" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.191982 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="26816754-07f0-4343-aade-0fa746b266f5" containerName="mariadb-account-create-update" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.192172 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbf1aa5f-efca-4f73-a186-3f02978ce830" containerName="mariadb-database-create" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.192220 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="26816754-07f0-4343-aade-0fa746b266f5" containerName="mariadb-account-create-update" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.193012 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.195780 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.196367 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.211235 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b"] Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.220489 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bjtb\" (UniqueName: \"kubernetes.io/projected/cbf1aa5f-efca-4f73-a186-3f02978ce830-kube-api-access-8bjtb\") on node \"crc\" DevicePath \"\"" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.220862 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssnpn\" (UniqueName: \"kubernetes.io/projected/26816754-07f0-4343-aade-0fa746b266f5-kube-api-access-ssnpn\") on node \"crc\" DevicePath \"\"" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.220948 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26816754-07f0-4343-aade-0fa746b266f5-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.221024 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbf1aa5f-efca-4f73-a186-3f02978ce830-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.323338 4971 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/59eb86a0-923c-4cc0-bd18-74fdcc5d129b-config-volume\") pod \"collect-profiles-29403900-m8c8b\" (UID: \"59eb86a0-923c-4cc0-bd18-74fdcc5d129b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.323592 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/59eb86a0-923c-4cc0-bd18-74fdcc5d129b-secret-volume\") pod \"collect-profiles-29403900-m8c8b\" (UID: \"59eb86a0-923c-4cc0-bd18-74fdcc5d129b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.323645 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhr54\" (UniqueName: \"kubernetes.io/projected/59eb86a0-923c-4cc0-bd18-74fdcc5d129b-kube-api-access-lhr54\") pod \"collect-profiles-29403900-m8c8b\" (UID: \"59eb86a0-923c-4cc0-bd18-74fdcc5d129b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.426985 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/59eb86a0-923c-4cc0-bd18-74fdcc5d129b-secret-volume\") pod \"collect-profiles-29403900-m8c8b\" (UID: \"59eb86a0-923c-4cc0-bd18-74fdcc5d129b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.427600 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhr54\" (UniqueName: \"kubernetes.io/projected/59eb86a0-923c-4cc0-bd18-74fdcc5d129b-kube-api-access-lhr54\") pod \"collect-profiles-29403900-m8c8b\" (UID: \"59eb86a0-923c-4cc0-bd18-74fdcc5d129b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.428500 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/59eb86a0-923c-4cc0-bd18-74fdcc5d129b-config-volume\") pod \"collect-profiles-29403900-m8c8b\" (UID: \"59eb86a0-923c-4cc0-bd18-74fdcc5d129b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.430057 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/59eb86a0-923c-4cc0-bd18-74fdcc5d129b-config-volume\") pod \"collect-profiles-29403900-m8c8b\" (UID: \"59eb86a0-923c-4cc0-bd18-74fdcc5d129b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.433372 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/59eb86a0-923c-4cc0-bd18-74fdcc5d129b-secret-volume\") pod \"collect-profiles-29403900-m8c8b\" (UID: \"59eb86a0-923c-4cc0-bd18-74fdcc5d129b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.449697 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhr54\" (UniqueName: 
\"kubernetes.io/projected/59eb86a0-923c-4cc0-bd18-74fdcc5d129b-kube-api-access-lhr54\") pod \"collect-profiles-29403900-m8c8b\" (UID: \"59eb86a0-923c-4cc0-bd18-74fdcc5d129b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.554664 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.573979 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-js7nf" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.573974 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-js7nf" event={"ID":"cbf1aa5f-efca-4f73-a186-3f02978ce830","Type":"ContainerDied","Data":"d293c90e07f6706a39bfee7f27750525fbc51eaa1f1f69ebce3fa79e61ef5e00"} Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.574569 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d293c90e07f6706a39bfee7f27750525fbc51eaa1f1f69ebce3fa79e61ef5e00" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.579308 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-5376-account-create-update-cg684" event={"ID":"26816754-07f0-4343-aade-0fa746b266f5","Type":"ContainerDied","Data":"a36b4a795cb8d434999bcfeee96c2e19c821bdd4da4c19b8185849ea0a415983"} Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.579371 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a36b4a795cb8d434999bcfeee96c2e19c821bdd4da4c19b8185849ea0a415983" Nov 27 09:00:00 crc kubenswrapper[4971]: I1127 09:00:00.579467 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-5376-account-create-update-cg684" Nov 27 09:00:01 crc kubenswrapper[4971]: W1127 09:00:01.066524 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59eb86a0_923c_4cc0_bd18_74fdcc5d129b.slice/crio-bdf7b4378bffc765aa8a4026356feba5b675bec959f4e62402a80eedd708fe2c WatchSource:0}: Error finding container bdf7b4378bffc765aa8a4026356feba5b675bec959f4e62402a80eedd708fe2c: Status 404 returned error can't find the container with id bdf7b4378bffc765aa8a4026356feba5b675bec959f4e62402a80eedd708fe2c Nov 27 09:00:01 crc kubenswrapper[4971]: I1127 09:00:01.076309 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b"] Nov 27 09:00:01 crc kubenswrapper[4971]: I1127 09:00:01.586778 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-4mph7"] Nov 27 09:00:01 crc kubenswrapper[4971]: I1127 09:00:01.588793 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-4mph7" Nov 27 09:00:01 crc kubenswrapper[4971]: I1127 09:00:01.594381 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-p9kls" Nov 27 09:00:01 crc kubenswrapper[4971]: I1127 09:00:01.594708 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Nov 27 09:00:01 crc kubenswrapper[4971]: I1127 09:00:01.598720 4971 generic.go:334] "Generic (PLEG): container finished" podID="59eb86a0-923c-4cc0-bd18-74fdcc5d129b" containerID="bf2326358d4a5bbd000535154a56d2c0bec8f0bf5e381043fed2e43204616727" exitCode=0 Nov 27 09:00:01 crc kubenswrapper[4971]: I1127 09:00:01.598788 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b" event={"ID":"59eb86a0-923c-4cc0-bd18-74fdcc5d129b","Type":"ContainerDied","Data":"bf2326358d4a5bbd000535154a56d2c0bec8f0bf5e381043fed2e43204616727"} Nov 27 09:00:01 crc kubenswrapper[4971]: I1127 09:00:01.598829 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b" event={"ID":"59eb86a0-923c-4cc0-bd18-74fdcc5d129b","Type":"ContainerStarted","Data":"bdf7b4378bffc765aa8a4026356feba5b675bec959f4e62402a80eedd708fe2c"} Nov 27 09:00:01 crc kubenswrapper[4971]: I1127 09:00:01.603142 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-4mph7"] Nov 27 09:00:01 crc kubenswrapper[4971]: I1127 09:00:01.659005 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aeb9e021-f0d3-4d35-92f6-22e3c934f8f1-config-data\") pod \"heat-db-sync-4mph7\" (UID: \"aeb9e021-f0d3-4d35-92f6-22e3c934f8f1\") " pod="openstack/heat-db-sync-4mph7" Nov 27 09:00:01 crc kubenswrapper[4971]: I1127 09:00:01.659132 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aeb9e021-f0d3-4d35-92f6-22e3c934f8f1-combined-ca-bundle\") pod \"heat-db-sync-4mph7\" (UID: \"aeb9e021-f0d3-4d35-92f6-22e3c934f8f1\") " pod="openstack/heat-db-sync-4mph7" Nov 27 09:00:01 crc kubenswrapper[4971]: I1127 09:00:01.659199 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhxng\" (UniqueName: \"kubernetes.io/projected/aeb9e021-f0d3-4d35-92f6-22e3c934f8f1-kube-api-access-qhxng\") pod \"heat-db-sync-4mph7\" (UID: \"aeb9e021-f0d3-4d35-92f6-22e3c934f8f1\") " pod="openstack/heat-db-sync-4mph7" Nov 27 09:00:01 crc kubenswrapper[4971]: I1127 09:00:01.761927 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aeb9e021-f0d3-4d35-92f6-22e3c934f8f1-config-data\") pod \"heat-db-sync-4mph7\" (UID: \"aeb9e021-f0d3-4d35-92f6-22e3c934f8f1\") " pod="openstack/heat-db-sync-4mph7" Nov 27 09:00:01 crc kubenswrapper[4971]: I1127 09:00:01.762364 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aeb9e021-f0d3-4d35-92f6-22e3c934f8f1-combined-ca-bundle\") pod \"heat-db-sync-4mph7\" (UID: \"aeb9e021-f0d3-4d35-92f6-22e3c934f8f1\") " pod="openstack/heat-db-sync-4mph7" Nov 27 09:00:01 crc kubenswrapper[4971]: I1127 09:00:01.762515 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-qhxng\" (UniqueName: \"kubernetes.io/projected/aeb9e021-f0d3-4d35-92f6-22e3c934f8f1-kube-api-access-qhxng\") pod \"heat-db-sync-4mph7\" (UID: \"aeb9e021-f0d3-4d35-92f6-22e3c934f8f1\") " pod="openstack/heat-db-sync-4mph7" Nov 27 09:00:01 crc kubenswrapper[4971]: I1127 09:00:01.771036 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aeb9e021-f0d3-4d35-92f6-22e3c934f8f1-config-data\") pod \"heat-db-sync-4mph7\" (UID: \"aeb9e021-f0d3-4d35-92f6-22e3c934f8f1\") " pod="openstack/heat-db-sync-4mph7" Nov 27 09:00:01 crc kubenswrapper[4971]: I1127 09:00:01.781991 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhxng\" (UniqueName: \"kubernetes.io/projected/aeb9e021-f0d3-4d35-92f6-22e3c934f8f1-kube-api-access-qhxng\") pod \"heat-db-sync-4mph7\" (UID: \"aeb9e021-f0d3-4d35-92f6-22e3c934f8f1\") " pod="openstack/heat-db-sync-4mph7" Nov 27 09:00:01 crc kubenswrapper[4971]: I1127 09:00:01.787576 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aeb9e021-f0d3-4d35-92f6-22e3c934f8f1-combined-ca-bundle\") pod \"heat-db-sync-4mph7\" (UID: \"aeb9e021-f0d3-4d35-92f6-22e3c934f8f1\") " pod="openstack/heat-db-sync-4mph7" Nov 27 09:00:01 crc kubenswrapper[4971]: I1127 09:00:01.923571 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-4mph7" Nov 27 09:00:02 crc kubenswrapper[4971]: I1127 09:00:02.421726 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-4mph7"] Nov 27 09:00:02 crc kubenswrapper[4971]: W1127 09:00:02.428683 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaeb9e021_f0d3_4d35_92f6_22e3c934f8f1.slice/crio-0a9725fe1d71f06d6ba9ac0fdbf49ff36bf5defafbbf2296d576483655166956 WatchSource:0}: Error finding container 0a9725fe1d71f06d6ba9ac0fdbf49ff36bf5defafbbf2296d576483655166956: Status 404 returned error can't find the container with id 0a9725fe1d71f06d6ba9ac0fdbf49ff36bf5defafbbf2296d576483655166956 Nov 27 09:00:02 crc kubenswrapper[4971]: I1127 09:00:02.610402 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-4mph7" event={"ID":"aeb9e021-f0d3-4d35-92f6-22e3c934f8f1","Type":"ContainerStarted","Data":"0a9725fe1d71f06d6ba9ac0fdbf49ff36bf5defafbbf2296d576483655166956"} Nov 27 09:00:03 crc kubenswrapper[4971]: I1127 09:00:03.019668 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b" Nov 27 09:00:03 crc kubenswrapper[4971]: I1127 09:00:03.098832 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7f8c46cf7c-qfvm8" podUID="440b3490-db25-483b-af49-43a5db3530ec" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.108:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.108:8080: connect: connection refused" Nov 27 09:00:03 crc kubenswrapper[4971]: I1127 09:00:03.104119 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/59eb86a0-923c-4cc0-bd18-74fdcc5d129b-config-volume\") pod \"59eb86a0-923c-4cc0-bd18-74fdcc5d129b\" (UID: \"59eb86a0-923c-4cc0-bd18-74fdcc5d129b\") " Nov 27 09:00:03 crc kubenswrapper[4971]: I1127 09:00:03.104556 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/59eb86a0-923c-4cc0-bd18-74fdcc5d129b-secret-volume\") pod \"59eb86a0-923c-4cc0-bd18-74fdcc5d129b\" (UID: \"59eb86a0-923c-4cc0-bd18-74fdcc5d129b\") " Nov 27 09:00:03 crc kubenswrapper[4971]: I1127 09:00:03.104660 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhr54\" (UniqueName: \"kubernetes.io/projected/59eb86a0-923c-4cc0-bd18-74fdcc5d129b-kube-api-access-lhr54\") pod \"59eb86a0-923c-4cc0-bd18-74fdcc5d129b\" (UID: \"59eb86a0-923c-4cc0-bd18-74fdcc5d129b\") " Nov 27 09:00:03 crc kubenswrapper[4971]: I1127 09:00:03.105999 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59eb86a0-923c-4cc0-bd18-74fdcc5d129b-config-volume" (OuterVolumeSpecName: "config-volume") pod "59eb86a0-923c-4cc0-bd18-74fdcc5d129b" (UID: "59eb86a0-923c-4cc0-bd18-74fdcc5d129b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:00:03 crc kubenswrapper[4971]: I1127 09:00:03.106592 4971 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/59eb86a0-923c-4cc0-bd18-74fdcc5d129b-config-volume\") on node \"crc\" DevicePath \"\"" Nov 27 09:00:03 crc kubenswrapper[4971]: I1127 09:00:03.112455 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59eb86a0-923c-4cc0-bd18-74fdcc5d129b-kube-api-access-lhr54" (OuterVolumeSpecName: "kube-api-access-lhr54") pod "59eb86a0-923c-4cc0-bd18-74fdcc5d129b" (UID: "59eb86a0-923c-4cc0-bd18-74fdcc5d129b"). InnerVolumeSpecName "kube-api-access-lhr54". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:00:03 crc kubenswrapper[4971]: I1127 09:00:03.127045 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59eb86a0-923c-4cc0-bd18-74fdcc5d129b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "59eb86a0-923c-4cc0-bd18-74fdcc5d129b" (UID: "59eb86a0-923c-4cc0-bd18-74fdcc5d129b"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:00:03 crc kubenswrapper[4971]: I1127 09:00:03.209003 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhr54\" (UniqueName: \"kubernetes.io/projected/59eb86a0-923c-4cc0-bd18-74fdcc5d129b-kube-api-access-lhr54\") on node \"crc\" DevicePath \"\"" Nov 27 09:00:03 crc kubenswrapper[4971]: I1127 09:00:03.209040 4971 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/59eb86a0-923c-4cc0-bd18-74fdcc5d129b-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 27 09:00:03 crc kubenswrapper[4971]: I1127 09:00:03.626993 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b" event={"ID":"59eb86a0-923c-4cc0-bd18-74fdcc5d129b","Type":"ContainerDied","Data":"bdf7b4378bffc765aa8a4026356feba5b675bec959f4e62402a80eedd708fe2c"} Nov 27 09:00:03 crc kubenswrapper[4971]: I1127 09:00:03.627684 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bdf7b4378bffc765aa8a4026356feba5b675bec959f4e62402a80eedd708fe2c" Nov 27 09:00:03 crc kubenswrapper[4971]: I1127 09:00:03.627099 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b" Nov 27 09:00:04 crc kubenswrapper[4971]: I1127 09:00:04.119729 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw"] Nov 27 09:00:04 crc kubenswrapper[4971]: I1127 09:00:04.131416 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403855-kblnw"] Nov 27 09:00:04 crc kubenswrapper[4971]: I1127 09:00:04.566847 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f409368-a7a7-49ff-908c-022268f93d16" path="/var/lib/kubelet/pods/8f409368-a7a7-49ff-908c-022268f93d16/volumes" Nov 27 09:00:05 crc kubenswrapper[4971]: I1127 09:00:05.298683 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 09:00:05 crc kubenswrapper[4971]: I1127 09:00:05.298747 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 09:00:10 crc kubenswrapper[4971]: I1127 09:00:10.706375 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-4mph7" event={"ID":"aeb9e021-f0d3-4d35-92f6-22e3c934f8f1","Type":"ContainerStarted","Data":"c871e74c68f49246cab2e0749c25bb770dffb1977912d8659a0ae4800495e697"} Nov 27 09:00:10 crc kubenswrapper[4971]: I1127 09:00:10.734369 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-4mph7" podStartSLOduration=1.965692734 podStartE2EDuration="9.73434336s" podCreationTimestamp="2025-11-27 09:00:01 +0000 UTC" firstStartedPulling="2025-11-27 09:00:02.433638255 +0000 UTC m=+7640.625682173" lastFinishedPulling="2025-11-27 09:00:10.202288891 +0000 UTC m=+7648.394332799" observedRunningTime="2025-11-27 09:00:10.724337444 +0000 UTC m=+7648.916381362" watchObservedRunningTime="2025-11-27 09:00:10.73434336 +0000 UTC m=+7648.926387278" Nov 27 09:00:12 crc kubenswrapper[4971]: I1127 09:00:12.728281 4971 generic.go:334] "Generic (PLEG): container finished" podID="aeb9e021-f0d3-4d35-92f6-22e3c934f8f1" containerID="c871e74c68f49246cab2e0749c25bb770dffb1977912d8659a0ae4800495e697" exitCode=0 Nov 27 
09:00:12 crc kubenswrapper[4971]: I1127 09:00:12.728396 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-4mph7" event={"ID":"aeb9e021-f0d3-4d35-92f6-22e3c934f8f1","Type":"ContainerDied","Data":"c871e74c68f49246cab2e0749c25bb770dffb1977912d8659a0ae4800495e697"} Nov 27 09:00:13 crc kubenswrapper[4971]: I1127 09:00:13.099275 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7f8c46cf7c-qfvm8" podUID="440b3490-db25-483b-af49-43a5db3530ec" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.108:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.108:8080: connect: connection refused" Nov 27 09:00:13 crc kubenswrapper[4971]: I1127 09:00:13.099460 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7f8c46cf7c-qfvm8" Nov 27 09:00:14 crc kubenswrapper[4971]: I1127 09:00:14.156633 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-4mph7" Nov 27 09:00:14 crc kubenswrapper[4971]: I1127 09:00:14.252450 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aeb9e021-f0d3-4d35-92f6-22e3c934f8f1-config-data\") pod \"aeb9e021-f0d3-4d35-92f6-22e3c934f8f1\" (UID: \"aeb9e021-f0d3-4d35-92f6-22e3c934f8f1\") " Nov 27 09:00:14 crc kubenswrapper[4971]: I1127 09:00:14.252510 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aeb9e021-f0d3-4d35-92f6-22e3c934f8f1-combined-ca-bundle\") pod \"aeb9e021-f0d3-4d35-92f6-22e3c934f8f1\" (UID: \"aeb9e021-f0d3-4d35-92f6-22e3c934f8f1\") " Nov 27 09:00:14 crc kubenswrapper[4971]: I1127 09:00:14.252812 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhxng\" (UniqueName: \"kubernetes.io/projected/aeb9e021-f0d3-4d35-92f6-22e3c934f8f1-kube-api-access-qhxng\") pod \"aeb9e021-f0d3-4d35-92f6-22e3c934f8f1\" (UID: \"aeb9e021-f0d3-4d35-92f6-22e3c934f8f1\") " Nov 27 09:00:14 crc kubenswrapper[4971]: I1127 09:00:14.277185 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aeb9e021-f0d3-4d35-92f6-22e3c934f8f1-kube-api-access-qhxng" (OuterVolumeSpecName: "kube-api-access-qhxng") pod "aeb9e021-f0d3-4d35-92f6-22e3c934f8f1" (UID: "aeb9e021-f0d3-4d35-92f6-22e3c934f8f1"). InnerVolumeSpecName "kube-api-access-qhxng". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:00:14 crc kubenswrapper[4971]: I1127 09:00:14.305252 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aeb9e021-f0d3-4d35-92f6-22e3c934f8f1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aeb9e021-f0d3-4d35-92f6-22e3c934f8f1" (UID: "aeb9e021-f0d3-4d35-92f6-22e3c934f8f1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:00:14 crc kubenswrapper[4971]: I1127 09:00:14.355714 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhxng\" (UniqueName: \"kubernetes.io/projected/aeb9e021-f0d3-4d35-92f6-22e3c934f8f1-kube-api-access-qhxng\") on node \"crc\" DevicePath \"\"" Nov 27 09:00:14 crc kubenswrapper[4971]: I1127 09:00:14.355759 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aeb9e021-f0d3-4d35-92f6-22e3c934f8f1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:00:14 crc kubenswrapper[4971]: I1127 09:00:14.359015 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aeb9e021-f0d3-4d35-92f6-22e3c934f8f1-config-data" (OuterVolumeSpecName: "config-data") pod "aeb9e021-f0d3-4d35-92f6-22e3c934f8f1" (UID: "aeb9e021-f0d3-4d35-92f6-22e3c934f8f1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:00:14 crc kubenswrapper[4971]: I1127 09:00:14.457998 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aeb9e021-f0d3-4d35-92f6-22e3c934f8f1-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 09:00:14 crc kubenswrapper[4971]: I1127 09:00:14.755800 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-4mph7" event={"ID":"aeb9e021-f0d3-4d35-92f6-22e3c934f8f1","Type":"ContainerDied","Data":"0a9725fe1d71f06d6ba9ac0fdbf49ff36bf5defafbbf2296d576483655166956"} Nov 27 09:00:14 crc kubenswrapper[4971]: I1127 09:00:14.756189 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a9725fe1d71f06d6ba9ac0fdbf49ff36bf5defafbbf2296d576483655166956" Nov 27 09:00:14 crc kubenswrapper[4971]: I1127 09:00:14.756453 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-4mph7" Nov 27 09:00:15 crc kubenswrapper[4971]: I1127 09:00:15.300928 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-fd46b4db5-sp4l9" podUID="e03d8c76-8c6c-4945-8fd3-5b3709e91963" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.112:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.112:8080: connect: connection refused" Nov 27 09:00:15 crc kubenswrapper[4971]: I1127 09:00:15.901407 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-59c8bcfc86-ndvzb"] Nov 27 09:00:15 crc kubenswrapper[4971]: E1127 09:00:15.902292 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59eb86a0-923c-4cc0-bd18-74fdcc5d129b" containerName="collect-profiles" Nov 27 09:00:15 crc kubenswrapper[4971]: I1127 09:00:15.902312 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="59eb86a0-923c-4cc0-bd18-74fdcc5d129b" containerName="collect-profiles" Nov 27 09:00:15 crc kubenswrapper[4971]: E1127 09:00:15.902345 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aeb9e021-f0d3-4d35-92f6-22e3c934f8f1" containerName="heat-db-sync" Nov 27 09:00:15 crc kubenswrapper[4971]: I1127 09:00:15.902354 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="aeb9e021-f0d3-4d35-92f6-22e3c934f8f1" containerName="heat-db-sync" Nov 27 09:00:15 crc kubenswrapper[4971]: I1127 09:00:15.902599 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="59eb86a0-923c-4cc0-bd18-74fdcc5d129b" containerName="collect-profiles" Nov 27 09:00:15 crc kubenswrapper[4971]: I1127 09:00:15.902631 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="aeb9e021-f0d3-4d35-92f6-22e3c934f8f1" containerName="heat-db-sync" Nov 27 09:00:15 crc kubenswrapper[4971]: I1127 09:00:15.903452 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-59c8bcfc86-ndvzb" Nov 27 09:00:15 crc kubenswrapper[4971]: I1127 09:00:15.906962 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data" Nov 27 09:00:15 crc kubenswrapper[4971]: I1127 09:00:15.907755 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Nov 27 09:00:15 crc kubenswrapper[4971]: I1127 09:00:15.907834 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-p9kls" Nov 27 09:00:15 crc kubenswrapper[4971]: I1127 09:00:15.920228 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-59c8bcfc86-ndvzb"] Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.021968 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a891ed3-1a18-48f2-8a36-f0cacf4cda34-combined-ca-bundle\") pod \"heat-engine-59c8bcfc86-ndvzb\" (UID: \"4a891ed3-1a18-48f2-8a36-f0cacf4cda34\") " pod="openstack/heat-engine-59c8bcfc86-ndvzb" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.022042 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4a891ed3-1a18-48f2-8a36-f0cacf4cda34-config-data-custom\") pod \"heat-engine-59c8bcfc86-ndvzb\" (UID: \"4a891ed3-1a18-48f2-8a36-f0cacf4cda34\") " pod="openstack/heat-engine-59c8bcfc86-ndvzb" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.022092 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvj4j\" (UniqueName: \"kubernetes.io/projected/4a891ed3-1a18-48f2-8a36-f0cacf4cda34-kube-api-access-rvj4j\") pod \"heat-engine-59c8bcfc86-ndvzb\" (UID: \"4a891ed3-1a18-48f2-8a36-f0cacf4cda34\") " pod="openstack/heat-engine-59c8bcfc86-ndvzb" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.022116 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a891ed3-1a18-48f2-8a36-f0cacf4cda34-config-data\") pod \"heat-engine-59c8bcfc86-ndvzb\" (UID: \"4a891ed3-1a18-48f2-8a36-f0cacf4cda34\") " pod="openstack/heat-engine-59c8bcfc86-ndvzb" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.035106 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-558f5f68c6-gwcv5"] Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.036850 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-558f5f68c6-gwcv5" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.044986 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.055741 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-7bc5c74f54-7k5pr"] Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.057194 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-7bc5c74f54-7k5pr" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.064886 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.070376 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-558f5f68c6-gwcv5"] Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.081137 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-7bc5c74f54-7k5pr"] Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.129939 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4a891ed3-1a18-48f2-8a36-f0cacf4cda34-config-data-custom\") pod \"heat-engine-59c8bcfc86-ndvzb\" (UID: \"4a891ed3-1a18-48f2-8a36-f0cacf4cda34\") " pod="openstack/heat-engine-59c8bcfc86-ndvzb" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.130024 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d195723-5d1e-4cad-9893-627476bcf980-combined-ca-bundle\") pod \"heat-api-7bc5c74f54-7k5pr\" (UID: \"1d195723-5d1e-4cad-9893-627476bcf980\") " pod="openstack/heat-api-7bc5c74f54-7k5pr" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.130244 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b974245a-b10d-460c-a512-783707368cfb-config-data\") pod \"heat-cfnapi-558f5f68c6-gwcv5\" (UID: \"b974245a-b10d-460c-a512-783707368cfb\") " pod="openstack/heat-cfnapi-558f5f68c6-gwcv5" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.130289 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtjs9\" (UniqueName: \"kubernetes.io/projected/1d195723-5d1e-4cad-9893-627476bcf980-kube-api-access-xtjs9\") pod \"heat-api-7bc5c74f54-7k5pr\" (UID: \"1d195723-5d1e-4cad-9893-627476bcf980\") " pod="openstack/heat-api-7bc5c74f54-7k5pr" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.130499 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvj4j\" (UniqueName: \"kubernetes.io/projected/4a891ed3-1a18-48f2-8a36-f0cacf4cda34-kube-api-access-rvj4j\") pod \"heat-engine-59c8bcfc86-ndvzb\" (UID: \"4a891ed3-1a18-48f2-8a36-f0cacf4cda34\") " pod="openstack/heat-engine-59c8bcfc86-ndvzb" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.130670 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a891ed3-1a18-48f2-8a36-f0cacf4cda34-config-data\") pod \"heat-engine-59c8bcfc86-ndvzb\" (UID: \"4a891ed3-1a18-48f2-8a36-f0cacf4cda34\") " pod="openstack/heat-engine-59c8bcfc86-ndvzb" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.130706 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d195723-5d1e-4cad-9893-627476bcf980-config-data\") pod \"heat-api-7bc5c74f54-7k5pr\" (UID: \"1d195723-5d1e-4cad-9893-627476bcf980\") " pod="openstack/heat-api-7bc5c74f54-7k5pr" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.130763 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5glxx\" (UniqueName: 
\"kubernetes.io/projected/b974245a-b10d-460c-a512-783707368cfb-kube-api-access-5glxx\") pod \"heat-cfnapi-558f5f68c6-gwcv5\" (UID: \"b974245a-b10d-460c-a512-783707368cfb\") " pod="openstack/heat-cfnapi-558f5f68c6-gwcv5" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.131142 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b974245a-b10d-460c-a512-783707368cfb-combined-ca-bundle\") pod \"heat-cfnapi-558f5f68c6-gwcv5\" (UID: \"b974245a-b10d-460c-a512-783707368cfb\") " pod="openstack/heat-cfnapi-558f5f68c6-gwcv5" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.131235 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b974245a-b10d-460c-a512-783707368cfb-config-data-custom\") pod \"heat-cfnapi-558f5f68c6-gwcv5\" (UID: \"b974245a-b10d-460c-a512-783707368cfb\") " pod="openstack/heat-cfnapi-558f5f68c6-gwcv5" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.131317 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a891ed3-1a18-48f2-8a36-f0cacf4cda34-combined-ca-bundle\") pod \"heat-engine-59c8bcfc86-ndvzb\" (UID: \"4a891ed3-1a18-48f2-8a36-f0cacf4cda34\") " pod="openstack/heat-engine-59c8bcfc86-ndvzb" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.131368 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1d195723-5d1e-4cad-9893-627476bcf980-config-data-custom\") pod \"heat-api-7bc5c74f54-7k5pr\" (UID: \"1d195723-5d1e-4cad-9893-627476bcf980\") " pod="openstack/heat-api-7bc5c74f54-7k5pr" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.145902 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a891ed3-1a18-48f2-8a36-f0cacf4cda34-config-data\") pod \"heat-engine-59c8bcfc86-ndvzb\" (UID: \"4a891ed3-1a18-48f2-8a36-f0cacf4cda34\") " pod="openstack/heat-engine-59c8bcfc86-ndvzb" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.147832 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4a891ed3-1a18-48f2-8a36-f0cacf4cda34-config-data-custom\") pod \"heat-engine-59c8bcfc86-ndvzb\" (UID: \"4a891ed3-1a18-48f2-8a36-f0cacf4cda34\") " pod="openstack/heat-engine-59c8bcfc86-ndvzb" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.158360 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a891ed3-1a18-48f2-8a36-f0cacf4cda34-combined-ca-bundle\") pod \"heat-engine-59c8bcfc86-ndvzb\" (UID: \"4a891ed3-1a18-48f2-8a36-f0cacf4cda34\") " pod="openstack/heat-engine-59c8bcfc86-ndvzb" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.162259 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvj4j\" (UniqueName: \"kubernetes.io/projected/4a891ed3-1a18-48f2-8a36-f0cacf4cda34-kube-api-access-rvj4j\") pod \"heat-engine-59c8bcfc86-ndvzb\" (UID: \"4a891ed3-1a18-48f2-8a36-f0cacf4cda34\") " pod="openstack/heat-engine-59c8bcfc86-ndvzb" Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.230054 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-59c8bcfc86-ndvzb"
Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.233446 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b974245a-b10d-460c-a512-783707368cfb-combined-ca-bundle\") pod \"heat-cfnapi-558f5f68c6-gwcv5\" (UID: \"b974245a-b10d-460c-a512-783707368cfb\") " pod="openstack/heat-cfnapi-558f5f68c6-gwcv5"
Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.233484 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b974245a-b10d-460c-a512-783707368cfb-config-data-custom\") pod \"heat-cfnapi-558f5f68c6-gwcv5\" (UID: \"b974245a-b10d-460c-a512-783707368cfb\") " pod="openstack/heat-cfnapi-558f5f68c6-gwcv5"
Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.233514 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1d195723-5d1e-4cad-9893-627476bcf980-config-data-custom\") pod \"heat-api-7bc5c74f54-7k5pr\" (UID: \"1d195723-5d1e-4cad-9893-627476bcf980\") " pod="openstack/heat-api-7bc5c74f54-7k5pr"
Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.233573 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d195723-5d1e-4cad-9893-627476bcf980-combined-ca-bundle\") pod \"heat-api-7bc5c74f54-7k5pr\" (UID: \"1d195723-5d1e-4cad-9893-627476bcf980\") " pod="openstack/heat-api-7bc5c74f54-7k5pr"
Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.233601 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b974245a-b10d-460c-a512-783707368cfb-config-data\") pod \"heat-cfnapi-558f5f68c6-gwcv5\" (UID: \"b974245a-b10d-460c-a512-783707368cfb\") " pod="openstack/heat-cfnapi-558f5f68c6-gwcv5"
Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.233625 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtjs9\" (UniqueName: \"kubernetes.io/projected/1d195723-5d1e-4cad-9893-627476bcf980-kube-api-access-xtjs9\") pod \"heat-api-7bc5c74f54-7k5pr\" (UID: \"1d195723-5d1e-4cad-9893-627476bcf980\") " pod="openstack/heat-api-7bc5c74f54-7k5pr"
Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.233657 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d195723-5d1e-4cad-9893-627476bcf980-config-data\") pod \"heat-api-7bc5c74f54-7k5pr\" (UID: \"1d195723-5d1e-4cad-9893-627476bcf980\") " pod="openstack/heat-api-7bc5c74f54-7k5pr"
Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.233682 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5glxx\" (UniqueName: \"kubernetes.io/projected/b974245a-b10d-460c-a512-783707368cfb-kube-api-access-5glxx\") pod \"heat-cfnapi-558f5f68c6-gwcv5\" (UID: \"b974245a-b10d-460c-a512-783707368cfb\") " pod="openstack/heat-cfnapi-558f5f68c6-gwcv5"
Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.240210 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d195723-5d1e-4cad-9893-627476bcf980-config-data\") pod \"heat-api-7bc5c74f54-7k5pr\" (UID: \"1d195723-5d1e-4cad-9893-627476bcf980\") " pod="openstack/heat-api-7bc5c74f54-7k5pr"
Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.240633 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1d195723-5d1e-4cad-9893-627476bcf980-config-data-custom\") pod \"heat-api-7bc5c74f54-7k5pr\" (UID: \"1d195723-5d1e-4cad-9893-627476bcf980\") " pod="openstack/heat-api-7bc5c74f54-7k5pr"
Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.241329 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b974245a-b10d-460c-a512-783707368cfb-config-data\") pod \"heat-cfnapi-558f5f68c6-gwcv5\" (UID: \"b974245a-b10d-460c-a512-783707368cfb\") " pod="openstack/heat-cfnapi-558f5f68c6-gwcv5"
Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.242411 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b974245a-b10d-460c-a512-783707368cfb-config-data-custom\") pod \"heat-cfnapi-558f5f68c6-gwcv5\" (UID: \"b974245a-b10d-460c-a512-783707368cfb\") " pod="openstack/heat-cfnapi-558f5f68c6-gwcv5"
Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.245390 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d195723-5d1e-4cad-9893-627476bcf980-combined-ca-bundle\") pod \"heat-api-7bc5c74f54-7k5pr\" (UID: \"1d195723-5d1e-4cad-9893-627476bcf980\") " pod="openstack/heat-api-7bc5c74f54-7k5pr"
Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.245412 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b974245a-b10d-460c-a512-783707368cfb-combined-ca-bundle\") pod \"heat-cfnapi-558f5f68c6-gwcv5\" (UID: \"b974245a-b10d-460c-a512-783707368cfb\") " pod="openstack/heat-cfnapi-558f5f68c6-gwcv5"
Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.261385 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5glxx\" (UniqueName: \"kubernetes.io/projected/b974245a-b10d-460c-a512-783707368cfb-kube-api-access-5glxx\") pod \"heat-cfnapi-558f5f68c6-gwcv5\" (UID: \"b974245a-b10d-460c-a512-783707368cfb\") " pod="openstack/heat-cfnapi-558f5f68c6-gwcv5"
Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.268415 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtjs9\" (UniqueName: \"kubernetes.io/projected/1d195723-5d1e-4cad-9893-627476bcf980-kube-api-access-xtjs9\") pod \"heat-api-7bc5c74f54-7k5pr\" (UID: \"1d195723-5d1e-4cad-9893-627476bcf980\") " pod="openstack/heat-api-7bc5c74f54-7k5pr"
Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.425495 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-558f5f68c6-gwcv5"
Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.438770 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-7bc5c74f54-7k5pr"
Nov 27 09:00:16 crc kubenswrapper[4971]: I1127 09:00:16.876475 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-59c8bcfc86-ndvzb"]
Nov 27 09:00:16 crc kubenswrapper[4971]: W1127 09:00:16.886452 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a891ed3_1a18_48f2_8a36_f0cacf4cda34.slice/crio-0d1628be88081b53a3c29c18e97e11d9fd07d80105d2ea6e15671b1ac4a837a4 WatchSource:0}: Error finding container 0d1628be88081b53a3c29c18e97e11d9fd07d80105d2ea6e15671b1ac4a837a4: Status 404 returned error can't find the container with id 0d1628be88081b53a3c29c18e97e11d9fd07d80105d2ea6e15671b1ac4a837a4
Nov 27 09:00:17 crc kubenswrapper[4971]: I1127 09:00:17.158999 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-558f5f68c6-gwcv5"]
Nov 27 09:00:17 crc kubenswrapper[4971]: I1127 09:00:17.374111 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-7bc5c74f54-7k5pr"]
Nov 27 09:00:17 crc kubenswrapper[4971]: W1127 09:00:17.385261 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d195723_5d1e_4cad_9893_627476bcf980.slice/crio-84cde5786383877e1e86551f769feba9c36392144f5c9aae0a3b257f1322ab71 WatchSource:0}: Error finding container 84cde5786383877e1e86551f769feba9c36392144f5c9aae0a3b257f1322ab71: Status 404 returned error can't find the container with id 84cde5786383877e1e86551f769feba9c36392144f5c9aae0a3b257f1322ab71
Nov 27 09:00:17 crc kubenswrapper[4971]: I1127 09:00:17.821423 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-558f5f68c6-gwcv5" event={"ID":"b974245a-b10d-460c-a512-783707368cfb","Type":"ContainerStarted","Data":"13ec9a63edb938b12b06b7b648bf636a56a4687c334b0535bb4452e418419cf0"}
Nov 27 09:00:17 crc kubenswrapper[4971]: I1127 09:00:17.830748 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-7bc5c74f54-7k5pr" event={"ID":"1d195723-5d1e-4cad-9893-627476bcf980","Type":"ContainerStarted","Data":"84cde5786383877e1e86551f769feba9c36392144f5c9aae0a3b257f1322ab71"}
Nov 27 09:00:17 crc kubenswrapper[4971]: I1127 09:00:17.839314 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-59c8bcfc86-ndvzb" event={"ID":"4a891ed3-1a18-48f2-8a36-f0cacf4cda34","Type":"ContainerStarted","Data":"a3be37921ce799d6d5e412c6c31b6c6b5d1a9867d42f8fa50a8d7646c05bdc97"}
Nov 27 09:00:17 crc kubenswrapper[4971]: I1127 09:00:17.839375 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-59c8bcfc86-ndvzb" event={"ID":"4a891ed3-1a18-48f2-8a36-f0cacf4cda34","Type":"ContainerStarted","Data":"0d1628be88081b53a3c29c18e97e11d9fd07d80105d2ea6e15671b1ac4a837a4"}
Nov 27 09:00:17 crc kubenswrapper[4971]: I1127 09:00:17.840044 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-59c8bcfc86-ndvzb"
Nov 27 09:00:17 crc kubenswrapper[4971]: I1127 09:00:17.885009 4971 generic.go:334] "Generic (PLEG): container finished" podID="440b3490-db25-483b-af49-43a5db3530ec" containerID="95c5457291cada2eb4c410ff6b81ff9b19eac730b04348ee2a1e7e60c4aef082" exitCode=137
Nov 27 09:00:17 crc kubenswrapper[4971]: I1127 09:00:17.885099 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7f8c46cf7c-qfvm8" event={"ID":"440b3490-db25-483b-af49-43a5db3530ec","Type":"ContainerDied","Data":"95c5457291cada2eb4c410ff6b81ff9b19eac730b04348ee2a1e7e60c4aef082"}
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.200019 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.236094 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-59c8bcfc86-ndvzb" podStartSLOduration=3.236063177 podStartE2EDuration="3.236063177s" podCreationTimestamp="2025-11-27 09:00:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 09:00:17.877775268 +0000 UTC m=+7656.069819196" watchObservedRunningTime="2025-11-27 09:00:18.236063177 +0000 UTC m=+7656.428107085"
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.305574 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzsr9\" (UniqueName: \"kubernetes.io/projected/440b3490-db25-483b-af49-43a5db3530ec-kube-api-access-dzsr9\") pod \"440b3490-db25-483b-af49-43a5db3530ec\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") "
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.305863 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/440b3490-db25-483b-af49-43a5db3530ec-logs\") pod \"440b3490-db25-483b-af49-43a5db3530ec\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") "
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.305914 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/440b3490-db25-483b-af49-43a5db3530ec-config-data\") pod \"440b3490-db25-483b-af49-43a5db3530ec\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") "
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.305959 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/440b3490-db25-483b-af49-43a5db3530ec-scripts\") pod \"440b3490-db25-483b-af49-43a5db3530ec\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") "
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.306038 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/440b3490-db25-483b-af49-43a5db3530ec-horizon-secret-key\") pod \"440b3490-db25-483b-af49-43a5db3530ec\" (UID: \"440b3490-db25-483b-af49-43a5db3530ec\") "
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.306620 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/440b3490-db25-483b-af49-43a5db3530ec-logs" (OuterVolumeSpecName: "logs") pod "440b3490-db25-483b-af49-43a5db3530ec" (UID: "440b3490-db25-483b-af49-43a5db3530ec"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.307155 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/440b3490-db25-483b-af49-43a5db3530ec-logs\") on node \"crc\" DevicePath \"\""
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.315714 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/440b3490-db25-483b-af49-43a5db3530ec-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "440b3490-db25-483b-af49-43a5db3530ec" (UID: "440b3490-db25-483b-af49-43a5db3530ec"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.319702 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/440b3490-db25-483b-af49-43a5db3530ec-kube-api-access-dzsr9" (OuterVolumeSpecName: "kube-api-access-dzsr9") pod "440b3490-db25-483b-af49-43a5db3530ec" (UID: "440b3490-db25-483b-af49-43a5db3530ec"). InnerVolumeSpecName "kube-api-access-dzsr9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.361339 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/440b3490-db25-483b-af49-43a5db3530ec-scripts" (OuterVolumeSpecName: "scripts") pod "440b3490-db25-483b-af49-43a5db3530ec" (UID: "440b3490-db25-483b-af49-43a5db3530ec"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.363215 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/440b3490-db25-483b-af49-43a5db3530ec-config-data" (OuterVolumeSpecName: "config-data") pod "440b3490-db25-483b-af49-43a5db3530ec" (UID: "440b3490-db25-483b-af49-43a5db3530ec"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.411408 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/440b3490-db25-483b-af49-43a5db3530ec-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.411461 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/440b3490-db25-483b-af49-43a5db3530ec-scripts\") on node \"crc\" DevicePath \"\""
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.411474 4971 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/440b3490-db25-483b-af49-43a5db3530ec-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.411488 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzsr9\" (UniqueName: \"kubernetes.io/projected/440b3490-db25-483b-af49-43a5db3530ec-kube-api-access-dzsr9\") on node \"crc\" DevicePath \"\""
Nov 27 09:00:18 crc kubenswrapper[4971]: E1127 09:00:18.824570 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod440b3490_db25_483b_af49_43a5db3530ec.slice\": RecentStats: unable to find data in memory cache]"
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.903861 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7f8c46cf7c-qfvm8" event={"ID":"440b3490-db25-483b-af49-43a5db3530ec","Type":"ContainerDied","Data":"feebe1cad6aa7ceaa0ff83bcc3f5cc86f289be27a8e2233f1e40c17c6489085d"}
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.903909 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7f8c46cf7c-qfvm8"
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.903951 4971 scope.go:117] "RemoveContainer" containerID="47503c28fed2774d18f524614f9f31f9c58db58a509057b8aba5ab54bb60aefd"
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.949022 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7f8c46cf7c-qfvm8"]
Nov 27 09:00:18 crc kubenswrapper[4971]: I1127 09:00:18.959017 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7f8c46cf7c-qfvm8"]
Nov 27 09:00:19 crc kubenswrapper[4971]: I1127 09:00:19.309838 4971 scope.go:117] "RemoveContainer" containerID="95c5457291cada2eb4c410ff6b81ff9b19eac730b04348ee2a1e7e60c4aef082"
Nov 27 09:00:20 crc kubenswrapper[4971]: I1127 09:00:20.563160 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="440b3490-db25-483b-af49-43a5db3530ec" path="/var/lib/kubelet/pods/440b3490-db25-483b-af49-43a5db3530ec/volumes"
Nov 27 09:00:20 crc kubenswrapper[4971]: I1127 09:00:20.938020 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-558f5f68c6-gwcv5" event={"ID":"b974245a-b10d-460c-a512-783707368cfb","Type":"ContainerStarted","Data":"7b0ed4683480b81a76474e9f65330a1fe2b6898e76e6dba1d87198e98965cc59"}
Nov 27 09:00:20 crc kubenswrapper[4971]: I1127 09:00:20.938208 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-558f5f68c6-gwcv5"
Nov 27 09:00:20 crc kubenswrapper[4971]: I1127 09:00:20.942164 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-7bc5c74f54-7k5pr" event={"ID":"1d195723-5d1e-4cad-9893-627476bcf980","Type":"ContainerStarted","Data":"2e7c5aaf18ade0ddf874d3d5bfcf6c8977d20992a15895c993458efda15361ee"}
Nov 27 09:00:20 crc kubenswrapper[4971]: I1127 09:00:20.942244 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-7bc5c74f54-7k5pr"
Nov 27 09:00:20 crc kubenswrapper[4971]: I1127 09:00:20.992428 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-7bc5c74f54-7k5pr" podStartSLOduration=3.249057179 podStartE2EDuration="5.992402296s" podCreationTimestamp="2025-11-27 09:00:15 +0000 UTC" firstStartedPulling="2025-11-27 09:00:17.397221255 +0000 UTC m=+7655.589265173" lastFinishedPulling="2025-11-27 09:00:20.140566372 +0000 UTC m=+7658.332610290" observedRunningTime="2025-11-27 09:00:20.987975759 +0000 UTC m=+7659.180019677" watchObservedRunningTime="2025-11-27 09:00:20.992402296 +0000 UTC m=+7659.184446214"
Nov 27 09:00:21 crc kubenswrapper[4971]: I1127 09:00:21.000511 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-558f5f68c6-gwcv5" podStartSLOduration=3.023196264 podStartE2EDuration="6.000479047s" podCreationTimestamp="2025-11-27 09:00:15 +0000 UTC" firstStartedPulling="2025-11-27 09:00:17.168719475 +0000 UTC m=+7655.360763393" lastFinishedPulling="2025-11-27 09:00:20.146002258 +0000 UTC m=+7658.338046176" observedRunningTime="2025-11-27 09:00:20.958498094 +0000 UTC m=+7659.150542022" watchObservedRunningTime="2025-11-27 09:00:21.000479047 +0000 UTC m=+7659.192522965"
Nov 27 09:00:28 crc kubenswrapper[4971]: I1127 09:00:28.003838 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-558f5f68c6-gwcv5"
Nov 27 09:00:28 crc kubenswrapper[4971]: I1127 09:00:28.182657 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-fd46b4db5-sp4l9"
pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 09:00:28 crc kubenswrapper[4971]: I1127 09:00:28.288460 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-7bc5c74f54-7k5pr" Nov 27 09:00:30 crc kubenswrapper[4971]: I1127 09:00:30.239708 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-fd46b4db5-sp4l9" Nov 27 09:00:30 crc kubenswrapper[4971]: I1127 09:00:30.387119 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5dbbdfc9bf-94jbx"] Nov 27 09:00:30 crc kubenswrapper[4971]: I1127 09:00:30.387410 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5dbbdfc9bf-94jbx" podUID="d3243de9-39c3-4511-9e89-131659e2179a" containerName="horizon-log" containerID="cri-o://0e05a2f0c75fe999e426ce0eb0775cfc00e011e3ef4ed7fe1873919354d956aa" gracePeriod=30 Nov 27 09:00:30 crc kubenswrapper[4971]: I1127 09:00:30.387584 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5dbbdfc9bf-94jbx" podUID="d3243de9-39c3-4511-9e89-131659e2179a" containerName="horizon" containerID="cri-o://9074e2006c1a346a0d34c275b536f69c2b2351cf01413463b6854cf83232ce6f" gracePeriod=30 Nov 27 09:00:33 crc kubenswrapper[4971]: I1127 09:00:33.719898 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5dbbdfc9bf-94jbx" podUID="d3243de9-39c3-4511-9e89-131659e2179a" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.109:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.109:8080: connect: connection refused" Nov 27 09:00:34 crc kubenswrapper[4971]: I1127 09:00:34.072255 4971 generic.go:334] "Generic (PLEG): container finished" podID="d3243de9-39c3-4511-9e89-131659e2179a" containerID="9074e2006c1a346a0d34c275b536f69c2b2351cf01413463b6854cf83232ce6f" exitCode=0 Nov 27 09:00:34 crc kubenswrapper[4971]: I1127 09:00:34.072314 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5dbbdfc9bf-94jbx" event={"ID":"d3243de9-39c3-4511-9e89-131659e2179a","Type":"ContainerDied","Data":"9074e2006c1a346a0d34c275b536f69c2b2351cf01413463b6854cf83232ce6f"} Nov 27 09:00:36 crc kubenswrapper[4971]: I1127 09:00:36.272453 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-59c8bcfc86-ndvzb" Nov 27 09:00:43 crc kubenswrapper[4971]: I1127 09:00:43.720950 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5dbbdfc9bf-94jbx" podUID="d3243de9-39c3-4511-9e89-131659e2179a" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.109:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.109:8080: connect: connection refused" Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.043199 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-jszcg"] Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.084724 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-dc82-account-create-update-9kklc"] Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.094436 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-jszcg"] Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.106427 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-dc82-account-create-update-9kklc"] Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.115878 4971 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp"] Nov 27 09:00:53 crc kubenswrapper[4971]: E1127 09:00:53.116456 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="440b3490-db25-483b-af49-43a5db3530ec" containerName="horizon" Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.116475 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="440b3490-db25-483b-af49-43a5db3530ec" containerName="horizon" Nov 27 09:00:53 crc kubenswrapper[4971]: E1127 09:00:53.116504 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="440b3490-db25-483b-af49-43a5db3530ec" containerName="horizon-log" Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.116513 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="440b3490-db25-483b-af49-43a5db3530ec" containerName="horizon-log" Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.116742 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="440b3490-db25-483b-af49-43a5db3530ec" containerName="horizon-log" Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.116782 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="440b3490-db25-483b-af49-43a5db3530ec" containerName="horizon" Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.118385 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.122522 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.126286 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp"] Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.271808 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ada68182-ff97-4a6f-83a5-10dba9af7324-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp\" (UID: \"ada68182-ff97-4a6f-83a5-10dba9af7324\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.271929 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvzqp\" (UniqueName: \"kubernetes.io/projected/ada68182-ff97-4a6f-83a5-10dba9af7324-kube-api-access-hvzqp\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp\" (UID: \"ada68182-ff97-4a6f-83a5-10dba9af7324\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.271952 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ada68182-ff97-4a6f-83a5-10dba9af7324-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp\" (UID: \"ada68182-ff97-4a6f-83a5-10dba9af7324\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.374254 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/ada68182-ff97-4a6f-83a5-10dba9af7324-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp\" (UID: \"ada68182-ff97-4a6f-83a5-10dba9af7324\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.374338 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ada68182-ff97-4a6f-83a5-10dba9af7324-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp\" (UID: \"ada68182-ff97-4a6f-83a5-10dba9af7324\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.374374 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvzqp\" (UniqueName: \"kubernetes.io/projected/ada68182-ff97-4a6f-83a5-10dba9af7324-kube-api-access-hvzqp\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp\" (UID: \"ada68182-ff97-4a6f-83a5-10dba9af7324\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.374888 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ada68182-ff97-4a6f-83a5-10dba9af7324-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp\" (UID: \"ada68182-ff97-4a6f-83a5-10dba9af7324\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.374962 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ada68182-ff97-4a6f-83a5-10dba9af7324-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp\" (UID: \"ada68182-ff97-4a6f-83a5-10dba9af7324\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.396454 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvzqp\" (UniqueName: \"kubernetes.io/projected/ada68182-ff97-4a6f-83a5-10dba9af7324-kube-api-access-hvzqp\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp\" (UID: \"ada68182-ff97-4a6f-83a5-10dba9af7324\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.458862 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.719982 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5dbbdfc9bf-94jbx" podUID="d3243de9-39c3-4511-9e89-131659e2179a" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.109:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.109:8080: connect: connection refused" Nov 27 09:00:53 crc kubenswrapper[4971]: I1127 09:00:53.720506 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5dbbdfc9bf-94jbx" Nov 27 09:00:54 crc kubenswrapper[4971]: I1127 09:00:54.042372 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp"] Nov 27 09:00:54 crc kubenswrapper[4971]: I1127 09:00:54.304290 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" event={"ID":"ada68182-ff97-4a6f-83a5-10dba9af7324","Type":"ContainerStarted","Data":"3a59fcea13f1fd3e31e4372f46f53a8e71ebfb601c7e8b012ca10644d120c30c"} Nov 27 09:00:54 crc kubenswrapper[4971]: I1127 09:00:54.304758 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" event={"ID":"ada68182-ff97-4a6f-83a5-10dba9af7324","Type":"ContainerStarted","Data":"a00c865ff3f51fa8917fb95dc82571134d40f47dfac254234b836433e4e1bc5b"} Nov 27 09:00:54 crc kubenswrapper[4971]: I1127 09:00:54.564146 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5edbdbbc-7378-431c-aa48-e3f1c19ef1af" path="/var/lib/kubelet/pods/5edbdbbc-7378-431c-aa48-e3f1c19ef1af/volumes" Nov 27 09:00:54 crc kubenswrapper[4971]: I1127 09:00:54.565101 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e966785d-966d-479c-8bff-9ed9214a5162" path="/var/lib/kubelet/pods/e966785d-966d-479c-8bff-9ed9214a5162/volumes" Nov 27 09:00:54 crc kubenswrapper[4971]: I1127 09:00:54.760282 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5m5cg"] Nov 27 09:00:54 crc kubenswrapper[4971]: I1127 09:00:54.762905 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5m5cg" Nov 27 09:00:54 crc kubenswrapper[4971]: I1127 09:00:54.773671 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5m5cg"] Nov 27 09:00:54 crc kubenswrapper[4971]: I1127 09:00:54.783186 4971 scope.go:117] "RemoveContainer" containerID="43de497582f93c27b22667ff78779b7acd0e3dee68a6f43624fe5880dc2e2a1c" Nov 27 09:00:54 crc kubenswrapper[4971]: I1127 09:00:54.827079 4971 scope.go:117] "RemoveContainer" containerID="6532fa198ed3bcaa1fa758d5c5adda55eeaf9f943f209671bdb0ae910d9af30b" Nov 27 09:00:54 crc kubenswrapper[4971]: I1127 09:00:54.880603 4971 scope.go:117] "RemoveContainer" containerID="dc4a5a7ba4e0734b4a4819b8af4682864054b3b22e1bea26f0e720371fa4ff31" Nov 27 09:00:54 crc kubenswrapper[4971]: I1127 09:00:54.908809 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mwq4\" (UniqueName: \"kubernetes.io/projected/4196dda6-b233-434f-bc33-61f0506fb55b-kube-api-access-9mwq4\") pod \"redhat-operators-5m5cg\" (UID: \"4196dda6-b233-434f-bc33-61f0506fb55b\") " pod="openshift-marketplace/redhat-operators-5m5cg" Nov 27 09:00:54 crc kubenswrapper[4971]: I1127 09:00:54.909041 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4196dda6-b233-434f-bc33-61f0506fb55b-utilities\") pod \"redhat-operators-5m5cg\" (UID: \"4196dda6-b233-434f-bc33-61f0506fb55b\") " pod="openshift-marketplace/redhat-operators-5m5cg" Nov 27 09:00:54 crc kubenswrapper[4971]: I1127 09:00:54.909206 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4196dda6-b233-434f-bc33-61f0506fb55b-catalog-content\") pod \"redhat-operators-5m5cg\" (UID: \"4196dda6-b233-434f-bc33-61f0506fb55b\") " pod="openshift-marketplace/redhat-operators-5m5cg" Nov 27 09:00:54 crc kubenswrapper[4971]: I1127 09:00:54.945900 4971 scope.go:117] "RemoveContainer" containerID="9da6c734226223aace17cf0a409c888b16889ef45e1f5ca74431516f4e0282f4" Nov 27 09:00:54 crc kubenswrapper[4971]: I1127 09:00:54.976384 4971 scope.go:117] "RemoveContainer" containerID="a399d2c0445ad8d19248882370913509dc9c9e9bc68625f0cee5248cd88d4039" Nov 27 09:00:55 crc kubenswrapper[4971]: I1127 09:00:55.012643 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4196dda6-b233-434f-bc33-61f0506fb55b-utilities\") pod \"redhat-operators-5m5cg\" (UID: \"4196dda6-b233-434f-bc33-61f0506fb55b\") " pod="openshift-marketplace/redhat-operators-5m5cg" Nov 27 09:00:55 crc kubenswrapper[4971]: I1127 09:00:55.012804 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4196dda6-b233-434f-bc33-61f0506fb55b-catalog-content\") pod \"redhat-operators-5m5cg\" (UID: \"4196dda6-b233-434f-bc33-61f0506fb55b\") " pod="openshift-marketplace/redhat-operators-5m5cg" Nov 27 09:00:55 crc kubenswrapper[4971]: I1127 09:00:55.012994 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mwq4\" (UniqueName: \"kubernetes.io/projected/4196dda6-b233-434f-bc33-61f0506fb55b-kube-api-access-9mwq4\") pod \"redhat-operators-5m5cg\" (UID: \"4196dda6-b233-434f-bc33-61f0506fb55b\") " pod="openshift-marketplace/redhat-operators-5m5cg" Nov 27 
09:00:55 crc kubenswrapper[4971]: I1127 09:00:55.013320 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4196dda6-b233-434f-bc33-61f0506fb55b-utilities\") pod \"redhat-operators-5m5cg\" (UID: \"4196dda6-b233-434f-bc33-61f0506fb55b\") " pod="openshift-marketplace/redhat-operators-5m5cg" Nov 27 09:00:55 crc kubenswrapper[4971]: I1127 09:00:55.013780 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4196dda6-b233-434f-bc33-61f0506fb55b-catalog-content\") pod \"redhat-operators-5m5cg\" (UID: \"4196dda6-b233-434f-bc33-61f0506fb55b\") " pod="openshift-marketplace/redhat-operators-5m5cg" Nov 27 09:00:55 crc kubenswrapper[4971]: I1127 09:00:55.043125 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mwq4\" (UniqueName: \"kubernetes.io/projected/4196dda6-b233-434f-bc33-61f0506fb55b-kube-api-access-9mwq4\") pod \"redhat-operators-5m5cg\" (UID: \"4196dda6-b233-434f-bc33-61f0506fb55b\") " pod="openshift-marketplace/redhat-operators-5m5cg" Nov 27 09:00:55 crc kubenswrapper[4971]: I1127 09:00:55.094744 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5m5cg" Nov 27 09:00:55 crc kubenswrapper[4971]: I1127 09:00:55.327071 4971 generic.go:334] "Generic (PLEG): container finished" podID="ada68182-ff97-4a6f-83a5-10dba9af7324" containerID="3a59fcea13f1fd3e31e4372f46f53a8e71ebfb601c7e8b012ca10644d120c30c" exitCode=0 Nov 27 09:00:55 crc kubenswrapper[4971]: I1127 09:00:55.327157 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" event={"ID":"ada68182-ff97-4a6f-83a5-10dba9af7324","Type":"ContainerDied","Data":"3a59fcea13f1fd3e31e4372f46f53a8e71ebfb601c7e8b012ca10644d120c30c"} Nov 27 09:00:55 crc kubenswrapper[4971]: I1127 09:00:55.666409 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5m5cg"] Nov 27 09:00:56 crc kubenswrapper[4971]: I1127 09:00:56.339843 4971 generic.go:334] "Generic (PLEG): container finished" podID="4196dda6-b233-434f-bc33-61f0506fb55b" containerID="931beb9b5904eabdaf82774082e7ec5a66076177c9e5d6d0b67ea3f986c6ddba" exitCode=0 Nov 27 09:00:56 crc kubenswrapper[4971]: I1127 09:00:56.340282 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5m5cg" event={"ID":"4196dda6-b233-434f-bc33-61f0506fb55b","Type":"ContainerDied","Data":"931beb9b5904eabdaf82774082e7ec5a66076177c9e5d6d0b67ea3f986c6ddba"} Nov 27 09:00:56 crc kubenswrapper[4971]: I1127 09:00:56.340315 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5m5cg" event={"ID":"4196dda6-b233-434f-bc33-61f0506fb55b","Type":"ContainerStarted","Data":"09517be3c15a20c50793e86ea7a1c487ba6160a9949d0493df62f84da1dad13f"} Nov 27 09:00:56 crc kubenswrapper[4971]: I1127 09:00:56.414200 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 09:00:56 crc kubenswrapper[4971]: I1127 09:00:56.414280 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" 
podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 09:01:00 crc kubenswrapper[4971]: I1127 09:01:00.163641 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29403901-xc2hf"] Nov 27 09:01:00 crc kubenswrapper[4971]: I1127 09:01:00.166035 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29403901-xc2hf" Nov 27 09:01:00 crc kubenswrapper[4971]: I1127 09:01:00.189798 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29403901-xc2hf"] Nov 27 09:01:00 crc kubenswrapper[4971]: I1127 09:01:00.247456 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnvqh\" (UniqueName: \"kubernetes.io/projected/3a9eaaae-c63a-4924-8833-138a78aacd29-kube-api-access-mnvqh\") pod \"keystone-cron-29403901-xc2hf\" (UID: \"3a9eaaae-c63a-4924-8833-138a78aacd29\") " pod="openstack/keystone-cron-29403901-xc2hf" Nov 27 09:01:00 crc kubenswrapper[4971]: I1127 09:01:00.247610 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a9eaaae-c63a-4924-8833-138a78aacd29-combined-ca-bundle\") pod \"keystone-cron-29403901-xc2hf\" (UID: \"3a9eaaae-c63a-4924-8833-138a78aacd29\") " pod="openstack/keystone-cron-29403901-xc2hf" Nov 27 09:01:00 crc kubenswrapper[4971]: I1127 09:01:00.247676 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a9eaaae-c63a-4924-8833-138a78aacd29-config-data\") pod \"keystone-cron-29403901-xc2hf\" (UID: \"3a9eaaae-c63a-4924-8833-138a78aacd29\") " pod="openstack/keystone-cron-29403901-xc2hf" Nov 27 09:01:00 crc kubenswrapper[4971]: I1127 09:01:00.247735 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a9eaaae-c63a-4924-8833-138a78aacd29-fernet-keys\") pod \"keystone-cron-29403901-xc2hf\" (UID: \"3a9eaaae-c63a-4924-8833-138a78aacd29\") " pod="openstack/keystone-cron-29403901-xc2hf" Nov 27 09:01:00 crc kubenswrapper[4971]: I1127 09:01:00.350155 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a9eaaae-c63a-4924-8833-138a78aacd29-config-data\") pod \"keystone-cron-29403901-xc2hf\" (UID: \"3a9eaaae-c63a-4924-8833-138a78aacd29\") " pod="openstack/keystone-cron-29403901-xc2hf" Nov 27 09:01:00 crc kubenswrapper[4971]: I1127 09:01:00.350222 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a9eaaae-c63a-4924-8833-138a78aacd29-fernet-keys\") pod \"keystone-cron-29403901-xc2hf\" (UID: \"3a9eaaae-c63a-4924-8833-138a78aacd29\") " pod="openstack/keystone-cron-29403901-xc2hf" Nov 27 09:01:00 crc kubenswrapper[4971]: I1127 09:01:00.350321 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnvqh\" (UniqueName: \"kubernetes.io/projected/3a9eaaae-c63a-4924-8833-138a78aacd29-kube-api-access-mnvqh\") pod \"keystone-cron-29403901-xc2hf\" (UID: \"3a9eaaae-c63a-4924-8833-138a78aacd29\") " pod="openstack/keystone-cron-29403901-xc2hf" Nov 27 09:01:00 crc kubenswrapper[4971]: 
I1127 09:01:00.350368 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a9eaaae-c63a-4924-8833-138a78aacd29-combined-ca-bundle\") pod \"keystone-cron-29403901-xc2hf\" (UID: \"3a9eaaae-c63a-4924-8833-138a78aacd29\") " pod="openstack/keystone-cron-29403901-xc2hf" Nov 27 09:01:00 crc kubenswrapper[4971]: I1127 09:01:00.358406 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a9eaaae-c63a-4924-8833-138a78aacd29-fernet-keys\") pod \"keystone-cron-29403901-xc2hf\" (UID: \"3a9eaaae-c63a-4924-8833-138a78aacd29\") " pod="openstack/keystone-cron-29403901-xc2hf" Nov 27 09:01:00 crc kubenswrapper[4971]: I1127 09:01:00.358482 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a9eaaae-c63a-4924-8833-138a78aacd29-config-data\") pod \"keystone-cron-29403901-xc2hf\" (UID: \"3a9eaaae-c63a-4924-8833-138a78aacd29\") " pod="openstack/keystone-cron-29403901-xc2hf" Nov 27 09:01:00 crc kubenswrapper[4971]: I1127 09:01:00.360307 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a9eaaae-c63a-4924-8833-138a78aacd29-combined-ca-bundle\") pod \"keystone-cron-29403901-xc2hf\" (UID: \"3a9eaaae-c63a-4924-8833-138a78aacd29\") " pod="openstack/keystone-cron-29403901-xc2hf" Nov 27 09:01:00 crc kubenswrapper[4971]: I1127 09:01:00.369313 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnvqh\" (UniqueName: \"kubernetes.io/projected/3a9eaaae-c63a-4924-8833-138a78aacd29-kube-api-access-mnvqh\") pod \"keystone-cron-29403901-xc2hf\" (UID: \"3a9eaaae-c63a-4924-8833-138a78aacd29\") " pod="openstack/keystone-cron-29403901-xc2hf" Nov 27 09:01:00 crc kubenswrapper[4971]: I1127 09:01:00.493378 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29403901-xc2hf" Nov 27 09:01:00 crc kubenswrapper[4971]: I1127 09:01:00.919930 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5dbbdfc9bf-94jbx" Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.067754 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29403901-xc2hf"] Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.068619 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d3243de9-39c3-4511-9e89-131659e2179a-scripts\") pod \"d3243de9-39c3-4511-9e89-131659e2179a\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.068682 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qqf8b\" (UniqueName: \"kubernetes.io/projected/d3243de9-39c3-4511-9e89-131659e2179a-kube-api-access-qqf8b\") pod \"d3243de9-39c3-4511-9e89-131659e2179a\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.068820 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3243de9-39c3-4511-9e89-131659e2179a-logs\") pod \"d3243de9-39c3-4511-9e89-131659e2179a\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.068985 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d3243de9-39c3-4511-9e89-131659e2179a-horizon-secret-key\") pod \"d3243de9-39c3-4511-9e89-131659e2179a\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.069029 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d3243de9-39c3-4511-9e89-131659e2179a-config-data\") pod \"d3243de9-39c3-4511-9e89-131659e2179a\" (UID: \"d3243de9-39c3-4511-9e89-131659e2179a\") " Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.070173 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3243de9-39c3-4511-9e89-131659e2179a-logs" (OuterVolumeSpecName: "logs") pod "d3243de9-39c3-4511-9e89-131659e2179a" (UID: "d3243de9-39c3-4511-9e89-131659e2179a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.077104 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3243de9-39c3-4511-9e89-131659e2179a-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "d3243de9-39c3-4511-9e89-131659e2179a" (UID: "d3243de9-39c3-4511-9e89-131659e2179a"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.077639 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3243de9-39c3-4511-9e89-131659e2179a-kube-api-access-qqf8b" (OuterVolumeSpecName: "kube-api-access-qqf8b") pod "d3243de9-39c3-4511-9e89-131659e2179a" (UID: "d3243de9-39c3-4511-9e89-131659e2179a"). InnerVolumeSpecName "kube-api-access-qqf8b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.118353 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3243de9-39c3-4511-9e89-131659e2179a-scripts" (OuterVolumeSpecName: "scripts") pod "d3243de9-39c3-4511-9e89-131659e2179a" (UID: "d3243de9-39c3-4511-9e89-131659e2179a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.126999 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3243de9-39c3-4511-9e89-131659e2179a-config-data" (OuterVolumeSpecName: "config-data") pod "d3243de9-39c3-4511-9e89-131659e2179a" (UID: "d3243de9-39c3-4511-9e89-131659e2179a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.172832 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3243de9-39c3-4511-9e89-131659e2179a-logs\") on node \"crc\" DevicePath \"\"" Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.172873 4971 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d3243de9-39c3-4511-9e89-131659e2179a-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.172886 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d3243de9-39c3-4511-9e89-131659e2179a-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.172898 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d3243de9-39c3-4511-9e89-131659e2179a-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.172912 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qqf8b\" (UniqueName: \"kubernetes.io/projected/d3243de9-39c3-4511-9e89-131659e2179a-kube-api-access-qqf8b\") on node \"crc\" DevicePath \"\"" Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.395027 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5m5cg" event={"ID":"4196dda6-b233-434f-bc33-61f0506fb55b","Type":"ContainerStarted","Data":"1c757d026188f0673cce1b9e6ae9004292ea77c85be8a956ff6268e29492acb2"} Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.397728 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29403901-xc2hf" event={"ID":"3a9eaaae-c63a-4924-8833-138a78aacd29","Type":"ContainerStarted","Data":"4729aa13bfcbb51351b914061f97dc3b76270542ec4d20828e3dbae6bbfc6d50"} Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.397764 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29403901-xc2hf" event={"ID":"3a9eaaae-c63a-4924-8833-138a78aacd29","Type":"ContainerStarted","Data":"710d005e23c0bd47aac6f86c72481255f4d7e9b63f066e80321d4b86481376ff"} Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.400034 4971 generic.go:334] "Generic (PLEG): container finished" podID="d3243de9-39c3-4511-9e89-131659e2179a" containerID="0e05a2f0c75fe999e426ce0eb0775cfc00e011e3ef4ed7fe1873919354d956aa" exitCode=137 Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.400065 4971 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/horizon-5dbbdfc9bf-94jbx" Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.400080 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5dbbdfc9bf-94jbx" event={"ID":"d3243de9-39c3-4511-9e89-131659e2179a","Type":"ContainerDied","Data":"0e05a2f0c75fe999e426ce0eb0775cfc00e011e3ef4ed7fe1873919354d956aa"} Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.400464 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5dbbdfc9bf-94jbx" event={"ID":"d3243de9-39c3-4511-9e89-131659e2179a","Type":"ContainerDied","Data":"ce02a92d5d0f116031f97f0b6cb98f532280a725ac29cbf6b4697e49498198f1"} Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.400495 4971 scope.go:117] "RemoveContainer" containerID="9074e2006c1a346a0d34c275b536f69c2b2351cf01413463b6854cf83232ce6f" Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.409772 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" event={"ID":"ada68182-ff97-4a6f-83a5-10dba9af7324","Type":"ContainerStarted","Data":"aca78d491f5f1d03a76dae989333f3c2893019287e717e249378b9b153c8341c"} Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.471224 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29403901-xc2hf" podStartSLOduration=1.471195896 podStartE2EDuration="1.471195896s" podCreationTimestamp="2025-11-27 09:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 09:01:01.446029985 +0000 UTC m=+7699.638073933" watchObservedRunningTime="2025-11-27 09:01:01.471195896 +0000 UTC m=+7699.663239814" Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.511459 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5dbbdfc9bf-94jbx"] Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.522428 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5dbbdfc9bf-94jbx"] Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.582853 4971 scope.go:117] "RemoveContainer" containerID="0e05a2f0c75fe999e426ce0eb0775cfc00e011e3ef4ed7fe1873919354d956aa" Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.619812 4971 scope.go:117] "RemoveContainer" containerID="9074e2006c1a346a0d34c275b536f69c2b2351cf01413463b6854cf83232ce6f" Nov 27 09:01:01 crc kubenswrapper[4971]: E1127 09:01:01.620658 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9074e2006c1a346a0d34c275b536f69c2b2351cf01413463b6854cf83232ce6f\": container with ID starting with 9074e2006c1a346a0d34c275b536f69c2b2351cf01413463b6854cf83232ce6f not found: ID does not exist" containerID="9074e2006c1a346a0d34c275b536f69c2b2351cf01413463b6854cf83232ce6f" Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.620705 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9074e2006c1a346a0d34c275b536f69c2b2351cf01413463b6854cf83232ce6f"} err="failed to get container status \"9074e2006c1a346a0d34c275b536f69c2b2351cf01413463b6854cf83232ce6f\": rpc error: code = NotFound desc = could not find container \"9074e2006c1a346a0d34c275b536f69c2b2351cf01413463b6854cf83232ce6f\": container with ID starting with 9074e2006c1a346a0d34c275b536f69c2b2351cf01413463b6854cf83232ce6f not found: ID does not exist" Nov 27 09:01:01 
crc kubenswrapper[4971]: I1127 09:01:01.620733 4971 scope.go:117] "RemoveContainer" containerID="0e05a2f0c75fe999e426ce0eb0775cfc00e011e3ef4ed7fe1873919354d956aa" Nov 27 09:01:01 crc kubenswrapper[4971]: E1127 09:01:01.621426 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e05a2f0c75fe999e426ce0eb0775cfc00e011e3ef4ed7fe1873919354d956aa\": container with ID starting with 0e05a2f0c75fe999e426ce0eb0775cfc00e011e3ef4ed7fe1873919354d956aa not found: ID does not exist" containerID="0e05a2f0c75fe999e426ce0eb0775cfc00e011e3ef4ed7fe1873919354d956aa" Nov 27 09:01:01 crc kubenswrapper[4971]: I1127 09:01:01.621462 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e05a2f0c75fe999e426ce0eb0775cfc00e011e3ef4ed7fe1873919354d956aa"} err="failed to get container status \"0e05a2f0c75fe999e426ce0eb0775cfc00e011e3ef4ed7fe1873919354d956aa\": rpc error: code = NotFound desc = could not find container \"0e05a2f0c75fe999e426ce0eb0775cfc00e011e3ef4ed7fe1873919354d956aa\": container with ID starting with 0e05a2f0c75fe999e426ce0eb0775cfc00e011e3ef4ed7fe1873919354d956aa not found: ID does not exist" Nov 27 09:01:02 crc kubenswrapper[4971]: I1127 09:01:02.421453 4971 generic.go:334] "Generic (PLEG): container finished" podID="4196dda6-b233-434f-bc33-61f0506fb55b" containerID="1c757d026188f0673cce1b9e6ae9004292ea77c85be8a956ff6268e29492acb2" exitCode=0 Nov 27 09:01:02 crc kubenswrapper[4971]: I1127 09:01:02.421551 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5m5cg" event={"ID":"4196dda6-b233-434f-bc33-61f0506fb55b","Type":"ContainerDied","Data":"1c757d026188f0673cce1b9e6ae9004292ea77c85be8a956ff6268e29492acb2"} Nov 27 09:01:02 crc kubenswrapper[4971]: I1127 09:01:02.564088 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3243de9-39c3-4511-9e89-131659e2179a" path="/var/lib/kubelet/pods/d3243de9-39c3-4511-9e89-131659e2179a/volumes" Nov 27 09:01:03 crc kubenswrapper[4971]: I1127 09:01:03.436671 4971 generic.go:334] "Generic (PLEG): container finished" podID="ada68182-ff97-4a6f-83a5-10dba9af7324" containerID="aca78d491f5f1d03a76dae989333f3c2893019287e717e249378b9b153c8341c" exitCode=0 Nov 27 09:01:03 crc kubenswrapper[4971]: I1127 09:01:03.436722 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" event={"ID":"ada68182-ff97-4a6f-83a5-10dba9af7324","Type":"ContainerDied","Data":"aca78d491f5f1d03a76dae989333f3c2893019287e717e249378b9b153c8341c"} Nov 27 09:01:04 crc kubenswrapper[4971]: I1127 09:01:04.450431 4971 generic.go:334] "Generic (PLEG): container finished" podID="ada68182-ff97-4a6f-83a5-10dba9af7324" containerID="12f5e0c93729e78153d9ea18b9182fc3cb232231ed61af0959a84e64bb738df4" exitCode=0 Nov 27 09:01:04 crc kubenswrapper[4971]: I1127 09:01:04.450509 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" event={"ID":"ada68182-ff97-4a6f-83a5-10dba9af7324","Type":"ContainerDied","Data":"12f5e0c93729e78153d9ea18b9182fc3cb232231ed61af0959a84e64bb738df4"} Nov 27 09:01:04 crc kubenswrapper[4971]: I1127 09:01:04.454236 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5m5cg" 
event={"ID":"4196dda6-b233-434f-bc33-61f0506fb55b","Type":"ContainerStarted","Data":"758d50d493fdf728d4d179385fbf8d577e4830a2037c29e3efb3322c2e3543c0"} Nov 27 09:01:04 crc kubenswrapper[4971]: I1127 09:01:04.456186 4971 generic.go:334] "Generic (PLEG): container finished" podID="3a9eaaae-c63a-4924-8833-138a78aacd29" containerID="4729aa13bfcbb51351b914061f97dc3b76270542ec4d20828e3dbae6bbfc6d50" exitCode=0 Nov 27 09:01:04 crc kubenswrapper[4971]: I1127 09:01:04.456227 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29403901-xc2hf" event={"ID":"3a9eaaae-c63a-4924-8833-138a78aacd29","Type":"ContainerDied","Data":"4729aa13bfcbb51351b914061f97dc3b76270542ec4d20828e3dbae6bbfc6d50"} Nov 27 09:01:04 crc kubenswrapper[4971]: I1127 09:01:04.503913 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5m5cg" podStartSLOduration=2.787520828 podStartE2EDuration="10.503886986s" podCreationTimestamp="2025-11-27 09:00:54 +0000 UTC" firstStartedPulling="2025-11-27 09:00:56.342793881 +0000 UTC m=+7694.534837799" lastFinishedPulling="2025-11-27 09:01:04.059160029 +0000 UTC m=+7702.251203957" observedRunningTime="2025-11-27 09:01:04.496052301 +0000 UTC m=+7702.688096219" watchObservedRunningTime="2025-11-27 09:01:04.503886986 +0000 UTC m=+7702.695930904" Nov 27 09:01:05 crc kubenswrapper[4971]: I1127 09:01:05.044325 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-s8sgc"] Nov 27 09:01:05 crc kubenswrapper[4971]: I1127 09:01:05.058716 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-s8sgc"] Nov 27 09:01:05 crc kubenswrapper[4971]: I1127 09:01:05.095662 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5m5cg" Nov 27 09:01:05 crc kubenswrapper[4971]: I1127 09:01:05.095764 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5m5cg" Nov 27 09:01:05 crc kubenswrapper[4971]: I1127 09:01:05.886106 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" Nov 27 09:01:05 crc kubenswrapper[4971]: I1127 09:01:05.892972 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29403901-xc2hf" Nov 27 09:01:05 crc kubenswrapper[4971]: I1127 09:01:05.976696 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a9eaaae-c63a-4924-8833-138a78aacd29-combined-ca-bundle\") pod \"3a9eaaae-c63a-4924-8833-138a78aacd29\" (UID: \"3a9eaaae-c63a-4924-8833-138a78aacd29\") " Nov 27 09:01:05 crc kubenswrapper[4971]: I1127 09:01:05.976898 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ada68182-ff97-4a6f-83a5-10dba9af7324-bundle\") pod \"ada68182-ff97-4a6f-83a5-10dba9af7324\" (UID: \"ada68182-ff97-4a6f-83a5-10dba9af7324\") " Nov 27 09:01:05 crc kubenswrapper[4971]: I1127 09:01:05.976978 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a9eaaae-c63a-4924-8833-138a78aacd29-config-data\") pod \"3a9eaaae-c63a-4924-8833-138a78aacd29\" (UID: \"3a9eaaae-c63a-4924-8833-138a78aacd29\") " Nov 27 09:01:05 crc kubenswrapper[4971]: I1127 09:01:05.977016 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnvqh\" (UniqueName: \"kubernetes.io/projected/3a9eaaae-c63a-4924-8833-138a78aacd29-kube-api-access-mnvqh\") pod \"3a9eaaae-c63a-4924-8833-138a78aacd29\" (UID: \"3a9eaaae-c63a-4924-8833-138a78aacd29\") " Nov 27 09:01:05 crc kubenswrapper[4971]: I1127 09:01:05.979367 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ada68182-ff97-4a6f-83a5-10dba9af7324-util\") pod \"ada68182-ff97-4a6f-83a5-10dba9af7324\" (UID: \"ada68182-ff97-4a6f-83a5-10dba9af7324\") " Nov 27 09:01:05 crc kubenswrapper[4971]: I1127 09:01:05.979440 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a9eaaae-c63a-4924-8833-138a78aacd29-fernet-keys\") pod \"3a9eaaae-c63a-4924-8833-138a78aacd29\" (UID: \"3a9eaaae-c63a-4924-8833-138a78aacd29\") " Nov 27 09:01:05 crc kubenswrapper[4971]: I1127 09:01:05.979525 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hvzqp\" (UniqueName: \"kubernetes.io/projected/ada68182-ff97-4a6f-83a5-10dba9af7324-kube-api-access-hvzqp\") pod \"ada68182-ff97-4a6f-83a5-10dba9af7324\" (UID: \"ada68182-ff97-4a6f-83a5-10dba9af7324\") " Nov 27 09:01:05 crc kubenswrapper[4971]: I1127 09:01:05.981428 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ada68182-ff97-4a6f-83a5-10dba9af7324-bundle" (OuterVolumeSpecName: "bundle") pod "ada68182-ff97-4a6f-83a5-10dba9af7324" (UID: "ada68182-ff97-4a6f-83a5-10dba9af7324"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:01:05 crc kubenswrapper[4971]: I1127 09:01:05.984887 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a9eaaae-c63a-4924-8833-138a78aacd29-kube-api-access-mnvqh" (OuterVolumeSpecName: "kube-api-access-mnvqh") pod "3a9eaaae-c63a-4924-8833-138a78aacd29" (UID: "3a9eaaae-c63a-4924-8833-138a78aacd29"). InnerVolumeSpecName "kube-api-access-mnvqh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:01:05 crc kubenswrapper[4971]: I1127 09:01:05.985066 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ada68182-ff97-4a6f-83a5-10dba9af7324-kube-api-access-hvzqp" (OuterVolumeSpecName: "kube-api-access-hvzqp") pod "ada68182-ff97-4a6f-83a5-10dba9af7324" (UID: "ada68182-ff97-4a6f-83a5-10dba9af7324"). InnerVolumeSpecName "kube-api-access-hvzqp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:01:05 crc kubenswrapper[4971]: I1127 09:01:05.985457 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a9eaaae-c63a-4924-8833-138a78aacd29-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3a9eaaae-c63a-4924-8833-138a78aacd29" (UID: "3a9eaaae-c63a-4924-8833-138a78aacd29"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:01:05 crc kubenswrapper[4971]: I1127 09:01:05.994461 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ada68182-ff97-4a6f-83a5-10dba9af7324-util" (OuterVolumeSpecName: "util") pod "ada68182-ff97-4a6f-83a5-10dba9af7324" (UID: "ada68182-ff97-4a6f-83a5-10dba9af7324"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:01:06 crc kubenswrapper[4971]: I1127 09:01:06.008097 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a9eaaae-c63a-4924-8833-138a78aacd29-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3a9eaaae-c63a-4924-8833-138a78aacd29" (UID: "3a9eaaae-c63a-4924-8833-138a78aacd29"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:01:06 crc kubenswrapper[4971]: I1127 09:01:06.036307 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a9eaaae-c63a-4924-8833-138a78aacd29-config-data" (OuterVolumeSpecName: "config-data") pod "3a9eaaae-c63a-4924-8833-138a78aacd29" (UID: "3a9eaaae-c63a-4924-8833-138a78aacd29"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:01:06 crc kubenswrapper[4971]: I1127 09:01:06.083381 4971 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ada68182-ff97-4a6f-83a5-10dba9af7324-util\") on node \"crc\" DevicePath \"\"" Nov 27 09:01:06 crc kubenswrapper[4971]: I1127 09:01:06.083430 4971 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a9eaaae-c63a-4924-8833-138a78aacd29-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 27 09:01:06 crc kubenswrapper[4971]: I1127 09:01:06.083444 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hvzqp\" (UniqueName: \"kubernetes.io/projected/ada68182-ff97-4a6f-83a5-10dba9af7324-kube-api-access-hvzqp\") on node \"crc\" DevicePath \"\"" Nov 27 09:01:06 crc kubenswrapper[4971]: I1127 09:01:06.083457 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a9eaaae-c63a-4924-8833-138a78aacd29-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:01:06 crc kubenswrapper[4971]: I1127 09:01:06.083467 4971 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ada68182-ff97-4a6f-83a5-10dba9af7324-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:01:06 crc kubenswrapper[4971]: I1127 09:01:06.083476 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a9eaaae-c63a-4924-8833-138a78aacd29-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 09:01:06 crc kubenswrapper[4971]: I1127 09:01:06.083488 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnvqh\" (UniqueName: \"kubernetes.io/projected/3a9eaaae-c63a-4924-8833-138a78aacd29-kube-api-access-mnvqh\") on node \"crc\" DevicePath \"\"" Nov 27 09:01:06 crc kubenswrapper[4971]: I1127 09:01:06.156983 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5m5cg" podUID="4196dda6-b233-434f-bc33-61f0506fb55b" containerName="registry-server" probeResult="failure" output=< Nov 27 09:01:06 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 09:01:06 crc kubenswrapper[4971]: > Nov 27 09:01:06 crc kubenswrapper[4971]: I1127 09:01:06.480417 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29403901-xc2hf" Nov 27 09:01:06 crc kubenswrapper[4971]: I1127 09:01:06.482646 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29403901-xc2hf" event={"ID":"3a9eaaae-c63a-4924-8833-138a78aacd29","Type":"ContainerDied","Data":"710d005e23c0bd47aac6f86c72481255f4d7e9b63f066e80321d4b86481376ff"} Nov 27 09:01:06 crc kubenswrapper[4971]: I1127 09:01:06.482700 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="710d005e23c0bd47aac6f86c72481255f4d7e9b63f066e80321d4b86481376ff" Nov 27 09:01:06 crc kubenswrapper[4971]: I1127 09:01:06.489392 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" event={"ID":"ada68182-ff97-4a6f-83a5-10dba9af7324","Type":"ContainerDied","Data":"a00c865ff3f51fa8917fb95dc82571134d40f47dfac254234b836433e4e1bc5b"} Nov 27 09:01:06 crc kubenswrapper[4971]: I1127 09:01:06.489440 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a00c865ff3f51fa8917fb95dc82571134d40f47dfac254234b836433e4e1bc5b" Nov 27 09:01:06 crc kubenswrapper[4971]: I1127 09:01:06.489597 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp" Nov 27 09:01:06 crc kubenswrapper[4971]: I1127 09:01:06.570830 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b89043e-9696-49f5-900b-5ea4c74851a6" path="/var/lib/kubelet/pods/7b89043e-9696-49f5-900b-5ea4c74851a6/volumes" Nov 27 09:01:15 crc kubenswrapper[4971]: I1127 09:01:15.899634 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-shr67"] Nov 27 09:01:15 crc kubenswrapper[4971]: E1127 09:01:15.901184 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ada68182-ff97-4a6f-83a5-10dba9af7324" containerName="pull" Nov 27 09:01:15 crc kubenswrapper[4971]: I1127 09:01:15.901202 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ada68182-ff97-4a6f-83a5-10dba9af7324" containerName="pull" Nov 27 09:01:15 crc kubenswrapper[4971]: E1127 09:01:15.901220 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3243de9-39c3-4511-9e89-131659e2179a" containerName="horizon" Nov 27 09:01:15 crc kubenswrapper[4971]: I1127 09:01:15.901230 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3243de9-39c3-4511-9e89-131659e2179a" containerName="horizon" Nov 27 09:01:15 crc kubenswrapper[4971]: E1127 09:01:15.901244 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a9eaaae-c63a-4924-8833-138a78aacd29" containerName="keystone-cron" Nov 27 09:01:15 crc kubenswrapper[4971]: I1127 09:01:15.901251 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a9eaaae-c63a-4924-8833-138a78aacd29" containerName="keystone-cron" Nov 27 09:01:15 crc kubenswrapper[4971]: E1127 09:01:15.901270 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ada68182-ff97-4a6f-83a5-10dba9af7324" containerName="extract" Nov 27 09:01:15 crc kubenswrapper[4971]: I1127 09:01:15.901276 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ada68182-ff97-4a6f-83a5-10dba9af7324" containerName="extract" Nov 27 09:01:15 crc kubenswrapper[4971]: E1127 09:01:15.901297 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3243de9-39c3-4511-9e89-131659e2179a" 
containerName="horizon-log" Nov 27 09:01:15 crc kubenswrapper[4971]: I1127 09:01:15.901303 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3243de9-39c3-4511-9e89-131659e2179a" containerName="horizon-log" Nov 27 09:01:15 crc kubenswrapper[4971]: E1127 09:01:15.901322 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ada68182-ff97-4a6f-83a5-10dba9af7324" containerName="util" Nov 27 09:01:15 crc kubenswrapper[4971]: I1127 09:01:15.901328 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ada68182-ff97-4a6f-83a5-10dba9af7324" containerName="util" Nov 27 09:01:15 crc kubenswrapper[4971]: I1127 09:01:15.901558 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a9eaaae-c63a-4924-8833-138a78aacd29" containerName="keystone-cron" Nov 27 09:01:15 crc kubenswrapper[4971]: I1127 09:01:15.901578 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3243de9-39c3-4511-9e89-131659e2179a" containerName="horizon-log" Nov 27 09:01:15 crc kubenswrapper[4971]: I1127 09:01:15.901600 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3243de9-39c3-4511-9e89-131659e2179a" containerName="horizon" Nov 27 09:01:15 crc kubenswrapper[4971]: I1127 09:01:15.901609 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ada68182-ff97-4a6f-83a5-10dba9af7324" containerName="extract" Nov 27 09:01:15 crc kubenswrapper[4971]: I1127 09:01:15.902591 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-shr67" Nov 27 09:01:15 crc kubenswrapper[4971]: I1127 09:01:15.910635 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Nov 27 09:01:15 crc kubenswrapper[4971]: I1127 09:01:15.910656 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-x6h6l" Nov 27 09:01:15 crc kubenswrapper[4971]: I1127 09:01:15.911423 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Nov 27 09:01:15 crc kubenswrapper[4971]: I1127 09:01:15.913779 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-shr67"] Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.014481 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrp48\" (UniqueName: \"kubernetes.io/projected/d80dae31-1743-4758-a865-83748b028b5d-kube-api-access-zrp48\") pod \"obo-prometheus-operator-668cf9dfbb-shr67\" (UID: \"d80dae31-1743-4758-a865-83748b028b5d\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-shr67" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.045202 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-5mct7"] Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.047012 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-5mct7" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.050355 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-m8jhv" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.050746 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.083776 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-87mfz"] Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.085478 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-87mfz" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.104303 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-5mct7"] Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.123241 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/074b15e8-70dd-4d14-beee-731393461a46-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-75dd79885d-5mct7\" (UID: \"074b15e8-70dd-4d14-beee-731393461a46\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-5mct7" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.123323 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/074b15e8-70dd-4d14-beee-731393461a46-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-75dd79885d-5mct7\" (UID: \"074b15e8-70dd-4d14-beee-731393461a46\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-5mct7" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.123390 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/25a6f6c7-3cbb-42a4-a2a3-ba9268fac935-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-75dd79885d-87mfz\" (UID: \"25a6f6c7-3cbb-42a4-a2a3-ba9268fac935\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-87mfz" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.123460 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrp48\" (UniqueName: \"kubernetes.io/projected/d80dae31-1743-4758-a865-83748b028b5d-kube-api-access-zrp48\") pod \"obo-prometheus-operator-668cf9dfbb-shr67\" (UID: \"d80dae31-1743-4758-a865-83748b028b5d\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-shr67" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.123511 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/25a6f6c7-3cbb-42a4-a2a3-ba9268fac935-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-75dd79885d-87mfz\" (UID: \"25a6f6c7-3cbb-42a4-a2a3-ba9268fac935\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-87mfz" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 
Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.128593 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-87mfz"]
Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.169783 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrp48\" (UniqueName: \"kubernetes.io/projected/d80dae31-1743-4758-a865-83748b028b5d-kube-api-access-zrp48\") pod \"obo-prometheus-operator-668cf9dfbb-shr67\" (UID: \"d80dae31-1743-4758-a865-83748b028b5d\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-shr67"
Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.197008 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5m5cg" podUID="4196dda6-b233-434f-bc33-61f0506fb55b" containerName="registry-server" probeResult="failure" output=<
Nov 27 09:01:16 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s
Nov 27 09:01:16 crc kubenswrapper[4971]: >
Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.214419 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-v8snv"]
Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.215894 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-v8snv"
Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.219249 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-gshd2"
Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.219251 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls"
Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.226361 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/25a6f6c7-3cbb-42a4-a2a3-ba9268fac935-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-75dd79885d-87mfz\" (UID: \"25a6f6c7-3cbb-42a4-a2a3-ba9268fac935\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-87mfz"
Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.226486 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/25a6f6c7-3cbb-42a4-a2a3-ba9268fac935-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-75dd79885d-87mfz\" (UID: \"25a6f6c7-3cbb-42a4-a2a3-ba9268fac935\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-87mfz"
Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.226680 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/074b15e8-70dd-4d14-beee-731393461a46-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-75dd79885d-5mct7\" (UID: \"074b15e8-70dd-4d14-beee-731393461a46\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-5mct7"
pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-5mct7" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.231317 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-shr67" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.234356 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/074b15e8-70dd-4d14-beee-731393461a46-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-75dd79885d-5mct7\" (UID: \"074b15e8-70dd-4d14-beee-731393461a46\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-5mct7" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.248298 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-v8snv"] Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.266046 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/25a6f6c7-3cbb-42a4-a2a3-ba9268fac935-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-75dd79885d-87mfz\" (UID: \"25a6f6c7-3cbb-42a4-a2a3-ba9268fac935\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-87mfz" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.268200 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/25a6f6c7-3cbb-42a4-a2a3-ba9268fac935-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-75dd79885d-87mfz\" (UID: \"25a6f6c7-3cbb-42a4-a2a3-ba9268fac935\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-87mfz" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.296245 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/074b15e8-70dd-4d14-beee-731393461a46-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-75dd79885d-5mct7\" (UID: \"074b15e8-70dd-4d14-beee-731393461a46\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-5mct7" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.329374 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/8306e952-474c-42d2-b1a2-d99d7c0c3fef-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-v8snv\" (UID: \"8306e952-474c-42d2-b1a2-d99d7c0c3fef\") " pod="openshift-operators/observability-operator-d8bb48f5d-v8snv" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.329519 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnspc\" (UniqueName: \"kubernetes.io/projected/8306e952-474c-42d2-b1a2-d99d7c0c3fef-kube-api-access-wnspc\") pod \"observability-operator-d8bb48f5d-v8snv\" (UID: \"8306e952-474c-42d2-b1a2-d99d7c0c3fef\") " pod="openshift-operators/observability-operator-d8bb48f5d-v8snv" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.377696 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-5mct7" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.428024 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-87mfz" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.430548 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/8306e952-474c-42d2-b1a2-d99d7c0c3fef-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-v8snv\" (UID: \"8306e952-474c-42d2-b1a2-d99d7c0c3fef\") " pod="openshift-operators/observability-operator-d8bb48f5d-v8snv" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.430608 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnspc\" (UniqueName: \"kubernetes.io/projected/8306e952-474c-42d2-b1a2-d99d7c0c3fef-kube-api-access-wnspc\") pod \"observability-operator-d8bb48f5d-v8snv\" (UID: \"8306e952-474c-42d2-b1a2-d99d7c0c3fef\") " pod="openshift-operators/observability-operator-d8bb48f5d-v8snv" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.449017 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/8306e952-474c-42d2-b1a2-d99d7c0c3fef-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-v8snv\" (UID: \"8306e952-474c-42d2-b1a2-d99d7c0c3fef\") " pod="openshift-operators/observability-operator-d8bb48f5d-v8snv" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.474883 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnspc\" (UniqueName: \"kubernetes.io/projected/8306e952-474c-42d2-b1a2-d99d7c0c3fef-kube-api-access-wnspc\") pod \"observability-operator-d8bb48f5d-v8snv\" (UID: \"8306e952-474c-42d2-b1a2-d99d7c0c3fef\") " pod="openshift-operators/observability-operator-d8bb48f5d-v8snv" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.540497 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-45hp5"] Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.543860 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-45hp5" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.557168 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-tjcd2" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.648847 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdlt2\" (UniqueName: \"kubernetes.io/projected/0e006a4c-d74e-4f76-8310-2c017bd69a5d-kube-api-access-pdlt2\") pod \"perses-operator-5446b9c989-45hp5\" (UID: \"0e006a4c-d74e-4f76-8310-2c017bd69a5d\") " pod="openshift-operators/perses-operator-5446b9c989-45hp5" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.648912 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/0e006a4c-d74e-4f76-8310-2c017bd69a5d-openshift-service-ca\") pod \"perses-operator-5446b9c989-45hp5\" (UID: \"0e006a4c-d74e-4f76-8310-2c017bd69a5d\") " pod="openshift-operators/perses-operator-5446b9c989-45hp5" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.696147 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-45hp5"] Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.705051 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-v8snv" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.816793 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdlt2\" (UniqueName: \"kubernetes.io/projected/0e006a4c-d74e-4f76-8310-2c017bd69a5d-kube-api-access-pdlt2\") pod \"perses-operator-5446b9c989-45hp5\" (UID: \"0e006a4c-d74e-4f76-8310-2c017bd69a5d\") " pod="openshift-operators/perses-operator-5446b9c989-45hp5" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.816888 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/0e006a4c-d74e-4f76-8310-2c017bd69a5d-openshift-service-ca\") pod \"perses-operator-5446b9c989-45hp5\" (UID: \"0e006a4c-d74e-4f76-8310-2c017bd69a5d\") " pod="openshift-operators/perses-operator-5446b9c989-45hp5" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.859464 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/0e006a4c-d74e-4f76-8310-2c017bd69a5d-openshift-service-ca\") pod \"perses-operator-5446b9c989-45hp5\" (UID: \"0e006a4c-d74e-4f76-8310-2c017bd69a5d\") " pod="openshift-operators/perses-operator-5446b9c989-45hp5" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.878298 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdlt2\" (UniqueName: \"kubernetes.io/projected/0e006a4c-d74e-4f76-8310-2c017bd69a5d-kube-api-access-pdlt2\") pod \"perses-operator-5446b9c989-45hp5\" (UID: \"0e006a4c-d74e-4f76-8310-2c017bd69a5d\") " pod="openshift-operators/perses-operator-5446b9c989-45hp5" Nov 27 09:01:16 crc kubenswrapper[4971]: I1127 09:01:16.905202 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-45hp5" Nov 27 09:01:17 crc kubenswrapper[4971]: I1127 09:01:17.228428 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-shr67"] Nov 27 09:01:17 crc kubenswrapper[4971]: I1127 09:01:17.308504 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-87mfz"] Nov 27 09:01:17 crc kubenswrapper[4971]: I1127 09:01:17.455914 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-5mct7"] Nov 27 09:01:17 crc kubenswrapper[4971]: I1127 09:01:17.617835 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-v8snv"] Nov 27 09:01:17 crc kubenswrapper[4971]: I1127 09:01:17.737439 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-87mfz" event={"ID":"25a6f6c7-3cbb-42a4-a2a3-ba9268fac935","Type":"ContainerStarted","Data":"51787b2c8a9a20e6270ab9abf343285807ac5ed54f71cffb9dc0251bfbc3cdfe"} Nov 27 09:01:17 crc kubenswrapper[4971]: I1127 09:01:17.739456 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-shr67" event={"ID":"d80dae31-1743-4758-a865-83748b028b5d","Type":"ContainerStarted","Data":"3efc2cbaeb59359dbc430b2fc79b54c910e4ae07a8158dd7621861f7a063fd92"} Nov 27 09:01:17 crc kubenswrapper[4971]: I1127 09:01:17.741519 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-5mct7" event={"ID":"074b15e8-70dd-4d14-beee-731393461a46","Type":"ContainerStarted","Data":"e1b081f399933b771b8f7a44643b3266bf4d02f7c081f394b677659f9c781816"} Nov 27 09:01:17 crc kubenswrapper[4971]: I1127 09:01:17.743440 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-v8snv" event={"ID":"8306e952-474c-42d2-b1a2-d99d7c0c3fef","Type":"ContainerStarted","Data":"0a599589826508948cb580859933755e5949646f20ec2d484acf6c94410d2af6"} Nov 27 09:01:17 crc kubenswrapper[4971]: I1127 09:01:17.763786 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-45hp5"] Nov 27 09:01:17 crc kubenswrapper[4971]: W1127 09:01:17.767438 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0e006a4c_d74e_4f76_8310_2c017bd69a5d.slice/crio-58372cc30da1d39cd73cc5ea0a9f39690cb522965229566352ed54378977092e WatchSource:0}: Error finding container 58372cc30da1d39cd73cc5ea0a9f39690cb522965229566352ed54378977092e: Status 404 returned error can't find the container with id 58372cc30da1d39cd73cc5ea0a9f39690cb522965229566352ed54378977092e Nov 27 09:01:18 crc kubenswrapper[4971]: I1127 09:01:18.784087 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-45hp5" event={"ID":"0e006a4c-d74e-4f76-8310-2c017bd69a5d","Type":"ContainerStarted","Data":"58372cc30da1d39cd73cc5ea0a9f39690cb522965229566352ed54378977092e"} Nov 27 09:01:26 crc kubenswrapper[4971]: I1127 09:01:26.182369 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5m5cg" podUID="4196dda6-b233-434f-bc33-61f0506fb55b" containerName="registry-server" 
probeResult="failure" output=< Nov 27 09:01:26 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 09:01:26 crc kubenswrapper[4971]: > Nov 27 09:01:26 crc kubenswrapper[4971]: I1127 09:01:26.413405 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 09:01:26 crc kubenswrapper[4971]: I1127 09:01:26.413477 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 09:01:28 crc kubenswrapper[4971]: I1127 09:01:28.040002 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-671e-account-create-update-sgvgw"] Nov 27 09:01:28 crc kubenswrapper[4971]: I1127 09:01:28.050272 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-671e-account-create-update-sgvgw"] Nov 27 09:01:28 crc kubenswrapper[4971]: I1127 09:01:28.563124 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0aa92f81-6dce-45d4-ad97-f736266bcc69" path="/var/lib/kubelet/pods/0aa92f81-6dce-45d4-ad97-f736266bcc69/volumes" Nov 27 09:01:29 crc kubenswrapper[4971]: I1127 09:01:29.028382 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-xscwm"] Nov 27 09:01:29 crc kubenswrapper[4971]: I1127 09:01:29.039909 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-xscwm"] Nov 27 09:01:30 crc kubenswrapper[4971]: I1127 09:01:30.569268 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="558798ff-c172-44e0-b329-79cb68adc93e" path="/var/lib/kubelet/pods/558798ff-c172-44e0-b329-79cb68adc93e/volumes" Nov 27 09:01:31 crc kubenswrapper[4971]: I1127 09:01:31.945711 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-v8snv" event={"ID":"8306e952-474c-42d2-b1a2-d99d7c0c3fef","Type":"ContainerStarted","Data":"4ecbce9ebb2e682891a176de275b4f5b61dd4fccad8a3aa703470fc07eac2789"} Nov 27 09:01:31 crc kubenswrapper[4971]: I1127 09:01:31.946719 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-v8snv" Nov 27 09:01:31 crc kubenswrapper[4971]: I1127 09:01:31.948357 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-45hp5" event={"ID":"0e006a4c-d74e-4f76-8310-2c017bd69a5d","Type":"ContainerStarted","Data":"d7b1a5e4e153f5cbdcdcd944042f5b3a4264b169c8c83ed0b4669e0cb75a7bdc"} Nov 27 09:01:31 crc kubenswrapper[4971]: I1127 09:01:31.948492 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-45hp5" Nov 27 09:01:31 crc kubenswrapper[4971]: I1127 09:01:31.976025 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-87mfz" event={"ID":"25a6f6c7-3cbb-42a4-a2a3-ba9268fac935","Type":"ContainerStarted","Data":"384dbe90ffca31d84b882f1c4b83c8fb4086aeb4504c855e173bd86c77783ec8"} Nov 27 09:01:31 crc kubenswrapper[4971]: I1127 
Nov 27 09:01:31 crc kubenswrapper[4971]: I1127 09:01:31.985119 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-shr67" event={"ID":"d80dae31-1743-4758-a865-83748b028b5d","Type":"ContainerStarted","Data":"81e4fe4a44974a4e8bb2882a1eb25f0d67055c99ad8f30bfc61a4990e4821926"}
Nov 27 09:01:31 crc kubenswrapper[4971]: I1127 09:01:31.996128 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-5mct7" event={"ID":"074b15e8-70dd-4d14-beee-731393461a46","Type":"ContainerStarted","Data":"52e40718da8085522699189ede11491717f9ecfd8b9061b9845ca225e8daf12e"}
Nov 27 09:01:32 crc kubenswrapper[4971]: I1127 09:01:32.034909 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-v8snv" podStartSLOduration=2.959126957 podStartE2EDuration="16.0348809s" podCreationTimestamp="2025-11-27 09:01:16 +0000 UTC" firstStartedPulling="2025-11-27 09:01:17.621053437 +0000 UTC m=+7715.813097355" lastFinishedPulling="2025-11-27 09:01:30.69680738 +0000 UTC m=+7728.888851298" observedRunningTime="2025-11-27 09:01:32.010616125 +0000 UTC m=+7730.202660063" watchObservedRunningTime="2025-11-27 09:01:32.0348809 +0000 UTC m=+7730.226924818"
Nov 27 09:01:32 crc kubenswrapper[4971]: I1127 09:01:32.056383 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-v8snv"
Nov 27 09:01:32 crc kubenswrapper[4971]: I1127 09:01:32.096339 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-87mfz" podStartSLOduration=2.9107142489999998 podStartE2EDuration="16.09630396s" podCreationTimestamp="2025-11-27 09:01:16 +0000 UTC" firstStartedPulling="2025-11-27 09:01:17.343741319 +0000 UTC m=+7715.535785237" lastFinishedPulling="2025-11-27 09:01:30.52933103 +0000 UTC m=+7728.721374948" observedRunningTime="2025-11-27 09:01:32.058880207 +0000 UTC m=+7730.250924135" watchObservedRunningTime="2025-11-27 09:01:32.09630396 +0000 UTC m=+7730.288347898"
Nov 27 09:01:32 crc kubenswrapper[4971]: I1127 09:01:32.142101 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-shr67" podStartSLOduration=3.7436798319999998 podStartE2EDuration="17.142070402s" podCreationTimestamp="2025-11-27 09:01:15 +0000 UTC" firstStartedPulling="2025-11-27 09:01:17.231410449 +0000 UTC m=+7715.423454367" lastFinishedPulling="2025-11-27 09:01:30.629801019 +0000 UTC m=+7728.821844937" observedRunningTime="2025-11-27 09:01:32.110859757 +0000 UTC m=+7730.302903705" watchObservedRunningTime="2025-11-27 09:01:32.142070402 +0000 UTC m=+7730.334114320"
Nov 27 09:01:32 crc kubenswrapper[4971]: I1127 09:01:32.190504 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75dd79885d-5mct7" podStartSLOduration=4.11311118 podStartE2EDuration="17.190483939s" podCreationTimestamp="2025-11-27 09:01:15 +0000 UTC" firstStartedPulling="2025-11-27 09:01:17.452018822 +0000 UTC m=+7715.644062740" lastFinishedPulling="2025-11-27 09:01:30.529391581 +0000 UTC m=+7728.721435499" observedRunningTime="2025-11-27 09:01:32.165069201 +0000 UTC m=+7730.357113139" watchObservedRunningTime="2025-11-27 09:01:32.190483939 +0000 UTC m=+7730.382527857"
Nov 27 09:01:32 crc kubenswrapper[4971]: I1127 09:01:32.227287 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-45hp5" podStartSLOduration=3.469024391 podStartE2EDuration="16.227260334s" podCreationTimestamp="2025-11-27 09:01:16 +0000 UTC" firstStartedPulling="2025-11-27 09:01:17.771800717 +0000 UTC m=+7715.963844635" lastFinishedPulling="2025-11-27 09:01:30.53003666 +0000 UTC m=+7728.722080578" observedRunningTime="2025-11-27 09:01:32.220222072 +0000 UTC m=+7730.412266000" watchObservedRunningTime="2025-11-27 09:01:32.227260334 +0000 UTC m=+7730.419304242"
Nov 27 09:01:35 crc kubenswrapper[4971]: I1127 09:01:35.155307 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5m5cg"
Nov 27 09:01:35 crc kubenswrapper[4971]: I1127 09:01:35.210598 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5m5cg"
Nov 27 09:01:36 crc kubenswrapper[4971]: I1127 09:01:36.909969 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-45hp5"
Nov 27 09:01:36 crc kubenswrapper[4971]: I1127 09:01:36.968828 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5m5cg"]
Nov 27 09:01:37 crc kubenswrapper[4971]: I1127 09:01:37.046714 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-2g9m5"]
Nov 27 09:01:37 crc kubenswrapper[4971]: I1127 09:01:37.058972 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-2g9m5"]
Nov 27 09:01:37 crc kubenswrapper[4971]: I1127 09:01:37.064444 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5m5cg" podUID="4196dda6-b233-434f-bc33-61f0506fb55b" containerName="registry-server" containerID="cri-o://758d50d493fdf728d4d179385fbf8d577e4830a2037c29e3efb3322c2e3543c0" gracePeriod=2
Need to start a new one" pod="openshift-marketplace/redhat-operators-5m5cg" Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:37.765504 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mwq4\" (UniqueName: \"kubernetes.io/projected/4196dda6-b233-434f-bc33-61f0506fb55b-kube-api-access-9mwq4\") pod \"4196dda6-b233-434f-bc33-61f0506fb55b\" (UID: \"4196dda6-b233-434f-bc33-61f0506fb55b\") " Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:37.765895 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4196dda6-b233-434f-bc33-61f0506fb55b-utilities\") pod \"4196dda6-b233-434f-bc33-61f0506fb55b\" (UID: \"4196dda6-b233-434f-bc33-61f0506fb55b\") " Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:37.765960 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4196dda6-b233-434f-bc33-61f0506fb55b-catalog-content\") pod \"4196dda6-b233-434f-bc33-61f0506fb55b\" (UID: \"4196dda6-b233-434f-bc33-61f0506fb55b\") " Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:37.768291 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4196dda6-b233-434f-bc33-61f0506fb55b-utilities" (OuterVolumeSpecName: "utilities") pod "4196dda6-b233-434f-bc33-61f0506fb55b" (UID: "4196dda6-b233-434f-bc33-61f0506fb55b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:37.776853 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4196dda6-b233-434f-bc33-61f0506fb55b-kube-api-access-9mwq4" (OuterVolumeSpecName: "kube-api-access-9mwq4") pod "4196dda6-b233-434f-bc33-61f0506fb55b" (UID: "4196dda6-b233-434f-bc33-61f0506fb55b"). InnerVolumeSpecName "kube-api-access-9mwq4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:37.872610 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4196dda6-b233-434f-bc33-61f0506fb55b-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:37.872660 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mwq4\" (UniqueName: \"kubernetes.io/projected/4196dda6-b233-434f-bc33-61f0506fb55b-kube-api-access-9mwq4\") on node \"crc\" DevicePath \"\"" Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:38.002123 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4196dda6-b233-434f-bc33-61f0506fb55b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4196dda6-b233-434f-bc33-61f0506fb55b" (UID: "4196dda6-b233-434f-bc33-61f0506fb55b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:38.081269 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4196dda6-b233-434f-bc33-61f0506fb55b-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:38.105152 4971 generic.go:334] "Generic (PLEG): container finished" podID="4196dda6-b233-434f-bc33-61f0506fb55b" containerID="758d50d493fdf728d4d179385fbf8d577e4830a2037c29e3efb3322c2e3543c0" exitCode=0 Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:38.105211 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5m5cg" event={"ID":"4196dda6-b233-434f-bc33-61f0506fb55b","Type":"ContainerDied","Data":"758d50d493fdf728d4d179385fbf8d577e4830a2037c29e3efb3322c2e3543c0"} Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:38.105247 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5m5cg" event={"ID":"4196dda6-b233-434f-bc33-61f0506fb55b","Type":"ContainerDied","Data":"09517be3c15a20c50793e86ea7a1c487ba6160a9949d0493df62f84da1dad13f"} Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:38.105274 4971 scope.go:117] "RemoveContainer" containerID="758d50d493fdf728d4d179385fbf8d577e4830a2037c29e3efb3322c2e3543c0" Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:38.105581 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5m5cg" Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:38.233805 4971 scope.go:117] "RemoveContainer" containerID="1c757d026188f0673cce1b9e6ae9004292ea77c85be8a956ff6268e29492acb2" Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:38.234060 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5m5cg"] Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:38.254213 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5m5cg"] Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:38.318222 4971 scope.go:117] "RemoveContainer" containerID="931beb9b5904eabdaf82774082e7ec5a66076177c9e5d6d0b67ea3f986c6ddba" Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:38.358559 4971 scope.go:117] "RemoveContainer" containerID="758d50d493fdf728d4d179385fbf8d577e4830a2037c29e3efb3322c2e3543c0" Nov 27 09:01:38 crc kubenswrapper[4971]: E1127 09:01:38.359564 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"758d50d493fdf728d4d179385fbf8d577e4830a2037c29e3efb3322c2e3543c0\": container with ID starting with 758d50d493fdf728d4d179385fbf8d577e4830a2037c29e3efb3322c2e3543c0 not found: ID does not exist" containerID="758d50d493fdf728d4d179385fbf8d577e4830a2037c29e3efb3322c2e3543c0" Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:38.359630 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"758d50d493fdf728d4d179385fbf8d577e4830a2037c29e3efb3322c2e3543c0"} err="failed to get container status \"758d50d493fdf728d4d179385fbf8d577e4830a2037c29e3efb3322c2e3543c0\": rpc error: code = NotFound desc = could not find container \"758d50d493fdf728d4d179385fbf8d577e4830a2037c29e3efb3322c2e3543c0\": container with ID starting with 758d50d493fdf728d4d179385fbf8d577e4830a2037c29e3efb3322c2e3543c0 not found: ID does not exist" Nov 27 09:01:38 crc 
Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:38.359674 4971 scope.go:117] "RemoveContainer" containerID="1c757d026188f0673cce1b9e6ae9004292ea77c85be8a956ff6268e29492acb2"
Nov 27 09:01:38 crc kubenswrapper[4971]: E1127 09:01:38.360022 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c757d026188f0673cce1b9e6ae9004292ea77c85be8a956ff6268e29492acb2\": container with ID starting with 1c757d026188f0673cce1b9e6ae9004292ea77c85be8a956ff6268e29492acb2 not found: ID does not exist" containerID="1c757d026188f0673cce1b9e6ae9004292ea77c85be8a956ff6268e29492acb2"
Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:38.360058 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c757d026188f0673cce1b9e6ae9004292ea77c85be8a956ff6268e29492acb2"} err="failed to get container status \"1c757d026188f0673cce1b9e6ae9004292ea77c85be8a956ff6268e29492acb2\": rpc error: code = NotFound desc = could not find container \"1c757d026188f0673cce1b9e6ae9004292ea77c85be8a956ff6268e29492acb2\": container with ID starting with 1c757d026188f0673cce1b9e6ae9004292ea77c85be8a956ff6268e29492acb2 not found: ID does not exist"
Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:38.360078 4971 scope.go:117] "RemoveContainer" containerID="931beb9b5904eabdaf82774082e7ec5a66076177c9e5d6d0b67ea3f986c6ddba"
Nov 27 09:01:38 crc kubenswrapper[4971]: E1127 09:01:38.360299 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"931beb9b5904eabdaf82774082e7ec5a66076177c9e5d6d0b67ea3f986c6ddba\": container with ID starting with 931beb9b5904eabdaf82774082e7ec5a66076177c9e5d6d0b67ea3f986c6ddba not found: ID does not exist" containerID="931beb9b5904eabdaf82774082e7ec5a66076177c9e5d6d0b67ea3f986c6ddba"
Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:38.360329 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"931beb9b5904eabdaf82774082e7ec5a66076177c9e5d6d0b67ea3f986c6ddba"} err="failed to get container status \"931beb9b5904eabdaf82774082e7ec5a66076177c9e5d6d0b67ea3f986c6ddba\": rpc error: code = NotFound desc = could not find container \"931beb9b5904eabdaf82774082e7ec5a66076177c9e5d6d0b67ea3f986c6ddba\": container with ID starting with 931beb9b5904eabdaf82774082e7ec5a66076177c9e5d6d0b67ea3f986c6ddba not found: ID does not exist"
Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:38.565337 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845" path="/var/lib/kubelet/pods/0b8c8f77-3f99-4f77-bbbd-bb90d3ad8845/volumes"
Nov 27 09:01:38 crc kubenswrapper[4971]: I1127 09:01:38.566719 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4196dda6-b233-434f-bc33-61f0506fb55b" path="/var/lib/kubelet/pods/4196dda6-b233-434f-bc33-61f0506fb55b/volumes"
Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.598702 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"]
Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.599024 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2" containerName="openstackclient" containerID="cri-o://3765b97474f278be19917650cfbfdf8fd192f74de6b098bd04bcc4bea85fbacf" gracePeriod=2
REMOVE" source="api" pods=["openstack/openstackclient"] Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.734602 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Nov 27 09:01:39 crc kubenswrapper[4971]: E1127 09:01:39.735066 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4196dda6-b233-434f-bc33-61f0506fb55b" containerName="registry-server" Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.735079 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4196dda6-b233-434f-bc33-61f0506fb55b" containerName="registry-server" Nov 27 09:01:39 crc kubenswrapper[4971]: E1127 09:01:39.735106 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4196dda6-b233-434f-bc33-61f0506fb55b" containerName="extract-content" Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.735114 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4196dda6-b233-434f-bc33-61f0506fb55b" containerName="extract-content" Nov 27 09:01:39 crc kubenswrapper[4971]: E1127 09:01:39.735135 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4196dda6-b233-434f-bc33-61f0506fb55b" containerName="extract-utilities" Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.735142 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4196dda6-b233-434f-bc33-61f0506fb55b" containerName="extract-utilities" Nov 27 09:01:39 crc kubenswrapper[4971]: E1127 09:01:39.735191 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2" containerName="openstackclient" Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.735197 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2" containerName="openstackclient" Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.735396 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="4196dda6-b233-434f-bc33-61f0506fb55b" containerName="registry-server" Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.735423 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2" containerName="openstackclient" Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.736416 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.749723 4971 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2" podUID="307e1bff-561c-4b41-8ac4-40a403f1936c" Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.758219 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.835675 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5b95\" (UniqueName: \"kubernetes.io/projected/307e1bff-561c-4b41-8ac4-40a403f1936c-kube-api-access-q5b95\") pod \"openstackclient\" (UID: \"307e1bff-561c-4b41-8ac4-40a403f1936c\") " pod="openstack/openstackclient" Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.835787 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/307e1bff-561c-4b41-8ac4-40a403f1936c-openstack-config-secret\") pod \"openstackclient\" (UID: \"307e1bff-561c-4b41-8ac4-40a403f1936c\") " pod="openstack/openstackclient" Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.835862 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/307e1bff-561c-4b41-8ac4-40a403f1936c-openstack-config\") pod \"openstackclient\" (UID: \"307e1bff-561c-4b41-8ac4-40a403f1936c\") " pod="openstack/openstackclient" Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.937727 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/307e1bff-561c-4b41-8ac4-40a403f1936c-openstack-config\") pod \"openstackclient\" (UID: \"307e1bff-561c-4b41-8ac4-40a403f1936c\") " pod="openstack/openstackclient" Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.938268 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5b95\" (UniqueName: \"kubernetes.io/projected/307e1bff-561c-4b41-8ac4-40a403f1936c-kube-api-access-q5b95\") pod \"openstackclient\" (UID: \"307e1bff-561c-4b41-8ac4-40a403f1936c\") " pod="openstack/openstackclient" Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.938328 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/307e1bff-561c-4b41-8ac4-40a403f1936c-openstack-config-secret\") pod \"openstackclient\" (UID: \"307e1bff-561c-4b41-8ac4-40a403f1936c\") " pod="openstack/openstackclient" Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.939996 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/307e1bff-561c-4b41-8ac4-40a403f1936c-openstack-config\") pod \"openstackclient\" (UID: \"307e1bff-561c-4b41-8ac4-40a403f1936c\") " pod="openstack/openstackclient" Nov 27 09:01:39 crc kubenswrapper[4971]: I1127 09:01:39.979486 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/307e1bff-561c-4b41-8ac4-40a403f1936c-openstack-config-secret\") pod \"openstackclient\" (UID: \"307e1bff-561c-4b41-8ac4-40a403f1936c\") " pod="openstack/openstackclient" Nov 27 09:01:40 crc 
Nov 27 09:01:40 crc kubenswrapper[4971]: I1127 09:01:40.062000 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Nov 27 09:01:40 crc kubenswrapper[4971]: I1127 09:01:40.069615 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Nov 27 09:01:40 crc kubenswrapper[4971]: I1127 09:01:40.071286 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Nov 27 09:01:40 crc kubenswrapper[4971]: I1127 09:01:40.083131 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-9mn5m"
Nov 27 09:01:40 crc kubenswrapper[4971]: I1127 09:01:40.110687 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Nov 27 09:01:40 crc kubenswrapper[4971]: I1127 09:01:40.148335 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ghv4\" (UniqueName: \"kubernetes.io/projected/98285765-b7fa-435c-a83f-545e4f7bb7bb-kube-api-access-8ghv4\") pod \"kube-state-metrics-0\" (UID: \"98285765-b7fa-435c-a83f-545e4f7bb7bb\") " pod="openstack/kube-state-metrics-0"
Nov 27 09:01:40 crc kubenswrapper[4971]: I1127 09:01:40.267440 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ghv4\" (UniqueName: \"kubernetes.io/projected/98285765-b7fa-435c-a83f-545e4f7bb7bb-kube-api-access-8ghv4\") pod \"kube-state-metrics-0\" (UID: \"98285765-b7fa-435c-a83f-545e4f7bb7bb\") " pod="openstack/kube-state-metrics-0"
Nov 27 09:01:40 crc kubenswrapper[4971]: I1127 09:01:40.329443 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ghv4\" (UniqueName: \"kubernetes.io/projected/98285765-b7fa-435c-a83f-545e4f7bb7bb-kube-api-access-8ghv4\") pod \"kube-state-metrics-0\" (UID: \"98285765-b7fa-435c-a83f-545e4f7bb7bb\") " pod="openstack/kube-state-metrics-0"
Nov 27 09:01:40 crc kubenswrapper[4971]: I1127 09:01:40.421818 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.052850 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.068058 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.071820 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.078388 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.078681 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-vp45l"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.078852 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.078988 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.079133 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.102347 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/5448dcaf-a243-4941-8c62-6bc48cc0f32e-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.102424 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5448dcaf-a243-4941-8c62-6bc48cc0f32e-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.102470 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/5448dcaf-a243-4941-8c62-6bc48cc0f32e-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.102501 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/5448dcaf-a243-4941-8c62-6bc48cc0f32e-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.102612 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5448dcaf-a243-4941-8c62-6bc48cc0f32e-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.102863 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7v58l\" (UniqueName: \"kubernetes.io/projected/5448dcaf-a243-4941-8c62-6bc48cc0f32e-kube-api-access-7v58l\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0"
\"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.103100 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/5448dcaf-a243-4941-8c62-6bc48cc0f32e-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.206135 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/5448dcaf-a243-4941-8c62-6bc48cc0f32e-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.206229 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5448dcaf-a243-4941-8c62-6bc48cc0f32e-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.206265 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/5448dcaf-a243-4941-8c62-6bc48cc0f32e-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.206291 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/5448dcaf-a243-4941-8c62-6bc48cc0f32e-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.206332 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5448dcaf-a243-4941-8c62-6bc48cc0f32e-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.206375 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7v58l\" (UniqueName: \"kubernetes.io/projected/5448dcaf-a243-4941-8c62-6bc48cc0f32e-kube-api-access-7v58l\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.206439 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/5448dcaf-a243-4941-8c62-6bc48cc0f32e-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.210315 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/5448dcaf-a243-4941-8c62-6bc48cc0f32e-alertmanager-metric-storage-db\") pod 
\"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.224088 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5448dcaf-a243-4941-8c62-6bc48cc0f32e-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.230555 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/5448dcaf-a243-4941-8c62-6bc48cc0f32e-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.242609 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/5448dcaf-a243-4941-8c62-6bc48cc0f32e-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.258366 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7v58l\" (UniqueName: \"kubernetes.io/projected/5448dcaf-a243-4941-8c62-6bc48cc0f32e-kube-api-access-7v58l\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.261297 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/5448dcaf-a243-4941-8c62-6bc48cc0f32e-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.267687 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5448dcaf-a243-4941-8c62-6bc48cc0f32e-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"5448dcaf-a243-4941-8c62-6bc48cc0f32e\") " pod="openstack/alertmanager-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.269993 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.408938 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.416976 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.688780 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.696130 4971 util.go:30] "No sandbox for pod can be found. 
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.713845 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.720757 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.720847 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.720979 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.721198 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.721222 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-c2x7r"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.781938 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-d6336ce9-0c5c-4eca-af17-9c255b038f91\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d6336ce9-0c5c-4eca-af17-9c255b038f91\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.786751 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.787475 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.787519 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.787615 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkhcw\" (UniqueName: \"kubernetes.io/projected/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-kube-api-access-nkhcw\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.787758 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-config\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0"
\"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.789794 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.789877 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.790041 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.919290 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.919379 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.919447 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-d6336ce9-0c5c-4eca-af17-9c255b038f91\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d6336ce9-0c5c-4eca-af17-9c255b038f91\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.919520 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.919642 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.919672 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " 
pod="openstack/prometheus-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.919714 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkhcw\" (UniqueName: \"kubernetes.io/projected/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-kube-api-access-nkhcw\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.919777 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-config\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.920619 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.935654 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.936748 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-config\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.938327 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.941192 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.941327 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0" Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.950149 4971 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.950237 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-d6336ce9-0c5c-4eca-af17-9c255b038f91\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d6336ce9-0c5c-4eca-af17-9c255b038f91\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/459cfd87d4c454ad8db83fd8a67d71b7339eed47cc3a26c829cb8802711aca92/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Nov 27 09:01:41 crc kubenswrapper[4971]: I1127 09:01:41.962958 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkhcw\" (UniqueName: \"kubernetes.io/projected/15d3c3db-5df1-4825-8fc0-3e9a10dfb943-kube-api-access-nkhcw\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0"
Nov 27 09:01:42 crc kubenswrapper[4971]: I1127 09:01:42.189572 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-d6336ce9-0c5c-4eca-af17-9c255b038f91\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d6336ce9-0c5c-4eca-af17-9c255b038f91\") pod \"prometheus-metric-storage-0\" (UID: \"15d3c3db-5df1-4825-8fc0-3e9a10dfb943\") " pod="openstack/prometheus-metric-storage-0"
Nov 27 09:01:42 crc kubenswrapper[4971]: I1127 09:01:42.213842 4971 generic.go:334] "Generic (PLEG): container finished" podID="8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2" containerID="3765b97474f278be19917650cfbfdf8fd192f74de6b098bd04bcc4bea85fbacf" exitCode=137
Nov 27 09:01:42 crc kubenswrapper[4971]: I1127 09:01:42.219395 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"307e1bff-561c-4b41-8ac4-40a403f1936c","Type":"ContainerStarted","Data":"fdf9c0beff9569a04c1bda1569982baf23e8496b979b22a14ea5a688f4d835be"}
Nov 27 09:01:42 crc kubenswrapper[4971]: I1127 09:01:42.219455 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"307e1bff-561c-4b41-8ac4-40a403f1936c","Type":"ContainerStarted","Data":"8b14e516f711a335c2f57b910b26cd287429ddb64c27c97b86a5561eb48008da"}
Nov 27 09:01:42 crc kubenswrapper[4971]: I1127 09:01:42.227998 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"98285765-b7fa-435c-a83f-545e4f7bb7bb","Type":"ContainerStarted","Data":"649a2ceec0e3b727a4238443810979fe88d573a7a60bcbb1bed2aba2d3ea7200"}
Nov 27 09:01:42 crc kubenswrapper[4971]: I1127 09:01:42.258113 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.258085017 podStartE2EDuration="3.258085017s" podCreationTimestamp="2025-11-27 09:01:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 09:01:42.24320582 +0000 UTC m=+7740.435249758" watchObservedRunningTime="2025-11-27 09:01:42.258085017 +0000 UTC m=+7740.450128945"
Nov 27 09:01:42 crc kubenswrapper[4971]: I1127 09:01:42.355380 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Nov 27 09:01:42 crc kubenswrapper[4971]: I1127 09:01:42.418333 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Nov 27 09:01:42 crc kubenswrapper[4971]: W1127 09:01:42.421350 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5448dcaf_a243_4941_8c62_6bc48cc0f32e.slice/crio-997c1d5bdb12340c26882f6a30696239d6388ad1f07b59b2283485f967c1a925 WatchSource:0}: Error finding container 997c1d5bdb12340c26882f6a30696239d6388ad1f07b59b2283485f967c1a925: Status 404 returned error can't find the container with id 997c1d5bdb12340c26882f6a30696239d6388ad1f07b59b2283485f967c1a925
Nov 27 09:01:42 crc kubenswrapper[4971]: I1127 09:01:42.440659 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxgpb\" (UniqueName: \"kubernetes.io/projected/8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2-kube-api-access-jxgpb\") pod \"8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2\" (UID: \"8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2\") "
Nov 27 09:01:42 crc kubenswrapper[4971]: I1127 09:01:42.440732 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2-openstack-config\") pod \"8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2\" (UID: \"8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2\") "
Nov 27 09:01:42 crc kubenswrapper[4971]: I1127 09:01:42.440787 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2-openstack-config-secret\") pod \"8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2\" (UID: \"8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2\") "
Nov 27 09:01:42 crc kubenswrapper[4971]: I1127 09:01:42.446909 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2-kube-api-access-jxgpb" (OuterVolumeSpecName: "kube-api-access-jxgpb") pod "8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2" (UID: "8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2"). InnerVolumeSpecName "kube-api-access-jxgpb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:01:42 crc kubenswrapper[4971]: I1127 09:01:42.456662 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Nov 27 09:01:42 crc kubenswrapper[4971]: I1127 09:01:42.496389 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2" (UID: "8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 09:01:42 crc kubenswrapper[4971]: I1127 09:01:42.548648 4971 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2-openstack-config\") on node \"crc\" DevicePath \"\""
Nov 27 09:01:42 crc kubenswrapper[4971]: I1127 09:01:42.548754 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxgpb\" (UniqueName: \"kubernetes.io/projected/8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2-kube-api-access-jxgpb\") on node \"crc\" DevicePath \"\""
Nov 27 09:01:42 crc kubenswrapper[4971]: I1127 09:01:42.576029 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2" (UID: "8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:01:42 crc kubenswrapper[4971]: I1127 09:01:42.583200 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2" path="/var/lib/kubelet/pods/8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2/volumes"
Nov 27 09:01:42 crc kubenswrapper[4971]: I1127 09:01:42.656580 4971 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8ed0b9fd-77b4-467b-bdcc-88f4e4b956b2-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Nov 27 09:01:43 crc kubenswrapper[4971]: I1127 09:01:43.090790 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Nov 27 09:01:43 crc kubenswrapper[4971]: I1127 09:01:43.243823 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"15d3c3db-5df1-4825-8fc0-3e9a10dfb943","Type":"ContainerStarted","Data":"18674ef733974d92d21f0326e917fe899c62d154b361483ad23aca820801e6bd"}
Nov 27 09:01:43 crc kubenswrapper[4971]: I1127 09:01:43.247822 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Nov 27 09:01:43 crc kubenswrapper[4971]: I1127 09:01:43.247825 4971 scope.go:117] "RemoveContainer" containerID="3765b97474f278be19917650cfbfdf8fd192f74de6b098bd04bcc4bea85fbacf"
Nov 27 09:01:43 crc kubenswrapper[4971]: I1127 09:01:43.249461 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"5448dcaf-a243-4941-8c62-6bc48cc0f32e","Type":"ContainerStarted","Data":"997c1d5bdb12340c26882f6a30696239d6388ad1f07b59b2283485f967c1a925"}
Nov 27 09:01:43 crc kubenswrapper[4971]: I1127 09:01:43.252242 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"98285765-b7fa-435c-a83f-545e4f7bb7bb","Type":"ContainerStarted","Data":"b8d2678c0198e6353ad5a96af8e8da3261800451a945a198fe88def21f54b66e"}
Nov 27 09:01:43 crc kubenswrapper[4971]: I1127 09:01:43.252869 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Nov 27 09:01:43 crc kubenswrapper[4971]: I1127 09:01:43.290922 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.690693275 podStartE2EDuration="3.290898487s" podCreationTimestamp="2025-11-27 09:01:40 +0000 UTC" firstStartedPulling="2025-11-27 09:01:41.499747572 +0000 UTC m=+7739.691791490" lastFinishedPulling="2025-11-27 09:01:42.099952784 +0000 UTC m=+7740.291996702" observedRunningTime="2025-11-27 09:01:43.271766109 +0000 UTC m=+7741.463810027" watchObservedRunningTime="2025-11-27 09:01:43.290898487 +0000 UTC m=+7741.482942405"
Nov 27 09:01:49 crc kubenswrapper[4971]: I1127 09:01:49.343439 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"5448dcaf-a243-4941-8c62-6bc48cc0f32e","Type":"ContainerStarted","Data":"4b9e15dbf3a6e2d669f2315060c77820a24304830bbdc29942f1507b3e6a78bd"}
Nov 27 09:01:49 crc kubenswrapper[4971]: I1127 09:01:49.375152 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"15d3c3db-5df1-4825-8fc0-3e9a10dfb943","Type":"ContainerStarted","Data":"9b02925f77530618ef2802f2b05dace6fe808d604f2ac134bbfa877fcf160e72"}
Nov 27 09:01:50 crc kubenswrapper[4971]: I1127 09:01:50.426684 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Nov 27 09:01:51 crc kubenswrapper[4971]: E1127 09:01:51.989024 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ed0b9fd_77b4_467b_bdcc_88f4e4b956b2.slice\": RecentStats: unable to find data in memory cache]"
Nov 27 09:01:55 crc kubenswrapper[4971]: I1127 09:01:55.145254 4971 scope.go:117] "RemoveContainer" containerID="bcc668e4727c4c3eb7c94152d8196bededd688fb51f88a1551267cb0de848fe9"
Nov 27 09:01:55 crc kubenswrapper[4971]: I1127 09:01:55.271165 4971 scope.go:117] "RemoveContainer" containerID="ab4142ac42de530cd48d2557569e52f400fe9335280bb740f6a04efe948fd929"
Nov 27 09:01:55 crc kubenswrapper[4971]: I1127 09:01:55.331459 4971 scope.go:117] "RemoveContainer" containerID="8aa735447b4a30617a9838859f04b8951ee19a5830412d9ee16084071d62a68c"
Nov 27 09:01:55 crc kubenswrapper[4971]: I1127 09:01:55.355332 4971 scope.go:117] "RemoveContainer" containerID="abf97cb26d9d10cfa3a3f85e66a7fda39253ee4584313278aa620c6a419d6a2a"
Nov 27 09:01:55 crc kubenswrapper[4971]: I1127 09:01:55.407974 4971 scope.go:117] "RemoveContainer" containerID="9169a0b52d9e84b87d170788f364b54a63a5512693ad23c3f1d33a5fae879542"
Nov 27 09:01:55 crc kubenswrapper[4971]: I1127 09:01:55.451361 4971 generic.go:334] "Generic (PLEG): container finished" podID="15d3c3db-5df1-4825-8fc0-3e9a10dfb943" containerID="9b02925f77530618ef2802f2b05dace6fe808d604f2ac134bbfa877fcf160e72" exitCode=0
Nov 27 09:01:55 crc kubenswrapper[4971]: I1127 09:01:55.451437 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"15d3c3db-5df1-4825-8fc0-3e9a10dfb943","Type":"ContainerDied","Data":"9b02925f77530618ef2802f2b05dace6fe808d604f2ac134bbfa877fcf160e72"}
Nov 27 09:01:56 crc kubenswrapper[4971]: I1127 09:01:56.413777 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 09:01:56 crc kubenswrapper[4971]: I1127 09:01:56.413920 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 09:01:56 crc kubenswrapper[4971]: I1127 09:01:56.414001 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h"
Nov 27 09:01:56 crc kubenswrapper[4971]: I1127 09:01:56.415318 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 27 09:01:56 crc kubenswrapper[4971]: I1127 09:01:56.415447 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168" gracePeriod=600
Nov 27 09:01:56 crc kubenswrapper[4971]: E1127 09:01:56.540416 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:01:57 crc kubenswrapper[4971]: I1127 09:01:57.480629 4971 generic.go:334] "Generic (PLEG): container finished" podID="5448dcaf-a243-4941-8c62-6bc48cc0f32e" containerID="4b9e15dbf3a6e2d669f2315060c77820a24304830bbdc29942f1507b3e6a78bd" exitCode=0
Nov 27 09:01:57 crc kubenswrapper[4971]: I1127 09:01:57.480746 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"5448dcaf-a243-4941-8c62-6bc48cc0f32e","Type":"ContainerDied","Data":"4b9e15dbf3a6e2d669f2315060c77820a24304830bbdc29942f1507b3e6a78bd"}
Nov 27 09:01:57 crc kubenswrapper[4971]: I1127 09:01:57.490562 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168" exitCode=0
Nov 27 09:01:57 crc kubenswrapper[4971]: I1127 09:01:57.490611 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168"}
Nov 27 09:01:57 crc kubenswrapper[4971]: I1127 09:01:57.490651 4971 scope.go:117] "RemoveContainer" containerID="4de37cdeabf174e17fecf55eb37e927f401aeb831db4b1fc4ee5997316920a31"
Nov 27 09:01:57 crc kubenswrapper[4971]: I1127 09:01:57.491245 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168"
Nov 27 09:01:57 crc kubenswrapper[4971]: E1127 09:01:57.491572 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:02:02 crc kubenswrapper[4971]: E1127 09:02:02.267424 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ed0b9fd_77b4_467b_bdcc_88f4e4b956b2.slice\": RecentStats: unable to find data in memory cache]"
Nov 27 09:02:03 crc kubenswrapper[4971]: I1127 09:02:03.560756 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"5448dcaf-a243-4941-8c62-6bc48cc0f32e","Type":"ContainerStarted","Data":"7c5735d4ce514bcee2494ea3a5eb6206043e6432aab8f0b08007602d615f3b56"}
Nov 27 09:02:03 crc kubenswrapper[4971]: I1127 09:02:03.562703 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"15d3c3db-5df1-4825-8fc0-3e9a10dfb943","Type":"ContainerStarted","Data":"aa4671761a140e38f72f691f5270b8827e0c9ce441b46c3171563ca7b2c3678a"}
Nov 27 09:02:08 crc kubenswrapper[4971]: I1127 09:02:08.639239 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"15d3c3db-5df1-4825-8fc0-3e9a10dfb943","Type":"ContainerStarted","Data":"6ba5d51c25a68f378cb36c8838992d829a3b2fa16013f76d62e6f936fa0a9d5a"}
Nov 27 09:02:08 crc kubenswrapper[4971]: I1127 09:02:08.643273 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"5448dcaf-a243-4941-8c62-6bc48cc0f32e","Type":"ContainerStarted","Data":"2c66f29a71132a65498826b582b9c83b412be98505cd2fec309541e35e1fed27"}
Nov 27 09:02:08 crc kubenswrapper[4971]: I1127 09:02:08.693452 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=6.98343018 podStartE2EDuration="27.693430217s" podCreationTimestamp="2025-11-27 09:01:41 +0000 UTC" firstStartedPulling="2025-11-27 09:01:42.426260857 +0000 UTC m=+7740.618304775" lastFinishedPulling="2025-11-27 09:02:03.136260844 +0000 UTC m=+7761.328304812" observedRunningTime="2025-11-27 09:02:08.686368325 +0000 UTC m=+7766.878412243" watchObservedRunningTime="2025-11-27 09:02:08.693430217 +0000 UTC m=+7766.885474135"
Nov 27 09:02:09 crc kubenswrapper[4971]: I1127 09:02:09.662676 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0"
Nov 27 09:02:09 crc kubenswrapper[4971]: I1127 09:02:09.666059 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0"
Nov 27 09:02:10 crc kubenswrapper[4971]: I1127 09:02:10.551444 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168"
Nov 27 09:02:10 crc kubenswrapper[4971]: E1127 09:02:10.552111 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:02:12 crc kubenswrapper[4971]: E1127 09:02:12.553396 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ed0b9fd_77b4_467b_bdcc_88f4e4b956b2.slice\": RecentStats: unable to find data in memory cache]"
Nov 27 09:02:12 crc kubenswrapper[4971]: I1127 09:02:12.700343 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"15d3c3db-5df1-4825-8fc0-3e9a10dfb943","Type":"ContainerStarted","Data":"efc7a8d3d3c4662273f366779c7e69a67b135bf143a35cc04aac0c118228cf1d"}
Nov 27 09:02:12 crc kubenswrapper[4971]: I1127 09:02:12.730826 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=4.020737325 podStartE2EDuration="32.730793823s" podCreationTimestamp="2025-11-27 09:01:40 +0000 UTC" firstStartedPulling="2025-11-27 09:01:43.117984421 +0000 UTC m=+7741.310028339" lastFinishedPulling="2025-11-27 09:02:11.828040919 +0000 UTC m=+7770.020084837" observedRunningTime="2025-11-27 09:02:12.726951003 +0000 UTC m=+7770.918994971" watchObservedRunningTime="2025-11-27 09:02:12.730793823 +0000 UTC m=+7770.922837751"
Nov 27 09:02:17 crc kubenswrapper[4971]: I1127 09:02:17.459058 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.048013 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-96e4-account-create-update-h8wnp"]
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.057269 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-g4f6p"]
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.065747 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-g4f6p"]
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.075973 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-96e4-account-create-update-h8wnp"]
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.159433 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.162437 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.165132 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.165138 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.179438 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.199444 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnp7l\" (UniqueName: \"kubernetes.io/projected/ecfe453b-4675-4375-b265-5a892560c4bb-kube-api-access-fnp7l\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.199790 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.199837 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.199860 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-config-data\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.199955 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-scripts\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.199988 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecfe453b-4675-4375-b265-5a892560c4bb-run-httpd\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.200172 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecfe453b-4675-4375-b265-5a892560c4bb-log-httpd\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.301965 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.302047 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.302091 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-config-data\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.302152 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-scripts\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.302177 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecfe453b-4675-4375-b265-5a892560c4bb-run-httpd\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.302265 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecfe453b-4675-4375-b265-5a892560c4bb-log-httpd\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.302353 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnp7l\" (UniqueName: \"kubernetes.io/projected/ecfe453b-4675-4375-b265-5a892560c4bb-kube-api-access-fnp7l\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.302950 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecfe453b-4675-4375-b265-5a892560c4bb-log-httpd\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.302965 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecfe453b-4675-4375-b265-5a892560c4bb-run-httpd\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.310284 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-scripts\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.310575 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.310860 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-config-data\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.311311 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.320996 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnp7l\" (UniqueName: \"kubernetes.io/projected/ecfe453b-4675-4375-b265-5a892560c4bb-kube-api-access-fnp7l\") pod \"ceilometer-0\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") " pod="openstack/ceilometer-0"
Nov 27 09:02:19 crc kubenswrapper[4971]: I1127 09:02:19.498710 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 27 09:02:20 crc kubenswrapper[4971]: I1127 09:02:20.052176 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Nov 27 09:02:20 crc kubenswrapper[4971]: I1127 09:02:20.564754 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46a41356-c31e-4965-82a0-e836612bb0be" path="/var/lib/kubelet/pods/46a41356-c31e-4965-82a0-e836612bb0be/volumes"
Nov 27 09:02:20 crc kubenswrapper[4971]: I1127 09:02:20.566021 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fb11aa1-8a1f-4854-80b1-65240a37dacd" path="/var/lib/kubelet/pods/8fb11aa1-8a1f-4854-80b1-65240a37dacd/volumes"
Nov 27 09:02:20 crc kubenswrapper[4971]: I1127 09:02:20.806417 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ecfe453b-4675-4375-b265-5a892560c4bb","Type":"ContainerStarted","Data":"9078abc006bf8f53043aea2b909b30301afeaf6e580faee8073a704d479cfb74"}
Nov 27 09:02:22 crc kubenswrapper[4971]: E1127 09:02:22.810429 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ed0b9fd_77b4_467b_bdcc_88f4e4b956b2.slice\": RecentStats: unable to find data in memory cache]"
Nov 27 09:02:24 crc kubenswrapper[4971]: I1127 09:02:24.272285 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 27 09:02:24 crc kubenswrapper[4971]: I1127 09:02:24.849920 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ecfe453b-4675-4375-b265-5a892560c4bb","Type":"ContainerStarted","Data":"4065a9ac93d96eeb972bc92cf37d5e337d28e952f7ac822cf5d232d62605fbde"}
Nov 27 09:02:25 crc kubenswrapper[4971]: I1127 09:02:25.551193 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168"
Nov 27 09:02:25 crc kubenswrapper[4971]: E1127 09:02:25.551727 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:02:25 crc kubenswrapper[4971]: I1127 09:02:25.860772 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ecfe453b-4675-4375-b265-5a892560c4bb","Type":"ContainerStarted","Data":"63272d2818dcccba3530122d6f603b6771cb13cbc7f37e32162bfe5a28a3dcc5"}
Nov 27 09:02:26 crc kubenswrapper[4971]: I1127 09:02:26.871412 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ecfe453b-4675-4375-b265-5a892560c4bb","Type":"ContainerStarted","Data":"fdb0cc0273b2b0254c9fbc363a767f4fca5e9dbbfc2550d5a83796409b5c24c4"}
Nov 27 09:02:27 crc kubenswrapper[4971]: I1127 09:02:27.459843 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0"
Nov 27 09:02:27 crc kubenswrapper[4971]: I1127 09:02:27.465557 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0"
Nov 27 09:02:27 crc kubenswrapper[4971]: I1127 09:02:27.881923 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0"
Nov 27 09:02:28 crc kubenswrapper[4971]: I1127 09:02:28.894424 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ecfe453b-4675-4375-b265-5a892560c4bb","Type":"ContainerStarted","Data":"9b0932496ad0b5c4f42ab6838e895192cc87b874f748ae91e9e107da2e2d7df9"}
Nov 27 09:02:28 crc kubenswrapper[4971]: I1127 09:02:28.894513 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Nov 27 09:02:28 crc kubenswrapper[4971]: I1127 09:02:28.924836 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.89860758 podStartE2EDuration="9.924810609s" podCreationTimestamp="2025-11-27 09:02:19 +0000 UTC" firstStartedPulling="2025-11-27 09:02:20.063649909 +0000 UTC m=+7778.255693827" lastFinishedPulling="2025-11-27 09:02:28.089852938 +0000 UTC m=+7786.281896856" observedRunningTime="2025-11-27 09:02:28.913278578 +0000 UTC m=+7787.105322516" watchObservedRunningTime="2025-11-27 09:02:28.924810609 +0000 UTC m=+7787.116854527"
Nov 27 09:02:33 crc kubenswrapper[4971]: E1127 09:02:33.107890 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ed0b9fd_77b4_467b_bdcc_88f4e4b956b2.slice\": RecentStats: unable to find data in memory cache]"
Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.572962 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-87nq8"]
Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.575768 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-87nq8"
Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.580047 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-87nq8"]
Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.662008 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-6b4a-account-create-update-bdh9t"]
Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.663671 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-6b4a-account-create-update-bdh9t"
Need to start a new one" pod="openstack/aodh-6b4a-account-create-update-bdh9t" Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.670645 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret" Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.682608 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-6b4a-account-create-update-bdh9t"] Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.732285 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b599f\" (UniqueName: \"kubernetes.io/projected/2d7182a5-9bfd-4767-916e-a83a2a32da61-kube-api-access-b599f\") pod \"aodh-db-create-87nq8\" (UID: \"2d7182a5-9bfd-4767-916e-a83a2a32da61\") " pod="openstack/aodh-db-create-87nq8" Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.732399 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8sdg\" (UniqueName: \"kubernetes.io/projected/77cb726e-bcde-40f2-8aee-100180685d40-kube-api-access-z8sdg\") pod \"aodh-6b4a-account-create-update-bdh9t\" (UID: \"77cb726e-bcde-40f2-8aee-100180685d40\") " pod="openstack/aodh-6b4a-account-create-update-bdh9t" Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.732574 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d7182a5-9bfd-4767-916e-a83a2a32da61-operator-scripts\") pod \"aodh-db-create-87nq8\" (UID: \"2d7182a5-9bfd-4767-916e-a83a2a32da61\") " pod="openstack/aodh-db-create-87nq8" Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.732717 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77cb726e-bcde-40f2-8aee-100180685d40-operator-scripts\") pod \"aodh-6b4a-account-create-update-bdh9t\" (UID: \"77cb726e-bcde-40f2-8aee-100180685d40\") " pod="openstack/aodh-6b4a-account-create-update-bdh9t" Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.834145 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d7182a5-9bfd-4767-916e-a83a2a32da61-operator-scripts\") pod \"aodh-db-create-87nq8\" (UID: \"2d7182a5-9bfd-4767-916e-a83a2a32da61\") " pod="openstack/aodh-db-create-87nq8" Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.834293 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77cb726e-bcde-40f2-8aee-100180685d40-operator-scripts\") pod \"aodh-6b4a-account-create-update-bdh9t\" (UID: \"77cb726e-bcde-40f2-8aee-100180685d40\") " pod="openstack/aodh-6b4a-account-create-update-bdh9t" Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.834353 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b599f\" (UniqueName: \"kubernetes.io/projected/2d7182a5-9bfd-4767-916e-a83a2a32da61-kube-api-access-b599f\") pod \"aodh-db-create-87nq8\" (UID: \"2d7182a5-9bfd-4767-916e-a83a2a32da61\") " pod="openstack/aodh-db-create-87nq8" Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.834395 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8sdg\" (UniqueName: \"kubernetes.io/projected/77cb726e-bcde-40f2-8aee-100180685d40-kube-api-access-z8sdg\") pod 
\"aodh-6b4a-account-create-update-bdh9t\" (UID: \"77cb726e-bcde-40f2-8aee-100180685d40\") " pod="openstack/aodh-6b4a-account-create-update-bdh9t" Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.835384 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d7182a5-9bfd-4767-916e-a83a2a32da61-operator-scripts\") pod \"aodh-db-create-87nq8\" (UID: \"2d7182a5-9bfd-4767-916e-a83a2a32da61\") " pod="openstack/aodh-db-create-87nq8" Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.836881 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77cb726e-bcde-40f2-8aee-100180685d40-operator-scripts\") pod \"aodh-6b4a-account-create-update-bdh9t\" (UID: \"77cb726e-bcde-40f2-8aee-100180685d40\") " pod="openstack/aodh-6b4a-account-create-update-bdh9t" Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.856033 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8sdg\" (UniqueName: \"kubernetes.io/projected/77cb726e-bcde-40f2-8aee-100180685d40-kube-api-access-z8sdg\") pod \"aodh-6b4a-account-create-update-bdh9t\" (UID: \"77cb726e-bcde-40f2-8aee-100180685d40\") " pod="openstack/aodh-6b4a-account-create-update-bdh9t" Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.869300 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b599f\" (UniqueName: \"kubernetes.io/projected/2d7182a5-9bfd-4767-916e-a83a2a32da61-kube-api-access-b599f\") pod \"aodh-db-create-87nq8\" (UID: \"2d7182a5-9bfd-4767-916e-a83a2a32da61\") " pod="openstack/aodh-db-create-87nq8" Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.897246 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-87nq8" Nov 27 09:02:36 crc kubenswrapper[4971]: I1127 09:02:36.986872 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-6b4a-account-create-update-bdh9t" Nov 27 09:02:37 crc kubenswrapper[4971]: I1127 09:02:37.413404 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-6b4a-account-create-update-bdh9t"] Nov 27 09:02:37 crc kubenswrapper[4971]: I1127 09:02:37.434374 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-87nq8"] Nov 27 09:02:37 crc kubenswrapper[4971]: I1127 09:02:37.998701 4971 generic.go:334] "Generic (PLEG): container finished" podID="2d7182a5-9bfd-4767-916e-a83a2a32da61" containerID="5374cad01698933203dda93f1df8ae23bea849c645b6c57bcea55da85e84d865" exitCode=0 Nov 27 09:02:38 crc kubenswrapper[4971]: I1127 09:02:37.998809 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-87nq8" event={"ID":"2d7182a5-9bfd-4767-916e-a83a2a32da61","Type":"ContainerDied","Data":"5374cad01698933203dda93f1df8ae23bea849c645b6c57bcea55da85e84d865"} Nov 27 09:02:38 crc kubenswrapper[4971]: I1127 09:02:37.998861 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-87nq8" event={"ID":"2d7182a5-9bfd-4767-916e-a83a2a32da61","Type":"ContainerStarted","Data":"1b326693f6e6947902e372a3b400298c0b68fbd4d66347f9e9d2409ad7d2cfb4"} Nov 27 09:02:38 crc kubenswrapper[4971]: I1127 09:02:38.001526 4971 generic.go:334] "Generic (PLEG): container finished" podID="77cb726e-bcde-40f2-8aee-100180685d40" containerID="61a4fe1884f6b1d01f71b712b67ad345b23c4270373d14be5e9e9937103dbb01" exitCode=0 Nov 27 09:02:38 crc kubenswrapper[4971]: I1127 09:02:38.001587 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-6b4a-account-create-update-bdh9t" event={"ID":"77cb726e-bcde-40f2-8aee-100180685d40","Type":"ContainerDied","Data":"61a4fe1884f6b1d01f71b712b67ad345b23c4270373d14be5e9e9937103dbb01"} Nov 27 09:02:38 crc kubenswrapper[4971]: I1127 09:02:38.001611 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-6b4a-account-create-update-bdh9t" event={"ID":"77cb726e-bcde-40f2-8aee-100180685d40","Type":"ContainerStarted","Data":"18a96f091bb99fe673bce0cac956e7b680a1dfffc1fdc00313a15970900b2c83"} Nov 27 09:02:39 crc kubenswrapper[4971]: I1127 09:02:39.636175 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-87nq8" Nov 27 09:02:39 crc kubenswrapper[4971]: I1127 09:02:39.724257 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d7182a5-9bfd-4767-916e-a83a2a32da61-operator-scripts\") pod \"2d7182a5-9bfd-4767-916e-a83a2a32da61\" (UID: \"2d7182a5-9bfd-4767-916e-a83a2a32da61\") " Nov 27 09:02:39 crc kubenswrapper[4971]: I1127 09:02:39.724356 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b599f\" (UniqueName: \"kubernetes.io/projected/2d7182a5-9bfd-4767-916e-a83a2a32da61-kube-api-access-b599f\") pod \"2d7182a5-9bfd-4767-916e-a83a2a32da61\" (UID: \"2d7182a5-9bfd-4767-916e-a83a2a32da61\") " Nov 27 09:02:39 crc kubenswrapper[4971]: I1127 09:02:39.724893 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d7182a5-9bfd-4767-916e-a83a2a32da61-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2d7182a5-9bfd-4767-916e-a83a2a32da61" (UID: "2d7182a5-9bfd-4767-916e-a83a2a32da61"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:02:39 crc kubenswrapper[4971]: I1127 09:02:39.725212 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d7182a5-9bfd-4767-916e-a83a2a32da61-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 09:02:39 crc kubenswrapper[4971]: I1127 09:02:39.737878 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d7182a5-9bfd-4767-916e-a83a2a32da61-kube-api-access-b599f" (OuterVolumeSpecName: "kube-api-access-b599f") pod "2d7182a5-9bfd-4767-916e-a83a2a32da61" (UID: "2d7182a5-9bfd-4767-916e-a83a2a32da61"). InnerVolumeSpecName "kube-api-access-b599f". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:02:39 crc kubenswrapper[4971]: I1127 09:02:39.810122 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-6b4a-account-create-update-bdh9t" Nov 27 09:02:39 crc kubenswrapper[4971]: I1127 09:02:39.826878 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b599f\" (UniqueName: \"kubernetes.io/projected/2d7182a5-9bfd-4767-916e-a83a2a32da61-kube-api-access-b599f\") on node \"crc\" DevicePath \"\"" Nov 27 09:02:39 crc kubenswrapper[4971]: I1127 09:02:39.928259 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77cb726e-bcde-40f2-8aee-100180685d40-operator-scripts\") pod \"77cb726e-bcde-40f2-8aee-100180685d40\" (UID: \"77cb726e-bcde-40f2-8aee-100180685d40\") " Nov 27 09:02:39 crc kubenswrapper[4971]: I1127 09:02:39.928750 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8sdg\" (UniqueName: \"kubernetes.io/projected/77cb726e-bcde-40f2-8aee-100180685d40-kube-api-access-z8sdg\") pod \"77cb726e-bcde-40f2-8aee-100180685d40\" (UID: \"77cb726e-bcde-40f2-8aee-100180685d40\") " Nov 27 09:02:39 crc kubenswrapper[4971]: I1127 09:02:39.929052 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77cb726e-bcde-40f2-8aee-100180685d40-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "77cb726e-bcde-40f2-8aee-100180685d40" (UID: "77cb726e-bcde-40f2-8aee-100180685d40"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:02:39 crc kubenswrapper[4971]: I1127 09:02:39.929494 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77cb726e-bcde-40f2-8aee-100180685d40-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 09:02:39 crc kubenswrapper[4971]: I1127 09:02:39.932753 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77cb726e-bcde-40f2-8aee-100180685d40-kube-api-access-z8sdg" (OuterVolumeSpecName: "kube-api-access-z8sdg") pod "77cb726e-bcde-40f2-8aee-100180685d40" (UID: "77cb726e-bcde-40f2-8aee-100180685d40"). InnerVolumeSpecName "kube-api-access-z8sdg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:02:40 crc kubenswrapper[4971]: I1127 09:02:40.031226 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8sdg\" (UniqueName: \"kubernetes.io/projected/77cb726e-bcde-40f2-8aee-100180685d40-kube-api-access-z8sdg\") on node \"crc\" DevicePath \"\"" Nov 27 09:02:40 crc kubenswrapper[4971]: I1127 09:02:40.031764 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-6b4a-account-create-update-bdh9t" Nov 27 09:02:40 crc kubenswrapper[4971]: I1127 09:02:40.032637 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-6b4a-account-create-update-bdh9t" event={"ID":"77cb726e-bcde-40f2-8aee-100180685d40","Type":"ContainerDied","Data":"18a96f091bb99fe673bce0cac956e7b680a1dfffc1fdc00313a15970900b2c83"} Nov 27 09:02:40 crc kubenswrapper[4971]: I1127 09:02:40.032685 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18a96f091bb99fe673bce0cac956e7b680a1dfffc1fdc00313a15970900b2c83" Nov 27 09:02:40 crc kubenswrapper[4971]: I1127 09:02:40.034887 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-87nq8" event={"ID":"2d7182a5-9bfd-4767-916e-a83a2a32da61","Type":"ContainerDied","Data":"1b326693f6e6947902e372a3b400298c0b68fbd4d66347f9e9d2409ad7d2cfb4"} Nov 27 09:02:40 crc kubenswrapper[4971]: I1127 09:02:40.034974 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b326693f6e6947902e372a3b400298c0b68fbd4d66347f9e9d2409ad7d2cfb4" Nov 27 09:02:40 crc kubenswrapper[4971]: I1127 09:02:40.034920 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-87nq8" Nov 27 09:02:40 crc kubenswrapper[4971]: I1127 09:02:40.551829 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168" Nov 27 09:02:40 crc kubenswrapper[4971]: E1127 09:02:40.552178 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.022358 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-48fx9"] Nov 27 09:02:42 crc kubenswrapper[4971]: E1127 09:02:42.023383 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77cb726e-bcde-40f2-8aee-100180685d40" containerName="mariadb-account-create-update" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.023403 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="77cb726e-bcde-40f2-8aee-100180685d40" containerName="mariadb-account-create-update" Nov 27 09:02:42 crc kubenswrapper[4971]: E1127 09:02:42.023450 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d7182a5-9bfd-4767-916e-a83a2a32da61" containerName="mariadb-database-create" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.023461 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d7182a5-9bfd-4767-916e-a83a2a32da61" containerName="mariadb-database-create" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.023722 4971 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="77cb726e-bcde-40f2-8aee-100180685d40" containerName="mariadb-account-create-update" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.023744 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d7182a5-9bfd-4767-916e-a83a2a32da61" containerName="mariadb-database-create" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.024802 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-48fx9" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.033217 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.033523 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.033810 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-5g4lg" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.037076 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.049210 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-48fx9"] Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.079499 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wr5zt\" (UniqueName: \"kubernetes.io/projected/ecce1511-2855-4bd4-be9b-408f300f0507-kube-api-access-wr5zt\") pod \"aodh-db-sync-48fx9\" (UID: \"ecce1511-2855-4bd4-be9b-408f300f0507\") " pod="openstack/aodh-db-sync-48fx9" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.079629 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecce1511-2855-4bd4-be9b-408f300f0507-combined-ca-bundle\") pod \"aodh-db-sync-48fx9\" (UID: \"ecce1511-2855-4bd4-be9b-408f300f0507\") " pod="openstack/aodh-db-sync-48fx9" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.079722 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecce1511-2855-4bd4-be9b-408f300f0507-config-data\") pod \"aodh-db-sync-48fx9\" (UID: \"ecce1511-2855-4bd4-be9b-408f300f0507\") " pod="openstack/aodh-db-sync-48fx9" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.080190 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecce1511-2855-4bd4-be9b-408f300f0507-scripts\") pod \"aodh-db-sync-48fx9\" (UID: \"ecce1511-2855-4bd4-be9b-408f300f0507\") " pod="openstack/aodh-db-sync-48fx9" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.183264 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecce1511-2855-4bd4-be9b-408f300f0507-scripts\") pod \"aodh-db-sync-48fx9\" (UID: \"ecce1511-2855-4bd4-be9b-408f300f0507\") " pod="openstack/aodh-db-sync-48fx9" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.183385 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wr5zt\" (UniqueName: \"kubernetes.io/projected/ecce1511-2855-4bd4-be9b-408f300f0507-kube-api-access-wr5zt\") pod \"aodh-db-sync-48fx9\" (UID: \"ecce1511-2855-4bd4-be9b-408f300f0507\") " 
pod="openstack/aodh-db-sync-48fx9" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.183481 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecce1511-2855-4bd4-be9b-408f300f0507-combined-ca-bundle\") pod \"aodh-db-sync-48fx9\" (UID: \"ecce1511-2855-4bd4-be9b-408f300f0507\") " pod="openstack/aodh-db-sync-48fx9" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.183679 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecce1511-2855-4bd4-be9b-408f300f0507-config-data\") pod \"aodh-db-sync-48fx9\" (UID: \"ecce1511-2855-4bd4-be9b-408f300f0507\") " pod="openstack/aodh-db-sync-48fx9" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.196961 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecce1511-2855-4bd4-be9b-408f300f0507-combined-ca-bundle\") pod \"aodh-db-sync-48fx9\" (UID: \"ecce1511-2855-4bd4-be9b-408f300f0507\") " pod="openstack/aodh-db-sync-48fx9" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.197472 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecce1511-2855-4bd4-be9b-408f300f0507-config-data\") pod \"aodh-db-sync-48fx9\" (UID: \"ecce1511-2855-4bd4-be9b-408f300f0507\") " pod="openstack/aodh-db-sync-48fx9" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.198490 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecce1511-2855-4bd4-be9b-408f300f0507-scripts\") pod \"aodh-db-sync-48fx9\" (UID: \"ecce1511-2855-4bd4-be9b-408f300f0507\") " pod="openstack/aodh-db-sync-48fx9" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.206239 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wr5zt\" (UniqueName: \"kubernetes.io/projected/ecce1511-2855-4bd4-be9b-408f300f0507-kube-api-access-wr5zt\") pod \"aodh-db-sync-48fx9\" (UID: \"ecce1511-2855-4bd4-be9b-408f300f0507\") " pod="openstack/aodh-db-sync-48fx9" Nov 27 09:02:42 crc kubenswrapper[4971]: I1127 09:02:42.361411 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-48fx9" Nov 27 09:02:43 crc kubenswrapper[4971]: I1127 09:02:43.021750 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-48fx9"] Nov 27 09:02:43 crc kubenswrapper[4971]: I1127 09:02:43.081509 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-48fx9" event={"ID":"ecce1511-2855-4bd4-be9b-408f300f0507","Type":"ContainerStarted","Data":"33b7916e9b8ca4147ab47f3ddf8f5a038b0a55748dfde4954b8db40bc6aa5a9f"} Nov 27 09:02:43 crc kubenswrapper[4971]: I1127 09:02:43.093154 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-w997d"] Nov 27 09:02:43 crc kubenswrapper[4971]: I1127 09:02:43.104344 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-w997d"] Nov 27 09:02:44 crc kubenswrapper[4971]: I1127 09:02:44.566520 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="803c80a6-4a93-48a9-b3d6-5d7142c41863" path="/var/lib/kubelet/pods/803c80a6-4a93-48a9-b3d6-5d7142c41863/volumes" Nov 27 09:02:48 crc kubenswrapper[4971]: I1127 09:02:48.689353 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 27 09:02:49 crc kubenswrapper[4971]: I1127 09:02:49.149872 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-48fx9" event={"ID":"ecce1511-2855-4bd4-be9b-408f300f0507","Type":"ContainerStarted","Data":"bef5104008fc165d01339a6f002625a99127c121b3efb2dbc980ae7c2bd7dbfd"} Nov 27 09:02:49 crc kubenswrapper[4971]: I1127 09:02:49.173267 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-48fx9" podStartSLOduration=2.511299612 podStartE2EDuration="8.173239378s" podCreationTimestamp="2025-11-27 09:02:41 +0000 UTC" firstStartedPulling="2025-11-27 09:02:43.023934253 +0000 UTC m=+7801.215978171" lastFinishedPulling="2025-11-27 09:02:48.685874009 +0000 UTC m=+7806.877917937" observedRunningTime="2025-11-27 09:02:49.171662723 +0000 UTC m=+7807.363706661" watchObservedRunningTime="2025-11-27 09:02:49.173239378 +0000 UTC m=+7807.365283296" Nov 27 09:02:49 crc kubenswrapper[4971]: I1127 09:02:49.507878 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 27 09:02:51 crc kubenswrapper[4971]: I1127 09:02:51.174208 4971 generic.go:334] "Generic (PLEG): container finished" podID="ecce1511-2855-4bd4-be9b-408f300f0507" containerID="bef5104008fc165d01339a6f002625a99127c121b3efb2dbc980ae7c2bd7dbfd" exitCode=0 Nov 27 09:02:51 crc kubenswrapper[4971]: I1127 09:02:51.174290 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-48fx9" event={"ID":"ecce1511-2855-4bd4-be9b-408f300f0507","Type":"ContainerDied","Data":"bef5104008fc165d01339a6f002625a99127c121b3efb2dbc980ae7c2bd7dbfd"} Nov 27 09:02:52 crc kubenswrapper[4971]: I1127 09:02:52.563264 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168" Nov 27 09:02:52 crc kubenswrapper[4971]: E1127 09:02:52.564298 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" 
Nov 27 09:02:52 crc kubenswrapper[4971]: I1127 09:02:52.605905 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-48fx9"
Nov 27 09:02:52 crc kubenswrapper[4971]: I1127 09:02:52.740186 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wr5zt\" (UniqueName: \"kubernetes.io/projected/ecce1511-2855-4bd4-be9b-408f300f0507-kube-api-access-wr5zt\") pod \"ecce1511-2855-4bd4-be9b-408f300f0507\" (UID: \"ecce1511-2855-4bd4-be9b-408f300f0507\") "
Nov 27 09:02:52 crc kubenswrapper[4971]: I1127 09:02:52.740319 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecce1511-2855-4bd4-be9b-408f300f0507-scripts\") pod \"ecce1511-2855-4bd4-be9b-408f300f0507\" (UID: \"ecce1511-2855-4bd4-be9b-408f300f0507\") "
Nov 27 09:02:52 crc kubenswrapper[4971]: I1127 09:02:52.740341 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecce1511-2855-4bd4-be9b-408f300f0507-config-data\") pod \"ecce1511-2855-4bd4-be9b-408f300f0507\" (UID: \"ecce1511-2855-4bd4-be9b-408f300f0507\") "
Nov 27 09:02:52 crc kubenswrapper[4971]: I1127 09:02:52.740446 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecce1511-2855-4bd4-be9b-408f300f0507-combined-ca-bundle\") pod \"ecce1511-2855-4bd4-be9b-408f300f0507\" (UID: \"ecce1511-2855-4bd4-be9b-408f300f0507\") "
Nov 27 09:02:52 crc kubenswrapper[4971]: I1127 09:02:52.746511 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecce1511-2855-4bd4-be9b-408f300f0507-kube-api-access-wr5zt" (OuterVolumeSpecName: "kube-api-access-wr5zt") pod "ecce1511-2855-4bd4-be9b-408f300f0507" (UID: "ecce1511-2855-4bd4-be9b-408f300f0507"). InnerVolumeSpecName "kube-api-access-wr5zt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:02:52 crc kubenswrapper[4971]: I1127 09:02:52.746640 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecce1511-2855-4bd4-be9b-408f300f0507-scripts" (OuterVolumeSpecName: "scripts") pod "ecce1511-2855-4bd4-be9b-408f300f0507" (UID: "ecce1511-2855-4bd4-be9b-408f300f0507"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:02:52 crc kubenswrapper[4971]: I1127 09:02:52.769188 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecce1511-2855-4bd4-be9b-408f300f0507-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ecce1511-2855-4bd4-be9b-408f300f0507" (UID: "ecce1511-2855-4bd4-be9b-408f300f0507"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:02:52 crc kubenswrapper[4971]: I1127 09:02:52.770002 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecce1511-2855-4bd4-be9b-408f300f0507-config-data" (OuterVolumeSpecName: "config-data") pod "ecce1511-2855-4bd4-be9b-408f300f0507" (UID: "ecce1511-2855-4bd4-be9b-408f300f0507"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:02:52 crc kubenswrapper[4971]: I1127 09:02:52.843752 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wr5zt\" (UniqueName: \"kubernetes.io/projected/ecce1511-2855-4bd4-be9b-408f300f0507-kube-api-access-wr5zt\") on node \"crc\" DevicePath \"\""
Nov 27 09:02:52 crc kubenswrapper[4971]: I1127 09:02:52.843805 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecce1511-2855-4bd4-be9b-408f300f0507-scripts\") on node \"crc\" DevicePath \"\""
Nov 27 09:02:52 crc kubenswrapper[4971]: I1127 09:02:52.843820 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecce1511-2855-4bd4-be9b-408f300f0507-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 09:02:52 crc kubenswrapper[4971]: I1127 09:02:52.843835 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecce1511-2855-4bd4-be9b-408f300f0507-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 09:02:53 crc kubenswrapper[4971]: I1127 09:02:53.194715 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-48fx9" event={"ID":"ecce1511-2855-4bd4-be9b-408f300f0507","Type":"ContainerDied","Data":"33b7916e9b8ca4147ab47f3ddf8f5a038b0a55748dfde4954b8db40bc6aa5a9f"}
Nov 27 09:02:53 crc kubenswrapper[4971]: I1127 09:02:53.194744 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-48fx9"
Nov 27 09:02:53 crc kubenswrapper[4971]: I1127 09:02:53.194758 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33b7916e9b8ca4147ab47f3ddf8f5a038b0a55748dfde4954b8db40bc6aa5a9f"
Nov 27 09:02:55 crc kubenswrapper[4971]: I1127 09:02:55.638114 4971 scope.go:117] "RemoveContainer" containerID="39926ee57c85d2ca278f72689d231b7b6c2326cc4084040de24294226786a3f1"
Nov 27 09:02:55 crc kubenswrapper[4971]: I1127 09:02:55.669301 4971 scope.go:117] "RemoveContainer" containerID="194a0db8ab08a82ff3dcb919c604689550acf34b2e905554387735c50d3907a4"
Nov 27 09:02:55 crc kubenswrapper[4971]: I1127 09:02:55.744032 4971 scope.go:117] "RemoveContainer" containerID="ee77c16458699ec5f0aa1db1aeaefe51b66693591eac7d455fd7720894485685"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.589294 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"]
Nov 27 09:02:56 crc kubenswrapper[4971]: E1127 09:02:56.589957 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecce1511-2855-4bd4-be9b-408f300f0507" containerName="aodh-db-sync"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.589978 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecce1511-2855-4bd4-be9b-408f300f0507" containerName="aodh-db-sync"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.590258 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecce1511-2855-4bd4-be9b-408f300f0507" containerName="aodh-db-sync"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.603792 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.610969 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-5g4lg"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.611307 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.611488 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.611917 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"]
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.733314 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f88d1fcd-36f3-489a-a844-4d2208822b41-config-data\") pod \"aodh-0\" (UID: \"f88d1fcd-36f3-489a-a844-4d2208822b41\") " pod="openstack/aodh-0"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.733408 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f88d1fcd-36f3-489a-a844-4d2208822b41-combined-ca-bundle\") pod \"aodh-0\" (UID: \"f88d1fcd-36f3-489a-a844-4d2208822b41\") " pod="openstack/aodh-0"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.733449 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f88d1fcd-36f3-489a-a844-4d2208822b41-scripts\") pod \"aodh-0\" (UID: \"f88d1fcd-36f3-489a-a844-4d2208822b41\") " pod="openstack/aodh-0"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.733620 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brjxb\" (UniqueName: \"kubernetes.io/projected/f88d1fcd-36f3-489a-a844-4d2208822b41-kube-api-access-brjxb\") pod \"aodh-0\" (UID: \"f88d1fcd-36f3-489a-a844-4d2208822b41\") " pod="openstack/aodh-0"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.835823 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f88d1fcd-36f3-489a-a844-4d2208822b41-scripts\") pod \"aodh-0\" (UID: \"f88d1fcd-36f3-489a-a844-4d2208822b41\") " pod="openstack/aodh-0"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.836445 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brjxb\" (UniqueName: \"kubernetes.io/projected/f88d1fcd-36f3-489a-a844-4d2208822b41-kube-api-access-brjxb\") pod \"aodh-0\" (UID: \"f88d1fcd-36f3-489a-a844-4d2208822b41\") " pod="openstack/aodh-0"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.837005 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f88d1fcd-36f3-489a-a844-4d2208822b41-config-data\") pod \"aodh-0\" (UID: \"f88d1fcd-36f3-489a-a844-4d2208822b41\") " pod="openstack/aodh-0"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.837562 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f88d1fcd-36f3-489a-a844-4d2208822b41-combined-ca-bundle\") pod \"aodh-0\" (UID: \"f88d1fcd-36f3-489a-a844-4d2208822b41\") " pod="openstack/aodh-0"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.845149 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f88d1fcd-36f3-489a-a844-4d2208822b41-combined-ca-bundle\") pod \"aodh-0\" (UID: \"f88d1fcd-36f3-489a-a844-4d2208822b41\") " pod="openstack/aodh-0"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.845148 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f88d1fcd-36f3-489a-a844-4d2208822b41-scripts\") pod \"aodh-0\" (UID: \"f88d1fcd-36f3-489a-a844-4d2208822b41\") " pod="openstack/aodh-0"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.858666 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f88d1fcd-36f3-489a-a844-4d2208822b41-config-data\") pod \"aodh-0\" (UID: \"f88d1fcd-36f3-489a-a844-4d2208822b41\") " pod="openstack/aodh-0"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.862475 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brjxb\" (UniqueName: \"kubernetes.io/projected/f88d1fcd-36f3-489a-a844-4d2208822b41-kube-api-access-brjxb\") pod \"aodh-0\" (UID: \"f88d1fcd-36f3-489a-a844-4d2208822b41\") " pod="openstack/aodh-0"
Nov 27 09:02:56 crc kubenswrapper[4971]: I1127 09:02:56.935089 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0"
Nov 27 09:02:58 crc kubenswrapper[4971]: I1127 09:02:58.017933 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"]
Nov 27 09:02:58 crc kubenswrapper[4971]: I1127 09:02:58.243994 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f88d1fcd-36f3-489a-a844-4d2208822b41","Type":"ContainerStarted","Data":"2cf7b088fd3426070cea80065e692237fcbcbb55ee1b449bbf9d840e7d953e26"}
Nov 27 09:02:59 crc kubenswrapper[4971]: I1127 09:02:59.255938 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f88d1fcd-36f3-489a-a844-4d2208822b41","Type":"ContainerStarted","Data":"b6a0f9bd33db8d8777129298e5608032884e1a4200d4e552700949e53f58b4a8"}
Nov 27 09:02:59 crc kubenswrapper[4971]: I1127 09:02:59.364067 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Nov 27 09:02:59 crc kubenswrapper[4971]: I1127 09:02:59.364384 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ecfe453b-4675-4375-b265-5a892560c4bb" containerName="ceilometer-central-agent" containerID="cri-o://4065a9ac93d96eeb972bc92cf37d5e337d28e952f7ac822cf5d232d62605fbde" gracePeriod=30
Nov 27 09:02:59 crc kubenswrapper[4971]: I1127 09:02:59.364687 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ecfe453b-4675-4375-b265-5a892560c4bb" containerName="proxy-httpd" containerID="cri-o://9b0932496ad0b5c4f42ab6838e895192cc87b874f748ae91e9e107da2e2d7df9" gracePeriod=30
Nov 27 09:02:59 crc kubenswrapper[4971]: I1127 09:02:59.364752 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ecfe453b-4675-4375-b265-5a892560c4bb" containerName="ceilometer-notification-agent" containerID="cri-o://63272d2818dcccba3530122d6f603b6771cb13cbc7f37e32162bfe5a28a3dcc5" gracePeriod=30
Nov 27 09:02:59 crc kubenswrapper[4971]: I1127 09:02:59.364897 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ecfe453b-4675-4375-b265-5a892560c4bb" containerName="sg-core" containerID="cri-o://fdb0cc0273b2b0254c9fbc363a767f4fca5e9dbbfc2550d5a83796409b5c24c4" gracePeriod=30
Nov 27 09:03:00 crc kubenswrapper[4971]: I1127 09:03:00.308980 4971 generic.go:334] "Generic (PLEG): container finished" podID="ecfe453b-4675-4375-b265-5a892560c4bb" containerID="9b0932496ad0b5c4f42ab6838e895192cc87b874f748ae91e9e107da2e2d7df9" exitCode=0
Nov 27 09:03:00 crc kubenswrapper[4971]: I1127 09:03:00.309408 4971 generic.go:334] "Generic (PLEG): container finished" podID="ecfe453b-4675-4375-b265-5a892560c4bb" containerID="fdb0cc0273b2b0254c9fbc363a767f4fca5e9dbbfc2550d5a83796409b5c24c4" exitCode=2
Nov 27 09:03:00 crc kubenswrapper[4971]: I1127 09:03:00.309423 4971 generic.go:334] "Generic (PLEG): container finished" podID="ecfe453b-4675-4375-b265-5a892560c4bb" containerID="4065a9ac93d96eeb972bc92cf37d5e337d28e952f7ac822cf5d232d62605fbde" exitCode=0
Nov 27 09:03:00 crc kubenswrapper[4971]: I1127 09:03:00.309445 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ecfe453b-4675-4375-b265-5a892560c4bb","Type":"ContainerDied","Data":"9b0932496ad0b5c4f42ab6838e895192cc87b874f748ae91e9e107da2e2d7df9"}
Nov 27 09:03:00 crc kubenswrapper[4971]: I1127 09:03:00.309554 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ecfe453b-4675-4375-b265-5a892560c4bb","Type":"ContainerDied","Data":"fdb0cc0273b2b0254c9fbc363a767f4fca5e9dbbfc2550d5a83796409b5c24c4"}
Nov 27 09:03:00 crc kubenswrapper[4971]: I1127 09:03:00.309568 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ecfe453b-4675-4375-b265-5a892560c4bb","Type":"ContainerDied","Data":"4065a9ac93d96eeb972bc92cf37d5e337d28e952f7ac822cf5d232d62605fbde"}
Nov 27 09:03:00 crc kubenswrapper[4971]: I1127 09:03:00.313691 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f88d1fcd-36f3-489a-a844-4d2208822b41","Type":"ContainerStarted","Data":"18049868fab2cbe361350c91d0c8278a8e2046131c236ef088748aecb0d54299"}
Nov 27 09:03:01 crc kubenswrapper[4971]: I1127 09:03:01.329321 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f88d1fcd-36f3-489a-a844-4d2208822b41","Type":"ContainerStarted","Data":"a4abed2cf295c200f8f1f99f739ad9051d25e8714a1184d22e14becae13f5799"}
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.341881 4971 generic.go:334] "Generic (PLEG): container finished" podID="ecfe453b-4675-4375-b265-5a892560c4bb" containerID="63272d2818dcccba3530122d6f603b6771cb13cbc7f37e32162bfe5a28a3dcc5" exitCode=0
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.342054 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ecfe453b-4675-4375-b265-5a892560c4bb","Type":"ContainerDied","Data":"63272d2818dcccba3530122d6f603b6771cb13cbc7f37e32162bfe5a28a3dcc5"}
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.342376 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ecfe453b-4675-4375-b265-5a892560c4bb","Type":"ContainerDied","Data":"9078abc006bf8f53043aea2b909b30301afeaf6e580faee8073a704d479cfb74"}
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.342400 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9078abc006bf8f53043aea2b909b30301afeaf6e580faee8073a704d479cfb74"
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.342928 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.393792 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecfe453b-4675-4375-b265-5a892560c4bb-log-httpd\") pod \"ecfe453b-4675-4375-b265-5a892560c4bb\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") "
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.393831 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-scripts\") pod \"ecfe453b-4675-4375-b265-5a892560c4bb\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") "
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.393871 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-combined-ca-bundle\") pod \"ecfe453b-4675-4375-b265-5a892560c4bb\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") "
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.393888 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecfe453b-4675-4375-b265-5a892560c4bb-run-httpd\") pod \"ecfe453b-4675-4375-b265-5a892560c4bb\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") "
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.393948 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-sg-core-conf-yaml\") pod \"ecfe453b-4675-4375-b265-5a892560c4bb\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") "
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.394159 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fnp7l\" (UniqueName: \"kubernetes.io/projected/ecfe453b-4675-4375-b265-5a892560c4bb-kube-api-access-fnp7l\") pod \"ecfe453b-4675-4375-b265-5a892560c4bb\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") "
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.394220 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-config-data\") pod \"ecfe453b-4675-4375-b265-5a892560c4bb\" (UID: \"ecfe453b-4675-4375-b265-5a892560c4bb\") "
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.397621 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecfe453b-4675-4375-b265-5a892560c4bb-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ecfe453b-4675-4375-b265-5a892560c4bb" (UID: "ecfe453b-4675-4375-b265-5a892560c4bb"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.412266 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecfe453b-4675-4375-b265-5a892560c4bb-kube-api-access-fnp7l" (OuterVolumeSpecName: "kube-api-access-fnp7l") pod "ecfe453b-4675-4375-b265-5a892560c4bb" (UID: "ecfe453b-4675-4375-b265-5a892560c4bb"). InnerVolumeSpecName "kube-api-access-fnp7l". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.412940 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecfe453b-4675-4375-b265-5a892560c4bb-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ecfe453b-4675-4375-b265-5a892560c4bb" (UID: "ecfe453b-4675-4375-b265-5a892560c4bb"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.412943 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-scripts" (OuterVolumeSpecName: "scripts") pod "ecfe453b-4675-4375-b265-5a892560c4bb" (UID: "ecfe453b-4675-4375-b265-5a892560c4bb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.440574 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ecfe453b-4675-4375-b265-5a892560c4bb" (UID: "ecfe453b-4675-4375-b265-5a892560c4bb"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.483689 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ecfe453b-4675-4375-b265-5a892560c4bb" (UID: "ecfe453b-4675-4375-b265-5a892560c4bb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.495445 4971 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecfe453b-4675-4375-b265-5a892560c4bb-log-httpd\") on node \"crc\" DevicePath \"\""
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.495479 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-scripts\") on node \"crc\" DevicePath \"\""
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.495488 4971 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ecfe453b-4675-4375-b265-5a892560c4bb-run-httpd\") on node \"crc\" DevicePath \"\""
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.495560 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.495574 4971 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.495607 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fnp7l\" (UniqueName: \"kubernetes.io/projected/ecfe453b-4675-4375-b265-5a892560c4bb-kube-api-access-fnp7l\") on node \"crc\" DevicePath \"\""
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.503870 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-config-data" (OuterVolumeSpecName: "config-data") pod "ecfe453b-4675-4375-b265-5a892560c4bb" (UID: "ecfe453b-4675-4375-b265-5a892560c4bb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:03:02 crc kubenswrapper[4971]: I1127 09:03:02.600400 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecfe453b-4675-4375-b265-5a892560c4bb-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.356846 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"f88d1fcd-36f3-489a-a844-4d2208822b41","Type":"ContainerStarted","Data":"8c3c5ae5c8bc6cf29bb39c553369082edc67fc1cfb93258ffa506325ffea43fa"}
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.356870 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.393756 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=3.506145748 podStartE2EDuration="7.390512998s" podCreationTimestamp="2025-11-27 09:02:56 +0000 UTC" firstStartedPulling="2025-11-27 09:02:58.047395779 +0000 UTC m=+7816.239439697" lastFinishedPulling="2025-11-27 09:03:01.931763029 +0000 UTC m=+7820.123806947" observedRunningTime="2025-11-27 09:03:03.385619188 +0000 UTC m=+7821.577663116" watchObservedRunningTime="2025-11-27 09:03:03.390512998 +0000 UTC m=+7821.582556906"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.450747 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.492012 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.498627 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Nov 27 09:03:03 crc kubenswrapper[4971]: E1127 09:03:03.499982 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecfe453b-4675-4375-b265-5a892560c4bb" containerName="ceilometer-notification-agent"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.500012 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecfe453b-4675-4375-b265-5a892560c4bb" containerName="ceilometer-notification-agent"
Nov 27 09:03:03 crc kubenswrapper[4971]: E1127 09:03:03.500056 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecfe453b-4675-4375-b265-5a892560c4bb" containerName="sg-core"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.500066 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecfe453b-4675-4375-b265-5a892560c4bb" containerName="sg-core"
Nov 27 09:03:03 crc kubenswrapper[4971]: E1127 09:03:03.500084 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecfe453b-4675-4375-b265-5a892560c4bb" containerName="ceilometer-central-agent"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.500092 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecfe453b-4675-4375-b265-5a892560c4bb" containerName="ceilometer-central-agent"
Nov 27 09:03:03 crc kubenswrapper[4971]: E1127 09:03:03.500114 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecfe453b-4675-4375-b265-5a892560c4bb" containerName="proxy-httpd"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.500246 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecfe453b-4675-4375-b265-5a892560c4bb" containerName="proxy-httpd"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.500578 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecfe453b-4675-4375-b265-5a892560c4bb" containerName="proxy-httpd"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.500623 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecfe453b-4675-4375-b265-5a892560c4bb" containerName="sg-core"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.500638 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecfe453b-4675-4375-b265-5a892560c4bb" containerName="ceilometer-notification-agent"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.500655 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecfe453b-4675-4375-b265-5a892560c4bb" containerName="ceilometer-central-agent"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.503017 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.509189 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.509252 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.540781 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.619569 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-scripts\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.620581 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44fc7298-f2a4-4278-9b26-24ad31c66cd5-run-httpd\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.620733 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.620859 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-config-data\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.620948 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwt7v\" (UniqueName: \"kubernetes.io/projected/44fc7298-f2a4-4278-9b26-24ad31c66cd5-kube-api-access-bwt7v\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.621172 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44fc7298-f2a4-4278-9b26-24ad31c66cd5-log-httpd\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.621270 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.723933 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-scripts\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.723991 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44fc7298-f2a4-4278-9b26-24ad31c66cd5-run-httpd\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.724025 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.724064 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-config-data\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.724105 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwt7v\" (UniqueName: \"kubernetes.io/projected/44fc7298-f2a4-4278-9b26-24ad31c66cd5-kube-api-access-bwt7v\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.724179 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44fc7298-f2a4-4278-9b26-24ad31c66cd5-log-httpd\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.724218 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.724523 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44fc7298-f2a4-4278-9b26-24ad31c66cd5-run-httpd\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.724575 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44fc7298-f2a4-4278-9b26-24ad31c66cd5-log-httpd\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.728653 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-scripts\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.728867 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.740075 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.740375 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-config-data\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.755209 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwt7v\" (UniqueName: \"kubernetes.io/projected/44fc7298-f2a4-4278-9b26-24ad31c66cd5-kube-api-access-bwt7v\") pod \"ceilometer-0\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " pod="openstack/ceilometer-0"
Nov 27 09:03:03 crc kubenswrapper[4971]: I1127 09:03:03.830630 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 27 09:03:04 crc kubenswrapper[4971]: I1127 09:03:04.372897 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Nov 27 09:03:04 crc kubenswrapper[4971]: W1127 09:03:04.373462 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod44fc7298_f2a4_4278_9b26_24ad31c66cd5.slice/crio-8890e266f84fc2578d8cbe5b1f5e81bdbbbc584456e7e36e51e2dbfcf8dbdb6e WatchSource:0}: Error finding container 8890e266f84fc2578d8cbe5b1f5e81bdbbbc584456e7e36e51e2dbfcf8dbdb6e: Status 404 returned error can't find the container with id 8890e266f84fc2578d8cbe5b1f5e81bdbbbc584456e7e36e51e2dbfcf8dbdb6e
Nov 27 09:03:04 crc kubenswrapper[4971]: I1127 09:03:04.565920 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecfe453b-4675-4375-b265-5a892560c4bb" path="/var/lib/kubelet/pods/ecfe453b-4675-4375-b265-5a892560c4bb/volumes"
Nov 27 09:03:05 crc kubenswrapper[4971]: I1127 09:03:05.384108 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"44fc7298-f2a4-4278-9b26-24ad31c66cd5","Type":"ContainerStarted","Data":"5aae78464f6aa5cd9ef4630c21ac61c7cdce20b91eaa7173b2622ab2f6e96af3"}
Nov 27 09:03:05 crc kubenswrapper[4971]: I1127 09:03:05.384528 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"44fc7298-f2a4-4278-9b26-24ad31c66cd5","Type":"ContainerStarted","Data":"8890e266f84fc2578d8cbe5b1f5e81bdbbbc584456e7e36e51e2dbfcf8dbdb6e"}
Nov 27 09:03:05 crc kubenswrapper[4971]: I1127 09:03:05.550917 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168"
Nov 27 09:03:05 crc kubenswrapper[4971]: E1127 09:03:05.551497 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:03:06 crc kubenswrapper[4971]: I1127 09:03:06.400605 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"44fc7298-f2a4-4278-9b26-24ad31c66cd5","Type":"ContainerStarted","Data":"ae31041b97490942c81f12b5b593965066a09bd39de719e60ba76ce5ed7d9dbd"}
Nov 27 09:03:06 crc kubenswrapper[4971]: I1127 09:03:06.400967 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"44fc7298-f2a4-4278-9b26-24ad31c66cd5","Type":"ContainerStarted","Data":"1a70de4bb093cb5e220d019a58f00a9ba6a0edd749dfd99ef96d5eadc0651aed"}
Nov 27 09:03:07 crc kubenswrapper[4971]: I1127 09:03:07.416675 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"44fc7298-f2a4-4278-9b26-24ad31c66cd5","Type":"ContainerStarted","Data":"09e43000bb4dd7562a875107fa08a4132d19c8d68bc2e42c7c0d9001d0550af9"}
Nov 27 09:03:07 crc kubenswrapper[4971]: I1127 09:03:07.417134 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Nov 27 09:03:07 crc kubenswrapper[4971]: I1127 09:03:07.445859 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.735128967 podStartE2EDuration="4.445840648s"
podCreationTimestamp="2025-11-27 09:03:03 +0000 UTC" firstStartedPulling="2025-11-27 09:03:04.375377446 +0000 UTC m=+7822.567421364" lastFinishedPulling="2025-11-27 09:03:07.086089117 +0000 UTC m=+7825.278133045" observedRunningTime="2025-11-27 09:03:07.437763086 +0000 UTC m=+7825.629807004" watchObservedRunningTime="2025-11-27 09:03:07.445840648 +0000 UTC m=+7825.637884566" Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.594512 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-5vg66"] Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.596843 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-5vg66" Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.610761 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-5vg66"] Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.659122 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/942471a2-354d-46eb-8043-a0b3de2d23ab-operator-scripts\") pod \"manila-db-create-5vg66\" (UID: \"942471a2-354d-46eb-8043-a0b3de2d23ab\") " pod="openstack/manila-db-create-5vg66" Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.659285 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmk8m\" (UniqueName: \"kubernetes.io/projected/942471a2-354d-46eb-8043-a0b3de2d23ab-kube-api-access-gmk8m\") pod \"manila-db-create-5vg66\" (UID: \"942471a2-354d-46eb-8043-a0b3de2d23ab\") " pod="openstack/manila-db-create-5vg66" Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.689632 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-6269-account-create-update-9ww84"] Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.691415 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-6269-account-create-update-9ww84" Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.694170 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.707911 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-6269-account-create-update-9ww84"] Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.764217 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/942471a2-354d-46eb-8043-a0b3de2d23ab-operator-scripts\") pod \"manila-db-create-5vg66\" (UID: \"942471a2-354d-46eb-8043-a0b3de2d23ab\") " pod="openstack/manila-db-create-5vg66" Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.764385 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmk8m\" (UniqueName: \"kubernetes.io/projected/942471a2-354d-46eb-8043-a0b3de2d23ab-kube-api-access-gmk8m\") pod \"manila-db-create-5vg66\" (UID: \"942471a2-354d-46eb-8043-a0b3de2d23ab\") " pod="openstack/manila-db-create-5vg66" Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.765851 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/942471a2-354d-46eb-8043-a0b3de2d23ab-operator-scripts\") pod \"manila-db-create-5vg66\" (UID: \"942471a2-354d-46eb-8043-a0b3de2d23ab\") " pod="openstack/manila-db-create-5vg66" Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.804398 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmk8m\" (UniqueName: \"kubernetes.io/projected/942471a2-354d-46eb-8043-a0b3de2d23ab-kube-api-access-gmk8m\") pod \"manila-db-create-5vg66\" (UID: \"942471a2-354d-46eb-8043-a0b3de2d23ab\") " pod="openstack/manila-db-create-5vg66" Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.867525 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8957\" (UniqueName: \"kubernetes.io/projected/5c543498-8218-435c-9dc7-783a4b91f163-kube-api-access-w8957\") pod \"manila-6269-account-create-update-9ww84\" (UID: \"5c543498-8218-435c-9dc7-783a4b91f163\") " pod="openstack/manila-6269-account-create-update-9ww84" Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.867671 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c543498-8218-435c-9dc7-783a4b91f163-operator-scripts\") pod \"manila-6269-account-create-update-9ww84\" (UID: \"5c543498-8218-435c-9dc7-783a4b91f163\") " pod="openstack/manila-6269-account-create-update-9ww84" Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.921625 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-5vg66" Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.969897 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8957\" (UniqueName: \"kubernetes.io/projected/5c543498-8218-435c-9dc7-783a4b91f163-kube-api-access-w8957\") pod \"manila-6269-account-create-update-9ww84\" (UID: \"5c543498-8218-435c-9dc7-783a4b91f163\") " pod="openstack/manila-6269-account-create-update-9ww84" Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.970643 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c543498-8218-435c-9dc7-783a4b91f163-operator-scripts\") pod \"manila-6269-account-create-update-9ww84\" (UID: \"5c543498-8218-435c-9dc7-783a4b91f163\") " pod="openstack/manila-6269-account-create-update-9ww84" Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.971732 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c543498-8218-435c-9dc7-783a4b91f163-operator-scripts\") pod \"manila-6269-account-create-update-9ww84\" (UID: \"5c543498-8218-435c-9dc7-783a4b91f163\") " pod="openstack/manila-6269-account-create-update-9ww84" Nov 27 09:03:09 crc kubenswrapper[4971]: I1127 09:03:09.996885 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8957\" (UniqueName: \"kubernetes.io/projected/5c543498-8218-435c-9dc7-783a4b91f163-kube-api-access-w8957\") pod \"manila-6269-account-create-update-9ww84\" (UID: \"5c543498-8218-435c-9dc7-783a4b91f163\") " pod="openstack/manila-6269-account-create-update-9ww84" Nov 27 09:03:10 crc kubenswrapper[4971]: I1127 09:03:10.049251 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-6269-account-create-update-9ww84" Nov 27 09:03:10 crc kubenswrapper[4971]: I1127 09:03:10.450947 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-5vg66"] Nov 27 09:03:10 crc kubenswrapper[4971]: I1127 09:03:10.626185 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-6269-account-create-update-9ww84"] Nov 27 09:03:10 crc kubenswrapper[4971]: W1127 09:03:10.640155 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c543498_8218_435c_9dc7_783a4b91f163.slice/crio-61d9d313af94090944c7b4ee7ddf7168d1563780c5ff7c542447a0a1f4a0468f WatchSource:0}: Error finding container 61d9d313af94090944c7b4ee7ddf7168d1563780c5ff7c542447a0a1f4a0468f: Status 404 returned error can't find the container with id 61d9d313af94090944c7b4ee7ddf7168d1563780c5ff7c542447a0a1f4a0468f Nov 27 09:03:11 crc kubenswrapper[4971]: I1127 09:03:11.465455 4971 generic.go:334] "Generic (PLEG): container finished" podID="5c543498-8218-435c-9dc7-783a4b91f163" containerID="5a3ed4d856abc168aab78d8010ca707928a2e589e2c24300a1fe6ddd9854305f" exitCode=0 Nov 27 09:03:11 crc kubenswrapper[4971]: I1127 09:03:11.465890 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-6269-account-create-update-9ww84" event={"ID":"5c543498-8218-435c-9dc7-783a4b91f163","Type":"ContainerDied","Data":"5a3ed4d856abc168aab78d8010ca707928a2e589e2c24300a1fe6ddd9854305f"} Nov 27 09:03:11 crc kubenswrapper[4971]: I1127 09:03:11.465925 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-6269-account-create-update-9ww84" event={"ID":"5c543498-8218-435c-9dc7-783a4b91f163","Type":"ContainerStarted","Data":"61d9d313af94090944c7b4ee7ddf7168d1563780c5ff7c542447a0a1f4a0468f"} Nov 27 09:03:11 crc kubenswrapper[4971]: I1127 09:03:11.473685 4971 generic.go:334] "Generic (PLEG): container finished" podID="942471a2-354d-46eb-8043-a0b3de2d23ab" containerID="a298280c9bf1965ef19a615dfe5785582991b165ad7f08075f4ad09950a29373" exitCode=0 Nov 27 09:03:11 crc kubenswrapper[4971]: I1127 09:03:11.473758 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-5vg66" event={"ID":"942471a2-354d-46eb-8043-a0b3de2d23ab","Type":"ContainerDied","Data":"a298280c9bf1965ef19a615dfe5785582991b165ad7f08075f4ad09950a29373"} Nov 27 09:03:11 crc kubenswrapper[4971]: I1127 09:03:11.473800 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-5vg66" event={"ID":"942471a2-354d-46eb-8043-a0b3de2d23ab","Type":"ContainerStarted","Data":"8bf0d2f272f39be7c07623461de142a4e611aab9737598bb879b0c65f6d781d0"} Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.021201 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-5vg66" Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.060376 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-2vn4z"] Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.073198 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-2vn4z"] Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.152078 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmk8m\" (UniqueName: \"kubernetes.io/projected/942471a2-354d-46eb-8043-a0b3de2d23ab-kube-api-access-gmk8m\") pod \"942471a2-354d-46eb-8043-a0b3de2d23ab\" (UID: \"942471a2-354d-46eb-8043-a0b3de2d23ab\") " Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.152228 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/942471a2-354d-46eb-8043-a0b3de2d23ab-operator-scripts\") pod \"942471a2-354d-46eb-8043-a0b3de2d23ab\" (UID: \"942471a2-354d-46eb-8043-a0b3de2d23ab\") " Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.152961 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/942471a2-354d-46eb-8043-a0b3de2d23ab-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "942471a2-354d-46eb-8043-a0b3de2d23ab" (UID: "942471a2-354d-46eb-8043-a0b3de2d23ab"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.160613 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/942471a2-354d-46eb-8043-a0b3de2d23ab-kube-api-access-gmk8m" (OuterVolumeSpecName: "kube-api-access-gmk8m") pod "942471a2-354d-46eb-8043-a0b3de2d23ab" (UID: "942471a2-354d-46eb-8043-a0b3de2d23ab"). InnerVolumeSpecName "kube-api-access-gmk8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.163637 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-6269-account-create-update-9ww84" Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.254933 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8957\" (UniqueName: \"kubernetes.io/projected/5c543498-8218-435c-9dc7-783a4b91f163-kube-api-access-w8957\") pod \"5c543498-8218-435c-9dc7-783a4b91f163\" (UID: \"5c543498-8218-435c-9dc7-783a4b91f163\") " Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.255468 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c543498-8218-435c-9dc7-783a4b91f163-operator-scripts\") pod \"5c543498-8218-435c-9dc7-783a4b91f163\" (UID: \"5c543498-8218-435c-9dc7-783a4b91f163\") " Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.256151 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmk8m\" (UniqueName: \"kubernetes.io/projected/942471a2-354d-46eb-8043-a0b3de2d23ab-kube-api-access-gmk8m\") on node \"crc\" DevicePath \"\"" Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.256179 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/942471a2-354d-46eb-8043-a0b3de2d23ab-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.256443 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c543498-8218-435c-9dc7-783a4b91f163-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5c543498-8218-435c-9dc7-783a4b91f163" (UID: "5c543498-8218-435c-9dc7-783a4b91f163"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.258310 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c543498-8218-435c-9dc7-783a4b91f163-kube-api-access-w8957" (OuterVolumeSpecName: "kube-api-access-w8957") pod "5c543498-8218-435c-9dc7-783a4b91f163" (UID: "5c543498-8218-435c-9dc7-783a4b91f163"). InnerVolumeSpecName "kube-api-access-w8957". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.358264 4971 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c543498-8218-435c-9dc7-783a4b91f163-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.358300 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8957\" (UniqueName: \"kubernetes.io/projected/5c543498-8218-435c-9dc7-783a4b91f163-kube-api-access-w8957\") on node \"crc\" DevicePath \"\"" Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.503322 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-6269-account-create-update-9ww84" event={"ID":"5c543498-8218-435c-9dc7-783a4b91f163","Type":"ContainerDied","Data":"61d9d313af94090944c7b4ee7ddf7168d1563780c5ff7c542447a0a1f4a0468f"} Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.503369 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61d9d313af94090944c7b4ee7ddf7168d1563780c5ff7c542447a0a1f4a0468f" Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.503458 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-6269-account-create-update-9ww84" Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.506383 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-5vg66" event={"ID":"942471a2-354d-46eb-8043-a0b3de2d23ab","Type":"ContainerDied","Data":"8bf0d2f272f39be7c07623461de142a4e611aab9737598bb879b0c65f6d781d0"} Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.506438 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8bf0d2f272f39be7c07623461de142a4e611aab9737598bb879b0c65f6d781d0" Nov 27 09:03:13 crc kubenswrapper[4971]: I1127 09:03:13.506513 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-5vg66" Nov 27 09:03:14 crc kubenswrapper[4971]: I1127 09:03:14.032945 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-2bea-account-create-update-n5z98"] Nov 27 09:03:14 crc kubenswrapper[4971]: I1127 09:03:14.042469 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-2bea-account-create-update-n5z98"] Nov 27 09:03:14 crc kubenswrapper[4971]: I1127 09:03:14.570260 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76fa5095-c64b-4ed1-b5ed-221c8f85db60" path="/var/lib/kubelet/pods/76fa5095-c64b-4ed1-b5ed-221c8f85db60/volumes" Nov 27 09:03:14 crc kubenswrapper[4971]: I1127 09:03:14.576103 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90d61a54-6dcb-4eec-971c-f9a93fda7d12" path="/var/lib/kubelet/pods/90d61a54-6dcb-4eec-971c-f9a93fda7d12/volumes" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.098207 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-npkfx"] Nov 27 09:03:15 crc kubenswrapper[4971]: E1127 09:03:15.099326 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c543498-8218-435c-9dc7-783a4b91f163" containerName="mariadb-account-create-update" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.099356 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c543498-8218-435c-9dc7-783a4b91f163" containerName="mariadb-account-create-update" Nov 27 09:03:15 crc kubenswrapper[4971]: E1127 09:03:15.099377 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="942471a2-354d-46eb-8043-a0b3de2d23ab" containerName="mariadb-database-create" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.099388 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="942471a2-354d-46eb-8043-a0b3de2d23ab" containerName="mariadb-database-create" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.099833 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c543498-8218-435c-9dc7-783a4b91f163" containerName="mariadb-account-create-update" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.099891 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="942471a2-354d-46eb-8043-a0b3de2d23ab" containerName="mariadb-database-create" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.100992 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-npkfx" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.103278 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-zj8pn" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.107117 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-npkfx"] Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.109754 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.202149 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hgsb\" (UniqueName: \"kubernetes.io/projected/a0c4f901-c67c-4848-8631-02c515ba7c7b-kube-api-access-2hgsb\") pod \"manila-db-sync-npkfx\" (UID: \"a0c4f901-c67c-4848-8631-02c515ba7c7b\") " pod="openstack/manila-db-sync-npkfx" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.202239 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0c4f901-c67c-4848-8631-02c515ba7c7b-config-data\") pod \"manila-db-sync-npkfx\" (UID: \"a0c4f901-c67c-4848-8631-02c515ba7c7b\") " pod="openstack/manila-db-sync-npkfx" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.202342 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/a0c4f901-c67c-4848-8631-02c515ba7c7b-job-config-data\") pod \"manila-db-sync-npkfx\" (UID: \"a0c4f901-c67c-4848-8631-02c515ba7c7b\") " pod="openstack/manila-db-sync-npkfx" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.202373 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0c4f901-c67c-4848-8631-02c515ba7c7b-combined-ca-bundle\") pod \"manila-db-sync-npkfx\" (UID: \"a0c4f901-c67c-4848-8631-02c515ba7c7b\") " pod="openstack/manila-db-sync-npkfx" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.304842 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/a0c4f901-c67c-4848-8631-02c515ba7c7b-job-config-data\") pod \"manila-db-sync-npkfx\" (UID: \"a0c4f901-c67c-4848-8631-02c515ba7c7b\") " pod="openstack/manila-db-sync-npkfx" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.304909 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0c4f901-c67c-4848-8631-02c515ba7c7b-combined-ca-bundle\") pod \"manila-db-sync-npkfx\" (UID: \"a0c4f901-c67c-4848-8631-02c515ba7c7b\") " pod="openstack/manila-db-sync-npkfx" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.305057 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hgsb\" (UniqueName: \"kubernetes.io/projected/a0c4f901-c67c-4848-8631-02c515ba7c7b-kube-api-access-2hgsb\") pod \"manila-db-sync-npkfx\" (UID: \"a0c4f901-c67c-4848-8631-02c515ba7c7b\") " pod="openstack/manila-db-sync-npkfx" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.305116 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0c4f901-c67c-4848-8631-02c515ba7c7b-config-data\") pod \"manila-db-sync-npkfx\" (UID: 
\"a0c4f901-c67c-4848-8631-02c515ba7c7b\") " pod="openstack/manila-db-sync-npkfx" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.313455 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0c4f901-c67c-4848-8631-02c515ba7c7b-combined-ca-bundle\") pod \"manila-db-sync-npkfx\" (UID: \"a0c4f901-c67c-4848-8631-02c515ba7c7b\") " pod="openstack/manila-db-sync-npkfx" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.313685 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/a0c4f901-c67c-4848-8631-02c515ba7c7b-job-config-data\") pod \"manila-db-sync-npkfx\" (UID: \"a0c4f901-c67c-4848-8631-02c515ba7c7b\") " pod="openstack/manila-db-sync-npkfx" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.314113 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0c4f901-c67c-4848-8631-02c515ba7c7b-config-data\") pod \"manila-db-sync-npkfx\" (UID: \"a0c4f901-c67c-4848-8631-02c515ba7c7b\") " pod="openstack/manila-db-sync-npkfx" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.323799 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hgsb\" (UniqueName: \"kubernetes.io/projected/a0c4f901-c67c-4848-8631-02c515ba7c7b-kube-api-access-2hgsb\") pod \"manila-db-sync-npkfx\" (UID: \"a0c4f901-c67c-4848-8631-02c515ba7c7b\") " pod="openstack/manila-db-sync-npkfx" Nov 27 09:03:15 crc kubenswrapper[4971]: I1127 09:03:15.420916 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-npkfx" Nov 27 09:03:16 crc kubenswrapper[4971]: W1127 09:03:16.094370 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0c4f901_c67c_4848_8631_02c515ba7c7b.slice/crio-3ab6ca0f5f57361c76b4929c62c5502e25c660b20d3ab563739ddbf5649374b3 WatchSource:0}: Error finding container 3ab6ca0f5f57361c76b4929c62c5502e25c660b20d3ab563739ddbf5649374b3: Status 404 returned error can't find the container with id 3ab6ca0f5f57361c76b4929c62c5502e25c660b20d3ab563739ddbf5649374b3 Nov 27 09:03:16 crc kubenswrapper[4971]: I1127 09:03:16.095170 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-npkfx"] Nov 27 09:03:16 crc kubenswrapper[4971]: I1127 09:03:16.549261 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-npkfx" event={"ID":"a0c4f901-c67c-4848-8631-02c515ba7c7b","Type":"ContainerStarted","Data":"3ab6ca0f5f57361c76b4929c62c5502e25c660b20d3ab563739ddbf5649374b3"} Nov 27 09:03:20 crc kubenswrapper[4971]: I1127 09:03:20.551456 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168" Nov 27 09:03:20 crc kubenswrapper[4971]: E1127 09:03:20.552754 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:03:23 crc kubenswrapper[4971]: I1127 09:03:23.034714 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/placement-db-sync-5949w"] Nov 27 09:03:23 crc kubenswrapper[4971]: I1127 09:03:23.047185 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-5949w"] Nov 27 09:03:23 crc kubenswrapper[4971]: I1127 09:03:23.645886 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-npkfx" event={"ID":"a0c4f901-c67c-4848-8631-02c515ba7c7b","Type":"ContainerStarted","Data":"ee730af8dcb48497458691a1dce04d169dd5dfb16c04fcd662f3f8016435faa6"} Nov 27 09:03:23 crc kubenswrapper[4971]: I1127 09:03:23.700024 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-npkfx" podStartSLOduration=2.519152867 podStartE2EDuration="8.700001116s" podCreationTimestamp="2025-11-27 09:03:15 +0000 UTC" firstStartedPulling="2025-11-27 09:03:16.097813921 +0000 UTC m=+7834.289857839" lastFinishedPulling="2025-11-27 09:03:22.27866218 +0000 UTC m=+7840.470706088" observedRunningTime="2025-11-27 09:03:23.673429635 +0000 UTC m=+7841.865473563" watchObservedRunningTime="2025-11-27 09:03:23.700001116 +0000 UTC m=+7841.892045034" Nov 27 09:03:24 crc kubenswrapper[4971]: I1127 09:03:24.576524 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75d5055d-85b2-4925-b62c-1cfa9acaf383" path="/var/lib/kubelet/pods/75d5055d-85b2-4925-b62c-1cfa9acaf383/volumes" Nov 27 09:03:25 crc kubenswrapper[4971]: I1127 09:03:25.670114 4971 generic.go:334] "Generic (PLEG): container finished" podID="a0c4f901-c67c-4848-8631-02c515ba7c7b" containerID="ee730af8dcb48497458691a1dce04d169dd5dfb16c04fcd662f3f8016435faa6" exitCode=0 Nov 27 09:03:25 crc kubenswrapper[4971]: I1127 09:03:25.670164 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-npkfx" event={"ID":"a0c4f901-c67c-4848-8631-02c515ba7c7b","Type":"ContainerDied","Data":"ee730af8dcb48497458691a1dce04d169dd5dfb16c04fcd662f3f8016435faa6"} Nov 27 09:03:27 crc kubenswrapper[4971]: I1127 09:03:27.193759 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-npkfx" Nov 27 09:03:27 crc kubenswrapper[4971]: I1127 09:03:27.308260 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hgsb\" (UniqueName: \"kubernetes.io/projected/a0c4f901-c67c-4848-8631-02c515ba7c7b-kube-api-access-2hgsb\") pod \"a0c4f901-c67c-4848-8631-02c515ba7c7b\" (UID: \"a0c4f901-c67c-4848-8631-02c515ba7c7b\") " Nov 27 09:03:27 crc kubenswrapper[4971]: I1127 09:03:27.308407 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0c4f901-c67c-4848-8631-02c515ba7c7b-config-data\") pod \"a0c4f901-c67c-4848-8631-02c515ba7c7b\" (UID: \"a0c4f901-c67c-4848-8631-02c515ba7c7b\") " Nov 27 09:03:27 crc kubenswrapper[4971]: I1127 09:03:27.308428 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0c4f901-c67c-4848-8631-02c515ba7c7b-combined-ca-bundle\") pod \"a0c4f901-c67c-4848-8631-02c515ba7c7b\" (UID: \"a0c4f901-c67c-4848-8631-02c515ba7c7b\") " Nov 27 09:03:27 crc kubenswrapper[4971]: I1127 09:03:27.308633 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/a0c4f901-c67c-4848-8631-02c515ba7c7b-job-config-data\") pod \"a0c4f901-c67c-4848-8631-02c515ba7c7b\" (UID: \"a0c4f901-c67c-4848-8631-02c515ba7c7b\") " Nov 27 09:03:27 crc kubenswrapper[4971]: I1127 09:03:27.315918 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0c4f901-c67c-4848-8631-02c515ba7c7b-kube-api-access-2hgsb" (OuterVolumeSpecName: "kube-api-access-2hgsb") pod "a0c4f901-c67c-4848-8631-02c515ba7c7b" (UID: "a0c4f901-c67c-4848-8631-02c515ba7c7b"). InnerVolumeSpecName "kube-api-access-2hgsb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:03:27 crc kubenswrapper[4971]: I1127 09:03:27.316924 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0c4f901-c67c-4848-8631-02c515ba7c7b-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "a0c4f901-c67c-4848-8631-02c515ba7c7b" (UID: "a0c4f901-c67c-4848-8631-02c515ba7c7b"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:03:27 crc kubenswrapper[4971]: I1127 09:03:27.321049 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0c4f901-c67c-4848-8631-02c515ba7c7b-config-data" (OuterVolumeSpecName: "config-data") pod "a0c4f901-c67c-4848-8631-02c515ba7c7b" (UID: "a0c4f901-c67c-4848-8631-02c515ba7c7b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:03:27 crc kubenswrapper[4971]: I1127 09:03:27.353560 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0c4f901-c67c-4848-8631-02c515ba7c7b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a0c4f901-c67c-4848-8631-02c515ba7c7b" (UID: "a0c4f901-c67c-4848-8631-02c515ba7c7b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:03:27 crc kubenswrapper[4971]: I1127 09:03:27.411597 4971 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/a0c4f901-c67c-4848-8631-02c515ba7c7b-job-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 09:03:27 crc kubenswrapper[4971]: I1127 09:03:27.411639 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hgsb\" (UniqueName: \"kubernetes.io/projected/a0c4f901-c67c-4848-8631-02c515ba7c7b-kube-api-access-2hgsb\") on node \"crc\" DevicePath \"\"" Nov 27 09:03:27 crc kubenswrapper[4971]: I1127 09:03:27.411656 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0c4f901-c67c-4848-8631-02c515ba7c7b-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 09:03:27 crc kubenswrapper[4971]: I1127 09:03:27.411676 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0c4f901-c67c-4848-8631-02c515ba7c7b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:03:27 crc kubenswrapper[4971]: I1127 09:03:27.695652 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-npkfx" event={"ID":"a0c4f901-c67c-4848-8631-02c515ba7c7b","Type":"ContainerDied","Data":"3ab6ca0f5f57361c76b4929c62c5502e25c660b20d3ab563739ddbf5649374b3"} Nov 27 09:03:27 crc kubenswrapper[4971]: I1127 09:03:27.695709 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ab6ca0f5f57361c76b4929c62c5502e25c660b20d3ab563739ddbf5649374b3" Nov 27 09:03:27 crc kubenswrapper[4971]: I1127 09:03:27.695785 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-npkfx" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.051017 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Nov 27 09:03:28 crc kubenswrapper[4971]: E1127 09:03:28.053089 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0c4f901-c67c-4848-8631-02c515ba7c7b" containerName="manila-db-sync" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.053215 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0c4f901-c67c-4848-8631-02c515ba7c7b" containerName="manila-db-sync" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.053489 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0c4f901-c67c-4848-8631-02c515ba7c7b" containerName="manila-db-sync" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.055007 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.060463 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.061139 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.061238 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.062789 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-zj8pn" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.087818 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.112766 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bc779bffc-k8jn2"] Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.114777 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.142423 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc779bffc-k8jn2"] Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.146490 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ca40967-dc15-4bb4-8f05-24a4eb302605-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.146570 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fl6lb\" (UniqueName: \"kubernetes.io/projected/2ca40967-dc15-4bb4-8f05-24a4eb302605-kube-api-access-fl6lb\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.146654 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ca40967-dc15-4bb4-8f05-24a4eb302605-scripts\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.146673 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/2ca40967-dc15-4bb4-8f05-24a4eb302605-ceph\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.146710 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2ca40967-dc15-4bb4-8f05-24a4eb302605-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.146751 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: 
\"kubernetes.io/host-path/2ca40967-dc15-4bb4-8f05-24a4eb302605-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.146790 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ca40967-dc15-4bb4-8f05-24a4eb302605-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.146835 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ca40967-dc15-4bb4-8f05-24a4eb302605-config-data\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.177034 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.179684 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.185948 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.201835 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.249369 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ca40967-dc15-4bb4-8f05-24a4eb302605-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.249447 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ca40967-dc15-4bb4-8f05-24a4eb302605-config-data\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.249575 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ca40967-dc15-4bb4-8f05-24a4eb302605-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.249644 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f53839dc-4c3a-4d71-ba66-897cc991de94-config-data\") pod \"manila-scheduler-0\" (UID: \"f53839dc-4c3a-4d71-ba66-897cc991de94\") " pod="openstack/manila-scheduler-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.249673 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fl6lb\" (UniqueName: \"kubernetes.io/projected/2ca40967-dc15-4bb4-8f05-24a4eb302605-kube-api-access-fl6lb\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc 
kubenswrapper[4971]: I1127 09:03:28.249711 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-ovsdbserver-nb\") pod \"dnsmasq-dns-6bc779bffc-k8jn2\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.249748 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f53839dc-4c3a-4d71-ba66-897cc991de94-scripts\") pod \"manila-scheduler-0\" (UID: \"f53839dc-4c3a-4d71-ba66-897cc991de94\") " pod="openstack/manila-scheduler-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.249805 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btk2m\" (UniqueName: \"kubernetes.io/projected/09c00568-2483-43ee-b406-28f3b60dd660-kube-api-access-btk2m\") pod \"dnsmasq-dns-6bc779bffc-k8jn2\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.249899 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f53839dc-4c3a-4d71-ba66-897cc991de94-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"f53839dc-4c3a-4d71-ba66-897cc991de94\") " pod="openstack/manila-scheduler-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.249933 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dctv\" (UniqueName: \"kubernetes.io/projected/f53839dc-4c3a-4d71-ba66-897cc991de94-kube-api-access-7dctv\") pod \"manila-scheduler-0\" (UID: \"f53839dc-4c3a-4d71-ba66-897cc991de94\") " pod="openstack/manila-scheduler-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.249969 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ca40967-dc15-4bb4-8f05-24a4eb302605-scripts\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.249990 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/2ca40967-dc15-4bb4-8f05-24a4eb302605-ceph\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.250015 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53839dc-4c3a-4d71-ba66-897cc991de94-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"f53839dc-4c3a-4d71-ba66-897cc991de94\") " pod="openstack/manila-scheduler-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.250075 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2ca40967-dc15-4bb4-8f05-24a4eb302605-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.250118 4971 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f53839dc-4c3a-4d71-ba66-897cc991de94-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"f53839dc-4c3a-4d71-ba66-897cc991de94\") " pod="openstack/manila-scheduler-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.250146 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-dns-svc\") pod \"dnsmasq-dns-6bc779bffc-k8jn2\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.250186 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc779bffc-k8jn2\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.250231 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-config\") pod \"dnsmasq-dns-6bc779bffc-k8jn2\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.250253 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/2ca40967-dc15-4bb4-8f05-24a4eb302605-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.250583 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/2ca40967-dc15-4bb4-8f05-24a4eb302605-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.251562 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ca40967-dc15-4bb4-8f05-24a4eb302605-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.259287 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ca40967-dc15-4bb4-8f05-24a4eb302605-config-data\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.261796 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2ca40967-dc15-4bb4-8f05-24a4eb302605-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.262392 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/2ca40967-dc15-4bb4-8f05-24a4eb302605-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.271482 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ca40967-dc15-4bb4-8f05-24a4eb302605-scripts\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.279089 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/2ca40967-dc15-4bb4-8f05-24a4eb302605-ceph\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.285944 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fl6lb\" (UniqueName: \"kubernetes.io/projected/2ca40967-dc15-4bb4-8f05-24a4eb302605-kube-api-access-fl6lb\") pod \"manila-share-share1-0\" (UID: \"2ca40967-dc15-4bb4-8f05-24a4eb302605\") " pod="openstack/manila-share-share1-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.352827 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-ovsdbserver-nb\") pod \"dnsmasq-dns-6bc779bffc-k8jn2\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.352888 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f53839dc-4c3a-4d71-ba66-897cc991de94-scripts\") pod \"manila-scheduler-0\" (UID: \"f53839dc-4c3a-4d71-ba66-897cc991de94\") " pod="openstack/manila-scheduler-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.352928 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btk2m\" (UniqueName: \"kubernetes.io/projected/09c00568-2483-43ee-b406-28f3b60dd660-kube-api-access-btk2m\") pod \"dnsmasq-dns-6bc779bffc-k8jn2\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.352966 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f53839dc-4c3a-4d71-ba66-897cc991de94-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"f53839dc-4c3a-4d71-ba66-897cc991de94\") " pod="openstack/manila-scheduler-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.352983 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dctv\" (UniqueName: \"kubernetes.io/projected/f53839dc-4c3a-4d71-ba66-897cc991de94-kube-api-access-7dctv\") pod \"manila-scheduler-0\" (UID: \"f53839dc-4c3a-4d71-ba66-897cc991de94\") " pod="openstack/manila-scheduler-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.353004 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53839dc-4c3a-4d71-ba66-897cc991de94-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"f53839dc-4c3a-4d71-ba66-897cc991de94\") " pod="openstack/manila-scheduler-0" 
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.353045 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f53839dc-4c3a-4d71-ba66-897cc991de94-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"f53839dc-4c3a-4d71-ba66-897cc991de94\") " pod="openstack/manila-scheduler-0"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.353064 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-dns-svc\") pod \"dnsmasq-dns-6bc779bffc-k8jn2\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.353085 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc779bffc-k8jn2\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.353106 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-config\") pod \"dnsmasq-dns-6bc779bffc-k8jn2\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.353189 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f53839dc-4c3a-4d71-ba66-897cc991de94-config-data\") pod \"manila-scheduler-0\" (UID: \"f53839dc-4c3a-4d71-ba66-897cc991de94\") " pod="openstack/manila-scheduler-0"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.356691 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-ovsdbserver-nb\") pod \"dnsmasq-dns-6bc779bffc-k8jn2\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.357674 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f53839dc-4c3a-4d71-ba66-897cc991de94-config-data\") pod \"manila-scheduler-0\" (UID: \"f53839dc-4c3a-4d71-ba66-897cc991de94\") " pod="openstack/manila-scheduler-0"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.357799 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"]
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.359247 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f53839dc-4c3a-4d71-ba66-897cc991de94-scripts\") pod \"manila-scheduler-0\" (UID: \"f53839dc-4c3a-4d71-ba66-897cc991de94\") " pod="openstack/manila-scheduler-0"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.359264 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f53839dc-4c3a-4d71-ba66-897cc991de94-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"f53839dc-4c3a-4d71-ba66-897cc991de94\") " pod="openstack/manila-scheduler-0"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.360387 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc779bffc-k8jn2\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.360515 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53839dc-4c3a-4d71-ba66-897cc991de94-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"f53839dc-4c3a-4d71-ba66-897cc991de94\") " pod="openstack/manila-scheduler-0"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.363879 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-config\") pod \"dnsmasq-dns-6bc779bffc-k8jn2\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.365152 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-dns-svc\") pod \"dnsmasq-dns-6bc779bffc-k8jn2\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.371219 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f53839dc-4c3a-4d71-ba66-897cc991de94-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"f53839dc-4c3a-4d71-ba66-897cc991de94\") " pod="openstack/manila-scheduler-0"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.377237 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dctv\" (UniqueName: \"kubernetes.io/projected/f53839dc-4c3a-4d71-ba66-897cc991de94-kube-api-access-7dctv\") pod \"manila-scheduler-0\" (UID: \"f53839dc-4c3a-4d71-ba66-897cc991de94\") " pod="openstack/manila-scheduler-0"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.379365 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.382833 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.389685 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.393825 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btk2m\" (UniqueName: \"kubernetes.io/projected/09c00568-2483-43ee-b406-28f3b60dd660-kube-api-access-btk2m\") pod \"dnsmasq-dns-6bc779bffc-k8jn2\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.394640 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"]
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.443635 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.464184 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3285b5d5-f19d-4f37-912a-8d3fde19a794-etc-machine-id\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.464236 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3285b5d5-f19d-4f37-912a-8d3fde19a794-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.464270 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxjrg\" (UniqueName: \"kubernetes.io/projected/3285b5d5-f19d-4f37-912a-8d3fde19a794-kube-api-access-cxjrg\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.464334 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3285b5d5-f19d-4f37-912a-8d3fde19a794-config-data\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.464366 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3285b5d5-f19d-4f37-912a-8d3fde19a794-config-data-custom\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.464407 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3285b5d5-f19d-4f37-912a-8d3fde19a794-logs\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.464694 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3285b5d5-f19d-4f37-912a-8d3fde19a794-scripts\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0"
Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.508783 4971 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/manila-scheduler-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.566400 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3285b5d5-f19d-4f37-912a-8d3fde19a794-logs\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.566500 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3285b5d5-f19d-4f37-912a-8d3fde19a794-scripts\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.566898 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3285b5d5-f19d-4f37-912a-8d3fde19a794-etc-machine-id\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.566934 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3285b5d5-f19d-4f37-912a-8d3fde19a794-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.566962 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxjrg\" (UniqueName: \"kubernetes.io/projected/3285b5d5-f19d-4f37-912a-8d3fde19a794-kube-api-access-cxjrg\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.567024 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3285b5d5-f19d-4f37-912a-8d3fde19a794-config-data\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.567064 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3285b5d5-f19d-4f37-912a-8d3fde19a794-config-data-custom\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.567233 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3285b5d5-f19d-4f37-912a-8d3fde19a794-logs\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.571756 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3285b5d5-f19d-4f37-912a-8d3fde19a794-config-data-custom\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.573355 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3285b5d5-f19d-4f37-912a-8d3fde19a794-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " 
pod="openstack/manila-api-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.575547 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3285b5d5-f19d-4f37-912a-8d3fde19a794-etc-machine-id\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.575948 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3285b5d5-f19d-4f37-912a-8d3fde19a794-config-data\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.577008 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3285b5d5-f19d-4f37-912a-8d3fde19a794-scripts\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.589070 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxjrg\" (UniqueName: \"kubernetes.io/projected/3285b5d5-f19d-4f37-912a-8d3fde19a794-kube-api-access-cxjrg\") pod \"manila-api-0\" (UID: \"3285b5d5-f19d-4f37-912a-8d3fde19a794\") " pod="openstack/manila-api-0" Nov 27 09:03:28 crc kubenswrapper[4971]: I1127 09:03:28.818108 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Nov 27 09:03:29 crc kubenswrapper[4971]: I1127 09:03:29.146129 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc779bffc-k8jn2"] Nov 27 09:03:29 crc kubenswrapper[4971]: I1127 09:03:29.278746 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Nov 27 09:03:29 crc kubenswrapper[4971]: I1127 09:03:29.322864 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Nov 27 09:03:29 crc kubenswrapper[4971]: W1127 09:03:29.327041 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf53839dc_4c3a_4d71_ba66_897cc991de94.slice/crio-dfdf7f291463e53d6c3da1f99b56f5bb25d76e4ed24929316810e18ca49fab54 WatchSource:0}: Error finding container dfdf7f291463e53d6c3da1f99b56f5bb25d76e4ed24929316810e18ca49fab54: Status 404 returned error can't find the container with id dfdf7f291463e53d6c3da1f99b56f5bb25d76e4ed24929316810e18ca49fab54 Nov 27 09:03:29 crc kubenswrapper[4971]: I1127 09:03:29.500992 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Nov 27 09:03:29 crc kubenswrapper[4971]: I1127 09:03:29.744804 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"f53839dc-4c3a-4d71-ba66-897cc991de94","Type":"ContainerStarted","Data":"dfdf7f291463e53d6c3da1f99b56f5bb25d76e4ed24929316810e18ca49fab54"} Nov 27 09:03:29 crc kubenswrapper[4971]: I1127 09:03:29.755846 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"2ca40967-dc15-4bb4-8f05-24a4eb302605","Type":"ContainerStarted","Data":"6039ee4841df2346e2649c18b6d354ac3533ed822ce2e02b0b047cebd8bd49f9"} Nov 27 09:03:29 crc kubenswrapper[4971]: I1127 09:03:29.768456 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" 
event={"ID":"3285b5d5-f19d-4f37-912a-8d3fde19a794","Type":"ContainerStarted","Data":"b87bc545759f03bbfaef7c5c226a560ba75d4fe70187749d7cf4761ffdc2091f"} Nov 27 09:03:29 crc kubenswrapper[4971]: I1127 09:03:29.785682 4971 generic.go:334] "Generic (PLEG): container finished" podID="09c00568-2483-43ee-b406-28f3b60dd660" containerID="54ffa50611ffa0eec5961247c221e99066bb00cb56e1de4b812bdaa2ad8ed220" exitCode=0 Nov 27 09:03:29 crc kubenswrapper[4971]: I1127 09:03:29.785741 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2" event={"ID":"09c00568-2483-43ee-b406-28f3b60dd660","Type":"ContainerDied","Data":"54ffa50611ffa0eec5961247c221e99066bb00cb56e1de4b812bdaa2ad8ed220"} Nov 27 09:03:29 crc kubenswrapper[4971]: I1127 09:03:29.785769 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2" event={"ID":"09c00568-2483-43ee-b406-28f3b60dd660","Type":"ContainerStarted","Data":"1f81aaaeec789a5a9b323b41c3596b2e8f28868f7a23558a727f75e117e413db"} Nov 27 09:03:30 crc kubenswrapper[4971]: I1127 09:03:30.807325 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"3285b5d5-f19d-4f37-912a-8d3fde19a794","Type":"ContainerStarted","Data":"0e254304c63cf4555c663065337394e211c9e29467d78996df0480c931a65dfb"} Nov 27 09:03:30 crc kubenswrapper[4971]: I1127 09:03:30.822682 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2" event={"ID":"09c00568-2483-43ee-b406-28f3b60dd660","Type":"ContainerStarted","Data":"7b0b0ee9a39768b33b6beb3ec1ceca199b5bf272227f3f316ec4f4db5476d95b"} Nov 27 09:03:30 crc kubenswrapper[4971]: I1127 09:03:30.822833 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2" Nov 27 09:03:30 crc kubenswrapper[4971]: I1127 09:03:30.843831 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"f53839dc-4c3a-4d71-ba66-897cc991de94","Type":"ContainerStarted","Data":"0e50214e0861622731e88861f59414a28d1f855d7abe6fe9f48fc1a8c3c6af5d"} Nov 27 09:03:30 crc kubenswrapper[4971]: I1127 09:03:30.859934 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2" podStartSLOduration=2.859901916 podStartE2EDuration="2.859901916s" podCreationTimestamp="2025-11-27 09:03:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 09:03:30.842822137 +0000 UTC m=+7849.034866065" watchObservedRunningTime="2025-11-27 09:03:30.859901916 +0000 UTC m=+7849.051945834" Nov 27 09:03:31 crc kubenswrapper[4971]: I1127 09:03:31.864355 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"3285b5d5-f19d-4f37-912a-8d3fde19a794","Type":"ContainerStarted","Data":"8053b5cb11bcc1e7480c9692d3661987aa53beb32c4e6b5238b72c1298340a5f"} Nov 27 09:03:31 crc kubenswrapper[4971]: I1127 09:03:31.865093 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Nov 27 09:03:31 crc kubenswrapper[4971]: I1127 09:03:31.869214 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"f53839dc-4c3a-4d71-ba66-897cc991de94","Type":"ContainerStarted","Data":"05302658ba204d3e7475ee0f28ab053d6a7bba8837a0e8df312a3408e033ef12"} Nov 27 09:03:31 crc kubenswrapper[4971]: I1127 09:03:31.891798 4971 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=3.891774301 podStartE2EDuration="3.891774301s" podCreationTimestamp="2025-11-27 09:03:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 09:03:31.882560647 +0000 UTC m=+7850.074604585" watchObservedRunningTime="2025-11-27 09:03:31.891774301 +0000 UTC m=+7850.083818219" Nov 27 09:03:31 crc kubenswrapper[4971]: I1127 09:03:31.910223 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=3.443835702 podStartE2EDuration="3.910206589s" podCreationTimestamp="2025-11-27 09:03:28 +0000 UTC" firstStartedPulling="2025-11-27 09:03:29.329212886 +0000 UTC m=+7847.521256804" lastFinishedPulling="2025-11-27 09:03:29.795583773 +0000 UTC m=+7847.987627691" observedRunningTime="2025-11-27 09:03:31.909014685 +0000 UTC m=+7850.101058623" watchObservedRunningTime="2025-11-27 09:03:31.910206589 +0000 UTC m=+7850.102250507" Nov 27 09:03:32 crc kubenswrapper[4971]: I1127 09:03:32.425931 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 27 09:03:32 crc kubenswrapper[4971]: I1127 09:03:32.426243 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerName="ceilometer-central-agent" containerID="cri-o://5aae78464f6aa5cd9ef4630c21ac61c7cdce20b91eaa7173b2622ab2f6e96af3" gracePeriod=30 Nov 27 09:03:32 crc kubenswrapper[4971]: I1127 09:03:32.426471 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerName="sg-core" containerID="cri-o://ae31041b97490942c81f12b5b593965066a09bd39de719e60ba76ce5ed7d9dbd" gracePeriod=30 Nov 27 09:03:32 crc kubenswrapper[4971]: I1127 09:03:32.426783 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerName="proxy-httpd" containerID="cri-o://09e43000bb4dd7562a875107fa08a4132d19c8d68bc2e42c7c0d9001d0550af9" gracePeriod=30 Nov 27 09:03:32 crc kubenswrapper[4971]: I1127 09:03:32.426818 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerName="ceilometer-notification-agent" containerID="cri-o://1a70de4bb093cb5e220d019a58f00a9ba6a0edd749dfd99ef96d5eadc0651aed" gracePeriod=30 Nov 27 09:03:32 crc kubenswrapper[4971]: I1127 09:03:32.435208 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 27 09:03:32 crc kubenswrapper[4971]: I1127 09:03:32.882691 4971 generic.go:334] "Generic (PLEG): container finished" podID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerID="09e43000bb4dd7562a875107fa08a4132d19c8d68bc2e42c7c0d9001d0550af9" exitCode=0 Nov 27 09:03:32 crc kubenswrapper[4971]: I1127 09:03:32.883014 4971 generic.go:334] "Generic (PLEG): container finished" podID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerID="ae31041b97490942c81f12b5b593965066a09bd39de719e60ba76ce5ed7d9dbd" exitCode=2 Nov 27 09:03:32 crc kubenswrapper[4971]: I1127 09:03:32.882738 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"44fc7298-f2a4-4278-9b26-24ad31c66cd5","Type":"ContainerDied","Data":"09e43000bb4dd7562a875107fa08a4132d19c8d68bc2e42c7c0d9001d0550af9"} Nov 27 09:03:32 crc kubenswrapper[4971]: I1127 09:03:32.883306 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"44fc7298-f2a4-4278-9b26-24ad31c66cd5","Type":"ContainerDied","Data":"ae31041b97490942c81f12b5b593965066a09bd39de719e60ba76ce5ed7d9dbd"} Nov 27 09:03:33 crc kubenswrapper[4971]: I1127 09:03:33.550887 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168" Nov 27 09:03:33 crc kubenswrapper[4971]: E1127 09:03:33.551235 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:03:33 crc kubenswrapper[4971]: I1127 09:03:33.832152 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.1.137:3000/\": dial tcp 10.217.1.137:3000: connect: connection refused" Nov 27 09:03:33 crc kubenswrapper[4971]: I1127 09:03:33.899073 4971 generic.go:334] "Generic (PLEG): container finished" podID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerID="5aae78464f6aa5cd9ef4630c21ac61c7cdce20b91eaa7173b2622ab2f6e96af3" exitCode=0 Nov 27 09:03:33 crc kubenswrapper[4971]: I1127 09:03:33.899123 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"44fc7298-f2a4-4278-9b26-24ad31c66cd5","Type":"ContainerDied","Data":"5aae78464f6aa5cd9ef4630c21ac61c7cdce20b91eaa7173b2622ab2f6e96af3"} Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.530754 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.690486 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-config-data\") pod \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.690599 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44fc7298-f2a4-4278-9b26-24ad31c66cd5-log-httpd\") pod \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.690688 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-sg-core-conf-yaml\") pod \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.690724 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwt7v\" (UniqueName: \"kubernetes.io/projected/44fc7298-f2a4-4278-9b26-24ad31c66cd5-kube-api-access-bwt7v\") pod \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.690782 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-scripts\") pod \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.690850 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44fc7298-f2a4-4278-9b26-24ad31c66cd5-run-httpd\") pod \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.690913 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-combined-ca-bundle\") pod \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\" (UID: \"44fc7298-f2a4-4278-9b26-24ad31c66cd5\") " Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.691219 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44fc7298-f2a4-4278-9b26-24ad31c66cd5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "44fc7298-f2a4-4278-9b26-24ad31c66cd5" (UID: "44fc7298-f2a4-4278-9b26-24ad31c66cd5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.691692 4971 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44fc7298-f2a4-4278-9b26-24ad31c66cd5-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.691690 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44fc7298-f2a4-4278-9b26-24ad31c66cd5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "44fc7298-f2a4-4278-9b26-24ad31c66cd5" (UID: "44fc7298-f2a4-4278-9b26-24ad31c66cd5"). 
InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.696342 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-scripts" (OuterVolumeSpecName: "scripts") pod "44fc7298-f2a4-4278-9b26-24ad31c66cd5" (UID: "44fc7298-f2a4-4278-9b26-24ad31c66cd5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.702168 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44fc7298-f2a4-4278-9b26-24ad31c66cd5-kube-api-access-bwt7v" (OuterVolumeSpecName: "kube-api-access-bwt7v") pod "44fc7298-f2a4-4278-9b26-24ad31c66cd5" (UID: "44fc7298-f2a4-4278-9b26-24ad31c66cd5"). InnerVolumeSpecName "kube-api-access-bwt7v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.726838 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "44fc7298-f2a4-4278-9b26-24ad31c66cd5" (UID: "44fc7298-f2a4-4278-9b26-24ad31c66cd5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.778187 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "44fc7298-f2a4-4278-9b26-24ad31c66cd5" (UID: "44fc7298-f2a4-4278-9b26-24ad31c66cd5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.793905 4971 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.793957 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwt7v\" (UniqueName: \"kubernetes.io/projected/44fc7298-f2a4-4278-9b26-24ad31c66cd5-kube-api-access-bwt7v\") on node \"crc\" DevicePath \"\"" Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.793972 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-scripts\") on node \"crc\" DevicePath \"\"" Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.793981 4971 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44fc7298-f2a4-4278-9b26-24ad31c66cd5-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.793990 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.800181 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-config-data" (OuterVolumeSpecName: "config-data") pod "44fc7298-f2a4-4278-9b26-24ad31c66cd5" (UID: "44fc7298-f2a4-4278-9b26-24ad31c66cd5"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.896964 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44fc7298-f2a4-4278-9b26-24ad31c66cd5-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.956397 4971 generic.go:334] "Generic (PLEG): container finished" podID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerID="1a70de4bb093cb5e220d019a58f00a9ba6a0edd749dfd99ef96d5eadc0651aed" exitCode=0 Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.956436 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"44fc7298-f2a4-4278-9b26-24ad31c66cd5","Type":"ContainerDied","Data":"1a70de4bb093cb5e220d019a58f00a9ba6a0edd749dfd99ef96d5eadc0651aed"} Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.956486 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.956508 4971 scope.go:117] "RemoveContainer" containerID="09e43000bb4dd7562a875107fa08a4132d19c8d68bc2e42c7c0d9001d0550af9" Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.956495 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"44fc7298-f2a4-4278-9b26-24ad31c66cd5","Type":"ContainerDied","Data":"8890e266f84fc2578d8cbe5b1f5e81bdbbbc584456e7e36e51e2dbfcf8dbdb6e"} Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.963723 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"2ca40967-dc15-4bb4-8f05-24a4eb302605","Type":"ContainerStarted","Data":"81e03a62dc74d57f88ff384ba453c9ccc8b1392116d98a61357b6e490c5e68ab"} Nov 27 09:03:37 crc kubenswrapper[4971]: I1127 09:03:37.963989 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"2ca40967-dc15-4bb4-8f05-24a4eb302605","Type":"ContainerStarted","Data":"bd60aaf17c0c53ea958b18bf081a8295e7defe33537cf1eecb9645aac748958c"} Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.012372 4971 scope.go:117] "RemoveContainer" containerID="ae31041b97490942c81f12b5b593965066a09bd39de719e60ba76ce5ed7d9dbd" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.051681 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=2.781842569 podStartE2EDuration="10.051660059s" podCreationTimestamp="2025-11-27 09:03:28 +0000 UTC" firstStartedPulling="2025-11-27 09:03:29.263479432 +0000 UTC m=+7847.455523350" lastFinishedPulling="2025-11-27 09:03:36.533296922 +0000 UTC m=+7854.725340840" observedRunningTime="2025-11-27 09:03:37.988757406 +0000 UTC m=+7856.180801334" watchObservedRunningTime="2025-11-27 09:03:38.051660059 +0000 UTC m=+7856.243703977" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.081745 4971 scope.go:117] "RemoveContainer" containerID="1a70de4bb093cb5e220d019a58f00a9ba6a0edd749dfd99ef96d5eadc0651aed" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.083679 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.094977 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.104708 4971 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/ceilometer-0"] Nov 27 09:03:38 crc kubenswrapper[4971]: E1127 09:03:38.105140 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerName="ceilometer-central-agent" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.105157 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerName="ceilometer-central-agent" Nov 27 09:03:38 crc kubenswrapper[4971]: E1127 09:03:38.105198 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerName="sg-core" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.105204 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerName="sg-core" Nov 27 09:03:38 crc kubenswrapper[4971]: E1127 09:03:38.105220 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerName="ceilometer-notification-agent" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.105226 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerName="ceilometer-notification-agent" Nov 27 09:03:38 crc kubenswrapper[4971]: E1127 09:03:38.105236 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerName="proxy-httpd" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.105241 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerName="proxy-httpd" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.105436 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerName="ceilometer-central-agent" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.105455 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerName="proxy-httpd" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.105472 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerName="sg-core" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.105485 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" containerName="ceilometer-notification-agent" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.108030 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.111488 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.111578 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.114305 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.131814 4971 scope.go:117] "RemoveContainer" containerID="5aae78464f6aa5cd9ef4630c21ac61c7cdce20b91eaa7173b2622ab2f6e96af3" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.155192 4971 scope.go:117] "RemoveContainer" containerID="09e43000bb4dd7562a875107fa08a4132d19c8d68bc2e42c7c0d9001d0550af9" Nov 27 09:03:38 crc kubenswrapper[4971]: E1127 09:03:38.155917 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09e43000bb4dd7562a875107fa08a4132d19c8d68bc2e42c7c0d9001d0550af9\": container with ID starting with 09e43000bb4dd7562a875107fa08a4132d19c8d68bc2e42c7c0d9001d0550af9 not found: ID does not exist" containerID="09e43000bb4dd7562a875107fa08a4132d19c8d68bc2e42c7c0d9001d0550af9" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.155970 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09e43000bb4dd7562a875107fa08a4132d19c8d68bc2e42c7c0d9001d0550af9"} err="failed to get container status \"09e43000bb4dd7562a875107fa08a4132d19c8d68bc2e42c7c0d9001d0550af9\": rpc error: code = NotFound desc = could not find container \"09e43000bb4dd7562a875107fa08a4132d19c8d68bc2e42c7c0d9001d0550af9\": container with ID starting with 09e43000bb4dd7562a875107fa08a4132d19c8d68bc2e42c7c0d9001d0550af9 not found: ID does not exist" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.156004 4971 scope.go:117] "RemoveContainer" containerID="ae31041b97490942c81f12b5b593965066a09bd39de719e60ba76ce5ed7d9dbd" Nov 27 09:03:38 crc kubenswrapper[4971]: E1127 09:03:38.156447 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae31041b97490942c81f12b5b593965066a09bd39de719e60ba76ce5ed7d9dbd\": container with ID starting with ae31041b97490942c81f12b5b593965066a09bd39de719e60ba76ce5ed7d9dbd not found: ID does not exist" containerID="ae31041b97490942c81f12b5b593965066a09bd39de719e60ba76ce5ed7d9dbd" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.156495 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae31041b97490942c81f12b5b593965066a09bd39de719e60ba76ce5ed7d9dbd"} err="failed to get container status \"ae31041b97490942c81f12b5b593965066a09bd39de719e60ba76ce5ed7d9dbd\": rpc error: code = NotFound desc = could not find container \"ae31041b97490942c81f12b5b593965066a09bd39de719e60ba76ce5ed7d9dbd\": container with ID starting with ae31041b97490942c81f12b5b593965066a09bd39de719e60ba76ce5ed7d9dbd not found: ID does not exist" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.156521 4971 scope.go:117] "RemoveContainer" containerID="1a70de4bb093cb5e220d019a58f00a9ba6a0edd749dfd99ef96d5eadc0651aed" Nov 27 09:03:38 crc kubenswrapper[4971]: E1127 09:03:38.156824 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"1a70de4bb093cb5e220d019a58f00a9ba6a0edd749dfd99ef96d5eadc0651aed\": container with ID starting with 1a70de4bb093cb5e220d019a58f00a9ba6a0edd749dfd99ef96d5eadc0651aed not found: ID does not exist" containerID="1a70de4bb093cb5e220d019a58f00a9ba6a0edd749dfd99ef96d5eadc0651aed" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.156854 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a70de4bb093cb5e220d019a58f00a9ba6a0edd749dfd99ef96d5eadc0651aed"} err="failed to get container status \"1a70de4bb093cb5e220d019a58f00a9ba6a0edd749dfd99ef96d5eadc0651aed\": rpc error: code = NotFound desc = could not find container \"1a70de4bb093cb5e220d019a58f00a9ba6a0edd749dfd99ef96d5eadc0651aed\": container with ID starting with 1a70de4bb093cb5e220d019a58f00a9ba6a0edd749dfd99ef96d5eadc0651aed not found: ID does not exist" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.156872 4971 scope.go:117] "RemoveContainer" containerID="5aae78464f6aa5cd9ef4630c21ac61c7cdce20b91eaa7173b2622ab2f6e96af3" Nov 27 09:03:38 crc kubenswrapper[4971]: E1127 09:03:38.157455 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5aae78464f6aa5cd9ef4630c21ac61c7cdce20b91eaa7173b2622ab2f6e96af3\": container with ID starting with 5aae78464f6aa5cd9ef4630c21ac61c7cdce20b91eaa7173b2622ab2f6e96af3 not found: ID does not exist" containerID="5aae78464f6aa5cd9ef4630c21ac61c7cdce20b91eaa7173b2622ab2f6e96af3" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.157489 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5aae78464f6aa5cd9ef4630c21ac61c7cdce20b91eaa7173b2622ab2f6e96af3"} err="failed to get container status \"5aae78464f6aa5cd9ef4630c21ac61c7cdce20b91eaa7173b2622ab2f6e96af3\": rpc error: code = NotFound desc = could not find container \"5aae78464f6aa5cd9ef4630c21ac61c7cdce20b91eaa7173b2622ab2f6e96af3\": container with ID starting with 5aae78464f6aa5cd9ef4630c21ac61c7cdce20b91eaa7173b2622ab2f6e96af3 not found: ID does not exist" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.220936 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b21ebda9-060e-457a-8631-9d78eaaa095d-log-httpd\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.221033 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-scripts\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.221098 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.221144 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vn4lk\" (UniqueName: \"kubernetes.io/projected/b21ebda9-060e-457a-8631-9d78eaaa095d-kube-api-access-vn4lk\") pod \"ceilometer-0\" (UID: 
\"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.221182 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b21ebda9-060e-457a-8631-9d78eaaa095d-run-httpd\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.221210 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-config-data\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.221273 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.323088 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-scripts\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.323176 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.323217 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vn4lk\" (UniqueName: \"kubernetes.io/projected/b21ebda9-060e-457a-8631-9d78eaaa095d-kube-api-access-vn4lk\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.323244 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b21ebda9-060e-457a-8631-9d78eaaa095d-run-httpd\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.323264 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-config-data\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.323308 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.323367 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/b21ebda9-060e-457a-8631-9d78eaaa095d-log-httpd\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.323827 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b21ebda9-060e-457a-8631-9d78eaaa095d-log-httpd\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.324729 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b21ebda9-060e-457a-8631-9d78eaaa095d-run-httpd\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.328352 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-scripts\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.328413 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.328889 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.330890 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-config-data\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.341996 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vn4lk\" (UniqueName: \"kubernetes.io/projected/b21ebda9-060e-457a-8631-9d78eaaa095d-kube-api-access-vn4lk\") pod \"ceilometer-0\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") " pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.390977 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.433278 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.445706 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.515856 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.529114 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86bf89b475-tb5r5"] Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.529479 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" podUID="d92253a9-9aa5-4930-bc0c-de495914f7c1" containerName="dnsmasq-dns" containerID="cri-o://3a4ecb10e9f79f8f865d8cf9452ba163b667d4a242a6806e00617e57ad36756c" gracePeriod=10 Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.576186 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44fc7298-f2a4-4278-9b26-24ad31c66cd5" path="/var/lib/kubelet/pods/44fc7298-f2a4-4278-9b26-24ad31c66cd5/volumes" Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.979664 4971 generic.go:334] "Generic (PLEG): container finished" podID="d92253a9-9aa5-4930-bc0c-de495914f7c1" containerID="3a4ecb10e9f79f8f865d8cf9452ba163b667d4a242a6806e00617e57ad36756c" exitCode=0 Nov 27 09:03:38 crc kubenswrapper[4971]: I1127 09:03:38.982495 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" event={"ID":"d92253a9-9aa5-4930-bc0c-de495914f7c1","Type":"ContainerDied","Data":"3a4ecb10e9f79f8f865d8cf9452ba163b667d4a242a6806e00617e57ad36756c"} Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.050221 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.251905 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86bf89b475-tb5r5"
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.350225 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfmcx\" (UniqueName: \"kubernetes.io/projected/d92253a9-9aa5-4930-bc0c-de495914f7c1-kube-api-access-tfmcx\") pod \"d92253a9-9aa5-4930-bc0c-de495914f7c1\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") "
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.350436 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-config\") pod \"d92253a9-9aa5-4930-bc0c-de495914f7c1\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") "
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.350490 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-ovsdbserver-sb\") pod \"d92253a9-9aa5-4930-bc0c-de495914f7c1\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") "
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.350550 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-dns-svc\") pod \"d92253a9-9aa5-4930-bc0c-de495914f7c1\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") "
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.350635 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-ovsdbserver-nb\") pod \"d92253a9-9aa5-4930-bc0c-de495914f7c1\" (UID: \"d92253a9-9aa5-4930-bc0c-de495914f7c1\") "
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.356415 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d92253a9-9aa5-4930-bc0c-de495914f7c1-kube-api-access-tfmcx" (OuterVolumeSpecName: "kube-api-access-tfmcx") pod "d92253a9-9aa5-4930-bc0c-de495914f7c1" (UID: "d92253a9-9aa5-4930-bc0c-de495914f7c1"). InnerVolumeSpecName "kube-api-access-tfmcx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.425327 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-config" (OuterVolumeSpecName: "config") pod "d92253a9-9aa5-4930-bc0c-de495914f7c1" (UID: "d92253a9-9aa5-4930-bc0c-de495914f7c1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.429139 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d92253a9-9aa5-4930-bc0c-de495914f7c1" (UID: "d92253a9-9aa5-4930-bc0c-de495914f7c1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.441783 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d92253a9-9aa5-4930-bc0c-de495914f7c1" (UID: "d92253a9-9aa5-4930-bc0c-de495914f7c1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.453920 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.453948 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfmcx\" (UniqueName: \"kubernetes.io/projected/d92253a9-9aa5-4930-bc0c-de495914f7c1-kube-api-access-tfmcx\") on node \"crc\" DevicePath \"\""
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.453959 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-config\") on node \"crc\" DevicePath \"\""
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.453970 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.459284 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d92253a9-9aa5-4930-bc0c-de495914f7c1" (UID: "d92253a9-9aa5-4930-bc0c-de495914f7c1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.557259 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d92253a9-9aa5-4930-bc0c-de495914f7c1-dns-svc\") on node \"crc\" DevicePath \"\""
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.991917 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bf89b475-tb5r5" event={"ID":"d92253a9-9aa5-4930-bc0c-de495914f7c1","Type":"ContainerDied","Data":"226012ccd9c70d044476255ffa0423c2f6b369db4415753b5a302b8cf0d1d041"}
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.991994 4971 scope.go:117] "RemoveContainer" containerID="3a4ecb10e9f79f8f865d8cf9452ba163b667d4a242a6806e00617e57ad36756c"
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.992132 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86bf89b475-tb5r5"
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.995194 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b21ebda9-060e-457a-8631-9d78eaaa095d","Type":"ContainerStarted","Data":"780268b6eacd8703da7d1c72e8a0f4c8f58a4bdfbfff049bc901eafc147a522b"}
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.995266 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b21ebda9-060e-457a-8631-9d78eaaa095d","Type":"ContainerStarted","Data":"c2e6187f6c5c5e5b7d8d40610db18392f8a0f260aa624bd201cb9914c4dc98bd"}
Nov 27 09:03:39 crc kubenswrapper[4971]: I1127 09:03:39.995287 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b21ebda9-060e-457a-8631-9d78eaaa095d","Type":"ContainerStarted","Data":"16702bf565d15cc2daecd8f54a85f1dbe9577d6e7d5046eb2efe1de675f268a5"}
Nov 27 09:03:40 crc kubenswrapper[4971]: I1127 09:03:40.019558 4971 scope.go:117] "RemoveContainer" containerID="640e546478bd7c57526f61c01b070a9fa4ac8fcaf26972eedce4831a59593436"
Nov 27 09:03:40 crc kubenswrapper[4971]: I1127 09:03:40.064285 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86bf89b475-tb5r5"]
Nov 27 09:03:40 crc kubenswrapper[4971]: I1127 09:03:40.079143 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86bf89b475-tb5r5"]
Nov 27 09:03:40 crc kubenswrapper[4971]: I1127 09:03:40.564594 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d92253a9-9aa5-4930-bc0c-de495914f7c1" path="/var/lib/kubelet/pods/d92253a9-9aa5-4930-bc0c-de495914f7c1/volumes"
Nov 27 09:03:41 crc kubenswrapper[4971]: I1127 09:03:41.008291 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b21ebda9-060e-457a-8631-9d78eaaa095d","Type":"ContainerStarted","Data":"9a765314e66db974a8c70793836e72a5b9f22cffbb9abc39151d6d61ce8d0a04"}
Nov 27 09:03:42 crc kubenswrapper[4971]: I1127 09:03:42.290877 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Nov 27 09:03:43 crc kubenswrapper[4971]: I1127 09:03:43.059913 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b21ebda9-060e-457a-8631-9d78eaaa095d","Type":"ContainerStarted","Data":"01857d5dfa7c21cc34f52e159e47feb99ea5dba55f58bbda6306b459d7aba188"}
Nov 27 09:03:43 crc kubenswrapper[4971]: I1127 09:03:43.060192 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerName="sg-core" containerID="cri-o://9a765314e66db974a8c70793836e72a5b9f22cffbb9abc39151d6d61ce8d0a04" gracePeriod=30
Nov 27 09:03:43 crc kubenswrapper[4971]: I1127 09:03:43.060309 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerName="proxy-httpd" containerID="cri-o://01857d5dfa7c21cc34f52e159e47feb99ea5dba55f58bbda6306b459d7aba188" gracePeriod=30
Nov 27 09:03:43 crc kubenswrapper[4971]: I1127 09:03:43.060168 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerName="ceilometer-central-agent" containerID="cri-o://c2e6187f6c5c5e5b7d8d40610db18392f8a0f260aa624bd201cb9914c4dc98bd" gracePeriod=30
Nov 27 09:03:43 crc kubenswrapper[4971]: I1127 09:03:43.060438 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerName="ceilometer-notification-agent" containerID="cri-o://780268b6eacd8703da7d1c72e8a0f4c8f58a4bdfbfff049bc901eafc147a522b" gracePeriod=30
Nov 27 09:03:43 crc kubenswrapper[4971]: I1127 09:03:43.060483 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Nov 27 09:03:43 crc kubenswrapper[4971]: I1127 09:03:43.096009 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.923507668 podStartE2EDuration="5.095983264s" podCreationTimestamp="2025-11-27 09:03:38 +0000 UTC" firstStartedPulling="2025-11-27 09:03:39.061864312 +0000 UTC m=+7857.253908230" lastFinishedPulling="2025-11-27 09:03:42.234339908 +0000 UTC m=+7860.426383826" observedRunningTime="2025-11-27 09:03:43.0843466 +0000 UTC m=+7861.276390528" watchObservedRunningTime="2025-11-27 09:03:43.095983264 +0000 UTC m=+7861.288027182"
Nov 27 09:03:44 crc kubenswrapper[4971]: I1127 09:03:44.076364 4971 generic.go:334] "Generic (PLEG): container finished" podID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerID="01857d5dfa7c21cc34f52e159e47feb99ea5dba55f58bbda6306b459d7aba188" exitCode=0
Nov 27 09:03:44 crc kubenswrapper[4971]: I1127 09:03:44.077019 4971 generic.go:334] "Generic (PLEG): container finished" podID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerID="9a765314e66db974a8c70793836e72a5b9f22cffbb9abc39151d6d61ce8d0a04" exitCode=2
Nov 27 09:03:44 crc kubenswrapper[4971]: I1127 09:03:44.077039 4971 generic.go:334] "Generic (PLEG): container finished" podID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerID="780268b6eacd8703da7d1c72e8a0f4c8f58a4bdfbfff049bc901eafc147a522b" exitCode=0
Nov 27 09:03:44 crc kubenswrapper[4971]: I1127 09:03:44.077073 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b21ebda9-060e-457a-8631-9d78eaaa095d","Type":"ContainerDied","Data":"01857d5dfa7c21cc34f52e159e47feb99ea5dba55f58bbda6306b459d7aba188"}
Nov 27 09:03:44 crc kubenswrapper[4971]: I1127 09:03:44.077126 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b21ebda9-060e-457a-8631-9d78eaaa095d","Type":"ContainerDied","Data":"9a765314e66db974a8c70793836e72a5b9f22cffbb9abc39151d6d61ce8d0a04"}
Nov 27 09:03:44 crc kubenswrapper[4971]: I1127 09:03:44.077144 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b21ebda9-060e-457a-8631-9d78eaaa095d","Type":"ContainerDied","Data":"780268b6eacd8703da7d1c72e8a0f4c8f58a4bdfbfff049bc901eafc147a522b"}
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.102206 4971 generic.go:334] "Generic (PLEG): container finished" podID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerID="c2e6187f6c5c5e5b7d8d40610db18392f8a0f260aa624bd201cb9914c4dc98bd" exitCode=0
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.102621 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b21ebda9-060e-457a-8631-9d78eaaa095d","Type":"ContainerDied","Data":"c2e6187f6c5c5e5b7d8d40610db18392f8a0f260aa624bd201cb9914c4dc98bd"}
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.328059 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.399910 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-combined-ca-bundle\") pod \"b21ebda9-060e-457a-8631-9d78eaaa095d\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") "
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.400118 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b21ebda9-060e-457a-8631-9d78eaaa095d-run-httpd\") pod \"b21ebda9-060e-457a-8631-9d78eaaa095d\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") "
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.400183 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-scripts\") pod \"b21ebda9-060e-457a-8631-9d78eaaa095d\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") "
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.400208 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b21ebda9-060e-457a-8631-9d78eaaa095d-log-httpd\") pod \"b21ebda9-060e-457a-8631-9d78eaaa095d\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") "
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.400401 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vn4lk\" (UniqueName: \"kubernetes.io/projected/b21ebda9-060e-457a-8631-9d78eaaa095d-kube-api-access-vn4lk\") pod \"b21ebda9-060e-457a-8631-9d78eaaa095d\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") "
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.400435 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-config-data\") pod \"b21ebda9-060e-457a-8631-9d78eaaa095d\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") "
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.400466 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-sg-core-conf-yaml\") pod \"b21ebda9-060e-457a-8631-9d78eaaa095d\" (UID: \"b21ebda9-060e-457a-8631-9d78eaaa095d\") "
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.401481 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b21ebda9-060e-457a-8631-9d78eaaa095d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b21ebda9-060e-457a-8631-9d78eaaa095d" (UID: "b21ebda9-060e-457a-8631-9d78eaaa095d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.401859 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b21ebda9-060e-457a-8631-9d78eaaa095d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b21ebda9-060e-457a-8631-9d78eaaa095d" (UID: "b21ebda9-060e-457a-8631-9d78eaaa095d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.406975 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-scripts" (OuterVolumeSpecName: "scripts") pod "b21ebda9-060e-457a-8631-9d78eaaa095d" (UID: "b21ebda9-060e-457a-8631-9d78eaaa095d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.408214 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b21ebda9-060e-457a-8631-9d78eaaa095d-kube-api-access-vn4lk" (OuterVolumeSpecName: "kube-api-access-vn4lk") pod "b21ebda9-060e-457a-8631-9d78eaaa095d" (UID: "b21ebda9-060e-457a-8631-9d78eaaa095d"). InnerVolumeSpecName "kube-api-access-vn4lk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.433581 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b21ebda9-060e-457a-8631-9d78eaaa095d" (UID: "b21ebda9-060e-457a-8631-9d78eaaa095d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.495007 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b21ebda9-060e-457a-8631-9d78eaaa095d" (UID: "b21ebda9-060e-457a-8631-9d78eaaa095d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.504076 4971 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b21ebda9-060e-457a-8631-9d78eaaa095d-run-httpd\") on node \"crc\" DevicePath \"\""
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.504125 4971 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-scripts\") on node \"crc\" DevicePath \"\""
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.504138 4971 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b21ebda9-060e-457a-8631-9d78eaaa095d-log-httpd\") on node \"crc\" DevicePath \"\""
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.504151 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vn4lk\" (UniqueName: \"kubernetes.io/projected/b21ebda9-060e-457a-8631-9d78eaaa095d-kube-api-access-vn4lk\") on node \"crc\" DevicePath \"\""
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.504166 4971 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.504178 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.506965 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-config-data" (OuterVolumeSpecName: "config-data") pod "b21ebda9-060e-457a-8631-9d78eaaa095d" (UID: "b21ebda9-060e-457a-8631-9d78eaaa095d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:03:45 crc kubenswrapper[4971]: I1127 09:03:45.607005 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b21ebda9-060e-457a-8631-9d78eaaa095d-config-data\") on node \"crc\" DevicePath \"\""
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.121027 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b21ebda9-060e-457a-8631-9d78eaaa095d","Type":"ContainerDied","Data":"16702bf565d15cc2daecd8f54a85f1dbe9577d6e7d5046eb2efe1de675f268a5"}
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.121679 4971 scope.go:117] "RemoveContainer" containerID="01857d5dfa7c21cc34f52e159e47feb99ea5dba55f58bbda6306b459d7aba188"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.121330 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.159441 4971 scope.go:117] "RemoveContainer" containerID="9a765314e66db974a8c70793836e72a5b9f22cffbb9abc39151d6d61ce8d0a04"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.167542 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.182672 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.193380 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Nov 27 09:03:46 crc kubenswrapper[4971]: E1127 09:03:46.193863 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d92253a9-9aa5-4930-bc0c-de495914f7c1" containerName="init"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.193884 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d92253a9-9aa5-4930-bc0c-de495914f7c1" containerName="init"
Nov 27 09:03:46 crc kubenswrapper[4971]: E1127 09:03:46.193915 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerName="ceilometer-notification-agent"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.193926 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerName="ceilometer-notification-agent"
Nov 27 09:03:46 crc kubenswrapper[4971]: E1127 09:03:46.193948 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerName="ceilometer-central-agent"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.193956 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerName="ceilometer-central-agent"
Nov 27 09:03:46 crc kubenswrapper[4971]: E1127 09:03:46.193971 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerName="sg-core"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.193976 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerName="sg-core"
Nov 27 09:03:46 crc kubenswrapper[4971]: E1127 09:03:46.193997 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerName="proxy-httpd"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.194004 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerName="proxy-httpd"
Nov 27 09:03:46 crc kubenswrapper[4971]: E1127 09:03:46.194013 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d92253a9-9aa5-4930-bc0c-de495914f7c1" containerName="dnsmasq-dns"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.194019 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d92253a9-9aa5-4930-bc0c-de495914f7c1" containerName="dnsmasq-dns"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.194630 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerName="proxy-httpd"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.194652 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerName="ceilometer-central-agent"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.194668 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="d92253a9-9aa5-4930-bc0c-de495914f7c1" containerName="dnsmasq-dns"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.194679 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerName="sg-core"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.194695 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b21ebda9-060e-457a-8631-9d78eaaa095d" containerName="ceilometer-notification-agent"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.196897 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.200399 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.201013 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.209978 4971 scope.go:117] "RemoveContainer" containerID="780268b6eacd8703da7d1c72e8a0f4c8f58a4bdfbfff049bc901eafc147a522b"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.217269 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.290853 4971 scope.go:117] "RemoveContainer" containerID="c2e6187f6c5c5e5b7d8d40610db18392f8a0f260aa624bd201cb9914c4dc98bd"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.323519 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.323651 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.323678 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tzzz\" (UniqueName: \"kubernetes.io/projected/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-kube-api-access-8tzzz\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.323784 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-log-httpd\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.323851 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-config-data\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.323896 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-scripts\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.323924 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-run-httpd\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.425650 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.425732 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.425753 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tzzz\" (UniqueName: \"kubernetes.io/projected/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-kube-api-access-8tzzz\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.425830 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-log-httpd\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.425870 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-config-data\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.425888 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-scripts\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.425910 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-run-httpd\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.426650 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-log-httpd\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.426901 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-run-httpd\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.431086 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-config-data\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.432147 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-scripts\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.442923 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.445660 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tzzz\" (UniqueName: \"kubernetes.io/projected/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-kube-api-access-8tzzz\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.460713 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84\") " pod="openstack/ceilometer-0"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.565139 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b21ebda9-060e-457a-8631-9d78eaaa095d" path="/var/lib/kubelet/pods/b21ebda9-060e-457a-8631-9d78eaaa095d/volumes"
Nov 27 09:03:46 crc kubenswrapper[4971]: I1127 09:03:46.565306 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 27 09:03:47 crc kubenswrapper[4971]: I1127 09:03:47.052905 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Nov 27 09:03:47 crc kubenswrapper[4971]: W1127 09:03:47.057657 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7c50396_fa1e_4ec4_8835_6fd6e1bc9b84.slice/crio-a77a8a96882a03dfb76fc672e99ecbf767bffd333216774d5ce525acec9df312 WatchSource:0}: Error finding container a77a8a96882a03dfb76fc672e99ecbf767bffd333216774d5ce525acec9df312: Status 404 returned error can't find the container with id a77a8a96882a03dfb76fc672e99ecbf767bffd333216774d5ce525acec9df312
Nov 27 09:03:47 crc kubenswrapper[4971]: I1127 09:03:47.134167 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84","Type":"ContainerStarted","Data":"a77a8a96882a03dfb76fc672e99ecbf767bffd333216774d5ce525acec9df312"}
Nov 27 09:03:47 crc kubenswrapper[4971]: I1127 09:03:47.550458 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168"
Nov 27 09:03:47 crc kubenswrapper[4971]: E1127 09:03:47.551000 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:03:48 crc kubenswrapper[4971]: I1127 09:03:48.149743 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84","Type":"ContainerStarted","Data":"b1434ecc5caabe3ea86b2b17267287af00ba6ebfa4dfa74126c3f0c2b125868c"}
Nov 27 09:03:48 crc kubenswrapper[4971]: I1127 09:03:48.150291 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84","Type":"ContainerStarted","Data":"7bc9cf39af4374d550e0eecdb012205f86e8ddbe2fb23797b575c44465f542ed"}
Nov 27 09:03:49 crc kubenswrapper[4971]: I1127 09:03:49.162070 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84","Type":"ContainerStarted","Data":"6dee745319dbfb4a63fdeff751a0d6c2753e6b257403f7fe1596bf83315eb9bc"}
Nov 27 09:03:50 crc kubenswrapper[4971]: I1127 09:03:50.066738 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0"
Nov 27 09:03:50 crc kubenswrapper[4971]: I1127 09:03:50.177360 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84","Type":"ContainerStarted","Data":"03b09e7251932787381831619fdecbe5926130e34d2ed759e757933b58e4a54e"}
Nov 27 09:03:50 crc kubenswrapper[4971]: I1127 09:03:50.178718 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Nov 27 09:03:50 crc kubenswrapper[4971]: I1127 09:03:50.218739 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.5530926489999999 podStartE2EDuration="4.218718148s" podCreationTimestamp="2025-11-27 09:03:46 +0000 UTC" firstStartedPulling="2025-11-27 09:03:47.061476159 +0000 UTC m=+7865.253520077" lastFinishedPulling="2025-11-27 09:03:49.727101658 +0000 UTC m=+7867.919145576" observedRunningTime="2025-11-27 09:03:50.203897743 +0000 UTC m=+7868.395941671" watchObservedRunningTime="2025-11-27 09:03:50.218718148 +0000 UTC m=+7868.410762066"
Nov 27 09:03:50 crc kubenswrapper[4971]: I1127 09:03:50.350144 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0"
Nov 27 09:03:50 crc kubenswrapper[4971]: I1127 09:03:50.437404 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0"
Nov 27 09:03:55 crc kubenswrapper[4971]: I1127 09:03:55.871193 4971 scope.go:117] "RemoveContainer" containerID="e75750b3e68dab7e4b1472f97a570c72b6a25b4a1fb0daae229530905fbed035"
Nov 27 09:03:55 crc kubenswrapper[4971]: I1127 09:03:55.901798 4971 scope.go:117] "RemoveContainer" containerID="8c17a9b039b351f6c479fd011283c99457ea2826ecad03822e7480c24a053563"
Nov 27 09:03:55 crc kubenswrapper[4971]: I1127 09:03:55.975306 4971 scope.go:117] "RemoveContainer" containerID="35620458da3e080f33f43cc93fb770b2cc1cd6250a32765da4d35255073653d4"
Nov 27 09:04:01 crc kubenswrapper[4971]: I1127 09:04:01.551260 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168"
Nov 27 09:04:01 crc kubenswrapper[4971]: E1127 09:04:01.552727 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:04:16 crc kubenswrapper[4971]: I1127 09:04:16.550849 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168"
Nov 27 09:04:16 crc kubenswrapper[4971]: E1127 09:04:16.552165 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:04:16 crc kubenswrapper[4971]: I1127 09:04:16.577576 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Nov 27 09:04:24 crc kubenswrapper[4971]: I1127 09:04:24.056740 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-r4rk7"]
Nov 27 09:04:24 crc kubenswrapper[4971]: I1127 09:04:24.074341 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-66dtj"]
Nov 27 09:04:24 crc kubenswrapper[4971]: I1127 09:04:24.086928 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-efd3-account-create-update-v8hnf"]
Nov 27 09:04:24 crc kubenswrapper[4971]: I1127 09:04:24.098968 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-15ab-account-create-update-d2vpj"]
Nov 27 09:04:24 crc kubenswrapper[4971]: I1127 09:04:24.109870 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-r4rk7"]
Nov 27 09:04:24 crc kubenswrapper[4971]: I1127 09:04:24.125604 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-efd3-account-create-update-v8hnf"]
Nov 27 09:04:24 crc kubenswrapper[4971]: I1127 09:04:24.138262 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-66dtj"]
Nov 27 09:04:24 crc kubenswrapper[4971]: I1127 09:04:24.148827 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-15ab-account-create-update-d2vpj"]
Nov 27 09:04:24 crc kubenswrapper[4971]: I1127 09:04:24.565690 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27d1bc48-7132-44a9-aa8b-cd0e45e6d106" path="/var/lib/kubelet/pods/27d1bc48-7132-44a9-aa8b-cd0e45e6d106/volumes"
Nov 27 09:04:24 crc kubenswrapper[4971]: I1127 09:04:24.566575 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f97874d-2c1b-44b0-b1f3-e5a337a4b209" path="/var/lib/kubelet/pods/6f97874d-2c1b-44b0-b1f3-e5a337a4b209/volumes"
Nov 27 09:04:24 crc kubenswrapper[4971]: I1127 09:04:24.567381 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85bff7b4-875a-4b89-b961-06b4177b91cf" path="/var/lib/kubelet/pods/85bff7b4-875a-4b89-b961-06b4177b91cf/volumes"
Nov 27 09:04:24 crc kubenswrapper[4971]: I1127 09:04:24.568016 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96677690-6dcc-4fe6-a2ec-418a02ca414b" path="/var/lib/kubelet/pods/96677690-6dcc-4fe6-a2ec-418a02ca414b/volumes"
Nov 27 09:04:25 crc kubenswrapper[4971]: I1127 09:04:25.037155 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-bh6jw"]
Nov 27 09:04:25 crc kubenswrapper[4971]: I1127 09:04:25.051417 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-bh6jw"]
Nov 27 09:04:25 crc kubenswrapper[4971]: I1127 09:04:25.068654 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-4814-account-create-update-5lnrc"]
Nov 27 09:04:25 crc kubenswrapper[4971]: I1127 09:04:25.079266 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-4814-account-create-update-5lnrc"]
Nov 27 09:04:26 crc kubenswrapper[4971]: I1127 09:04:26.563519 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2" path="/var/lib/kubelet/pods/25227c00-9f73-4bfa-9cfe-90bc9aa2c0b2/volumes"
Nov 27 09:04:26 crc kubenswrapper[4971]: I1127 09:04:26.564656 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82e0248c-1ccd-489a-8eb8-253782084ef2" path="/var/lib/kubelet/pods/82e0248c-1ccd-489a-8eb8-253782084ef2/volumes"
Nov 27 09:04:28 crc kubenswrapper[4971]: I1127 09:04:28.553794 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168"
Nov 27 09:04:28 crc kubenswrapper[4971]: E1127 09:04:28.554848 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.360065 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c79ddd697-nn45w"]
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.366071 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c79ddd697-nn45w"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.376096 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.384337 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c79ddd697-nn45w"]
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.457984 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-config\") pod \"dnsmasq-dns-7c79ddd697-nn45w\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " pod="openstack/dnsmasq-dns-7c79ddd697-nn45w"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.458251 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqqw5\" (UniqueName: \"kubernetes.io/projected/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-kube-api-access-fqqw5\") pod \"dnsmasq-dns-7c79ddd697-nn45w\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " pod="openstack/dnsmasq-dns-7c79ddd697-nn45w"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.458290 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-openstack-cell1\") pod \"dnsmasq-dns-7c79ddd697-nn45w\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " pod="openstack/dnsmasq-dns-7c79ddd697-nn45w"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.458329 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-dns-svc\") pod \"dnsmasq-dns-7c79ddd697-nn45w\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " pod="openstack/dnsmasq-dns-7c79ddd697-nn45w"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.458360 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-ovsdbserver-nb\") pod \"dnsmasq-dns-7c79ddd697-nn45w\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " pod="openstack/dnsmasq-dns-7c79ddd697-nn45w"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.458473 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-ovsdbserver-sb\") pod \"dnsmasq-dns-7c79ddd697-nn45w\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " pod="openstack/dnsmasq-dns-7c79ddd697-nn45w"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.487571 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c79ddd697-nn45w"]
Nov 27 09:04:35 crc kubenswrapper[4971]: E1127 09:04:35.488986 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-fqqw5 openstack-cell1 ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-7c79ddd697-nn45w" podUID="bcc68fe9-aa80-4fe9-8ce9-85904b760b2e"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.524837 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-95cd49b99-n79qb"]
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.526799 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95cd49b99-n79qb"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.528823 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-networker"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.561554 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqqw5\" (UniqueName: \"kubernetes.io/projected/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-kube-api-access-fqqw5\") pod \"dnsmasq-dns-7c79ddd697-nn45w\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " pod="openstack/dnsmasq-dns-7c79ddd697-nn45w"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.561612 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-openstack-cell1\") pod \"dnsmasq-dns-7c79ddd697-nn45w\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " pod="openstack/dnsmasq-dns-7c79ddd697-nn45w"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.561643 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-dns-svc\") pod \"dnsmasq-dns-7c79ddd697-nn45w\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " pod="openstack/dnsmasq-dns-7c79ddd697-nn45w"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.561668 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-ovsdbserver-nb\") pod \"dnsmasq-dns-7c79ddd697-nn45w\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " pod="openstack/dnsmasq-dns-7c79ddd697-nn45w"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.561725 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-ovsdbserver-sb\") pod \"dnsmasq-dns-7c79ddd697-nn45w\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " pod="openstack/dnsmasq-dns-7c79ddd697-nn45w"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.561755 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-config\") pod \"dnsmasq-dns-7c79ddd697-nn45w\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " pod="openstack/dnsmasq-dns-7c79ddd697-nn45w"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.562889 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-config\") pod \"dnsmasq-dns-7c79ddd697-nn45w\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " pod="openstack/dnsmasq-dns-7c79ddd697-nn45w"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.563283 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-dns-svc\") pod \"dnsmasq-dns-7c79ddd697-nn45w\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " pod="openstack/dnsmasq-dns-7c79ddd697-nn45w"
Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.563450 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-ovsdbserver-nb\") pod \"dnsmasq-dns-7c79ddd697-nn45w\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " pod="openstack/dnsmasq-dns-7c79ddd697-nn45w"
(UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-ovsdbserver-nb\") pod \"dnsmasq-dns-7c79ddd697-nn45w\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " pod="openstack/dnsmasq-dns-7c79ddd697-nn45w" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.564007 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-ovsdbserver-sb\") pod \"dnsmasq-dns-7c79ddd697-nn45w\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " pod="openstack/dnsmasq-dns-7c79ddd697-nn45w" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.564229 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-openstack-cell1\") pod \"dnsmasq-dns-7c79ddd697-nn45w\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " pod="openstack/dnsmasq-dns-7c79ddd697-nn45w" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.564821 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95cd49b99-n79qb"] Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.593611 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqqw5\" (UniqueName: \"kubernetes.io/projected/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-kube-api-access-fqqw5\") pod \"dnsmasq-dns-7c79ddd697-nn45w\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " pod="openstack/dnsmasq-dns-7c79ddd697-nn45w" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.663592 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-ovsdbserver-nb\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.665028 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8v9n\" (UniqueName: \"kubernetes.io/projected/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-kube-api-access-b8v9n\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.665310 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-config\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.666600 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-openstack-cell1\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.666702 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-dns-svc\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " 
pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.666781 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-ovsdbserver-sb\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.666848 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-openstack-networker\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.718166 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c79ddd697-nn45w" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.718625 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95cd49b99-n79qb"] Nov 27 09:04:35 crc kubenswrapper[4971]: E1127 09:04:35.724577 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-b8v9n openstack-cell1 openstack-networker ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-95cd49b99-n79qb" podUID="8365fe35-1adf-4591-bfc8-5bc1ead4a89d" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.734583 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c79ddd697-nn45w" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.769348 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-openstack-cell1\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.769428 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-dns-svc\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.769467 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-ovsdbserver-sb\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.769495 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-openstack-networker\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.769563 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-ovsdbserver-nb\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.769909 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8v9n\" (UniqueName: \"kubernetes.io/projected/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-kube-api-access-b8v9n\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.769932 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-599c85c499-qv6wx"] Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.769997 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-config\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.772084 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.774636 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-dns-svc\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.774760 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-ovsdbserver-sb\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.776121 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-openstack-networker\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.779838 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-openstack-cell1\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.779954 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-ovsdbserver-nb\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.780650 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-config\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " 
pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.801168 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8v9n\" (UniqueName: \"kubernetes.io/projected/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-kube-api-access-b8v9n\") pod \"dnsmasq-dns-95cd49b99-n79qb\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.813677 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-599c85c499-qv6wx"] Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.872474 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-dns-svc\") pod \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.872579 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-ovsdbserver-nb\") pod \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.872607 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-config\") pod \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.872654 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-openstack-cell1\") pod \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.872702 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqqw5\" (UniqueName: \"kubernetes.io/projected/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-kube-api-access-fqqw5\") pod \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.872814 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-ovsdbserver-sb\") pod \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\" (UID: \"bcc68fe9-aa80-4fe9-8ce9-85904b760b2e\") " Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.873295 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-ovsdbserver-nb\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.873393 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-config\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: 
I1127 09:04:35.873441 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-openstack-networker\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.873473 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-ovsdbserver-sb\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.873858 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sx4nv\" (UniqueName: \"kubernetes.io/projected/7771659e-e6ad-48ad-b05e-d868c4e882f5-kube-api-access-sx4nv\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.873986 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-dns-svc\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.874008 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-openstack-cell1\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.874827 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bcc68fe9-aa80-4fe9-8ce9-85904b760b2e" (UID: "bcc68fe9-aa80-4fe9-8ce9-85904b760b2e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.874880 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bcc68fe9-aa80-4fe9-8ce9-85904b760b2e" (UID: "bcc68fe9-aa80-4fe9-8ce9-85904b760b2e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.874924 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-openstack-cell1" (OuterVolumeSpecName: "openstack-cell1") pod "bcc68fe9-aa80-4fe9-8ce9-85904b760b2e" (UID: "bcc68fe9-aa80-4fe9-8ce9-85904b760b2e"). InnerVolumeSpecName "openstack-cell1". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.875434 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-config" (OuterVolumeSpecName: "config") pod "bcc68fe9-aa80-4fe9-8ce9-85904b760b2e" (UID: "bcc68fe9-aa80-4fe9-8ce9-85904b760b2e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.875839 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bcc68fe9-aa80-4fe9-8ce9-85904b760b2e" (UID: "bcc68fe9-aa80-4fe9-8ce9-85904b760b2e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.879959 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-kube-api-access-fqqw5" (OuterVolumeSpecName: "kube-api-access-fqqw5") pod "bcc68fe9-aa80-4fe9-8ce9-85904b760b2e" (UID: "bcc68fe9-aa80-4fe9-8ce9-85904b760b2e"). InnerVolumeSpecName "kube-api-access-fqqw5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.976315 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sx4nv\" (UniqueName: \"kubernetes.io/projected/7771659e-e6ad-48ad-b05e-d868c4e882f5-kube-api-access-sx4nv\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.976886 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-dns-svc\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.976928 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-openstack-cell1\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.977008 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-ovsdbserver-nb\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.977080 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-config\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.977119 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-networker\" (UniqueName: 
\"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-openstack-networker\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.977149 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-ovsdbserver-sb\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.977227 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.977242 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.977257 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.977267 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-config\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.977280 4971 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-openstack-cell1\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.977290 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqqw5\" (UniqueName: \"kubernetes.io/projected/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e-kube-api-access-fqqw5\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.978502 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-ovsdbserver-sb\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.978942 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-dns-svc\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.979254 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-openstack-networker\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.979386 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-ovsdbserver-nb\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.979545 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-config\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:35 crc kubenswrapper[4971]: I1127 09:04:35.980184 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-openstack-cell1\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.004270 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sx4nv\" (UniqueName: \"kubernetes.io/projected/7771659e-e6ad-48ad-b05e-d868c4e882f5-kube-api-access-sx4nv\") pod \"dnsmasq-dns-599c85c499-qv6wx\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.099200 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.731334 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c79ddd697-nn45w" Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.740747 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.765312 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.866792 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c79ddd697-nn45w"] Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.877066 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c79ddd697-nn45w"] Nov 27 09:04:36 crc kubenswrapper[4971]: W1127 09:04:36.877166 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7771659e_e6ad_48ad_b05e_d868c4e882f5.slice/crio-ddfc68957c4723cd18ac3bca29e175677f1b88e556693d220ea8059a259d608e WatchSource:0}: Error finding container ddfc68957c4723cd18ac3bca29e175677f1b88e556693d220ea8059a259d608e: Status 404 returned error can't find the container with id ddfc68957c4723cd18ac3bca29e175677f1b88e556693d220ea8059a259d608e Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.889244 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-599c85c499-qv6wx"] Nov 27 09:04:36 crc kubenswrapper[4971]: E1127 09:04:36.896049 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbcc68fe9_aa80_4fe9_8ce9_85904b760b2e.slice\": RecentStats: unable to find data in memory cache]" Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.931600 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-config\") pod \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.931994 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-ovsdbserver-sb\") pod \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.932120 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-config" (OuterVolumeSpecName: "config") pod "8365fe35-1adf-4591-bfc8-5bc1ead4a89d" (UID: "8365fe35-1adf-4591-bfc8-5bc1ead4a89d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.932251 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-dns-svc\") pod \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.932299 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-ovsdbserver-nb\") pod \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.932386 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-openstack-cell1\") pod \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.932433 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-openstack-networker\") pod \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.932478 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8v9n\" (UniqueName: \"kubernetes.io/projected/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-kube-api-access-b8v9n\") pod \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\" (UID: \"8365fe35-1adf-4591-bfc8-5bc1ead4a89d\") " Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.932651 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8365fe35-1adf-4591-bfc8-5bc1ead4a89d" (UID: "8365fe35-1adf-4591-bfc8-5bc1ead4a89d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.932672 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8365fe35-1adf-4591-bfc8-5bc1ead4a89d" (UID: "8365fe35-1adf-4591-bfc8-5bc1ead4a89d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.933030 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-openstack-cell1" (OuterVolumeSpecName: "openstack-cell1") pod "8365fe35-1adf-4591-bfc8-5bc1ead4a89d" (UID: "8365fe35-1adf-4591-bfc8-5bc1ead4a89d"). InnerVolumeSpecName "openstack-cell1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.933084 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8365fe35-1adf-4591-bfc8-5bc1ead4a89d" (UID: "8365fe35-1adf-4591-bfc8-5bc1ead4a89d"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.933472 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-openstack-networker" (OuterVolumeSpecName: "openstack-networker") pod "8365fe35-1adf-4591-bfc8-5bc1ead4a89d" (UID: "8365fe35-1adf-4591-bfc8-5bc1ead4a89d"). InnerVolumeSpecName "openstack-networker". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.933764 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.933790 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.933806 4971 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-openstack-cell1\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.933815 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-config\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.933827 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:36 crc kubenswrapper[4971]: I1127 09:04:36.939766 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-kube-api-access-b8v9n" (OuterVolumeSpecName: "kube-api-access-b8v9n") pod "8365fe35-1adf-4591-bfc8-5bc1ead4a89d" (UID: "8365fe35-1adf-4591-bfc8-5bc1ead4a89d"). InnerVolumeSpecName "kube-api-access-b8v9n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:04:37 crc kubenswrapper[4971]: I1127 09:04:37.035957 4971 reconciler_common.go:293] "Volume detached for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-openstack-networker\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:37 crc kubenswrapper[4971]: I1127 09:04:37.035988 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8v9n\" (UniqueName: \"kubernetes.io/projected/8365fe35-1adf-4591-bfc8-5bc1ead4a89d-kube-api-access-b8v9n\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:37 crc kubenswrapper[4971]: I1127 09:04:37.746170 4971 generic.go:334] "Generic (PLEG): container finished" podID="7771659e-e6ad-48ad-b05e-d868c4e882f5" containerID="26d7591779302d8fc12337571ede5b5ef73f0dfe5f95dc3de26bf0a4b62aabb4" exitCode=0 Nov 27 09:04:37 crc kubenswrapper[4971]: I1127 09:04:37.746236 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-599c85c499-qv6wx" event={"ID":"7771659e-e6ad-48ad-b05e-d868c4e882f5","Type":"ContainerDied","Data":"26d7591779302d8fc12337571ede5b5ef73f0dfe5f95dc3de26bf0a4b62aabb4"} Nov 27 09:04:37 crc kubenswrapper[4971]: I1127 09:04:37.746614 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-599c85c499-qv6wx" event={"ID":"7771659e-e6ad-48ad-b05e-d868c4e882f5","Type":"ContainerStarted","Data":"ddfc68957c4723cd18ac3bca29e175677f1b88e556693d220ea8059a259d608e"} Nov 27 09:04:37 crc kubenswrapper[4971]: I1127 09:04:37.746621 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95cd49b99-n79qb" Nov 27 09:04:38 crc kubenswrapper[4971]: I1127 09:04:38.036241 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95cd49b99-n79qb"] Nov 27 09:04:38 crc kubenswrapper[4971]: I1127 09:04:38.047724 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-95cd49b99-n79qb"] Nov 27 09:04:38 crc kubenswrapper[4971]: I1127 09:04:38.565092 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8365fe35-1adf-4591-bfc8-5bc1ead4a89d" path="/var/lib/kubelet/pods/8365fe35-1adf-4591-bfc8-5bc1ead4a89d/volumes" Nov 27 09:04:38 crc kubenswrapper[4971]: I1127 09:04:38.566362 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcc68fe9-aa80-4fe9-8ce9-85904b760b2e" path="/var/lib/kubelet/pods/bcc68fe9-aa80-4fe9-8ce9-85904b760b2e/volumes" Nov 27 09:04:38 crc kubenswrapper[4971]: I1127 09:04:38.761688 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-599c85c499-qv6wx" event={"ID":"7771659e-e6ad-48ad-b05e-d868c4e882f5","Type":"ContainerStarted","Data":"e22c4baa305eb134779c1def51928ef09092aec7642aa877a967f8e8e0921b7d"} Nov 27 09:04:38 crc kubenswrapper[4971]: I1127 09:04:38.761940 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:38 crc kubenswrapper[4971]: I1127 09:04:38.788755 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-599c85c499-qv6wx" podStartSLOduration=3.78872319 podStartE2EDuration="3.78872319s" podCreationTimestamp="2025-11-27 09:04:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 09:04:38.78348465 +0000 UTC m=+7916.975528578" watchObservedRunningTime="2025-11-27 09:04:38.78872319 +0000 UTC 
m=+7916.980767138" Nov 27 09:04:41 crc kubenswrapper[4971]: I1127 09:04:41.556202 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168" Nov 27 09:04:41 crc kubenswrapper[4971]: E1127 09:04:41.557060 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:04:44 crc kubenswrapper[4971]: I1127 09:04:44.048897 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-d4wtj"] Nov 27 09:04:44 crc kubenswrapper[4971]: I1127 09:04:44.062573 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-d4wtj"] Nov 27 09:04:44 crc kubenswrapper[4971]: I1127 09:04:44.575965 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="356faab6-ab83-48bd-96e6-9c241d711bc7" path="/var/lib/kubelet/pods/356faab6-ab83-48bd-96e6-9c241d711bc7/volumes" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.100780 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.222509 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc779bffc-k8jn2"] Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.223605 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2" podUID="09c00568-2483-43ee-b406-28f3b60dd660" containerName="dnsmasq-dns" containerID="cri-o://7b0b0ee9a39768b33b6beb3ec1ceca199b5bf272227f3f316ec4f4db5476d95b" gracePeriod=10 Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.549548 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79c966c8f9-zl9w6"] Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.557469 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.605368 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79c966c8f9-zl9w6"] Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.707229 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3464217-b299-4e96-bab3-c00569d46839-dns-svc\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.707364 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbt9m\" (UniqueName: \"kubernetes.io/projected/d3464217-b299-4e96-bab3-c00569d46839-kube-api-access-zbt9m\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.707400 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/d3464217-b299-4e96-bab3-c00569d46839-openstack-cell1\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.707443 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/d3464217-b299-4e96-bab3-c00569d46839-openstack-networker\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.707608 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d3464217-b299-4e96-bab3-c00569d46839-ovsdbserver-sb\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.707638 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3464217-b299-4e96-bab3-c00569d46839-config\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.707896 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d3464217-b299-4e96-bab3-c00569d46839-ovsdbserver-nb\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.810678 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/d3464217-b299-4e96-bab3-c00569d46839-openstack-cell1\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.810756 4971 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/d3464217-b299-4e96-bab3-c00569d46839-openstack-networker\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.810882 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d3464217-b299-4e96-bab3-c00569d46839-ovsdbserver-sb\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.810914 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3464217-b299-4e96-bab3-c00569d46839-config\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.811000 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d3464217-b299-4e96-bab3-c00569d46839-ovsdbserver-nb\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.811057 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3464217-b299-4e96-bab3-c00569d46839-dns-svc\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.811104 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbt9m\" (UniqueName: \"kubernetes.io/projected/d3464217-b299-4e96-bab3-c00569d46839-kube-api-access-zbt9m\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.812639 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/d3464217-b299-4e96-bab3-c00569d46839-openstack-cell1\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.813346 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/d3464217-b299-4e96-bab3-c00569d46839-openstack-networker\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.813993 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d3464217-b299-4e96-bab3-c00569d46839-ovsdbserver-sb\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.815476 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3464217-b299-4e96-bab3-c00569d46839-dns-svc\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.815496 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d3464217-b299-4e96-bab3-c00569d46839-ovsdbserver-nb\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.816033 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3464217-b299-4e96-bab3-c00569d46839-config\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.828655 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.833345 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbt9m\" (UniqueName: \"kubernetes.io/projected/d3464217-b299-4e96-bab3-c00569d46839-kube-api-access-zbt9m\") pod \"dnsmasq-dns-79c966c8f9-zl9w6\" (UID: \"d3464217-b299-4e96-bab3-c00569d46839\") " pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.877992 4971 generic.go:334] "Generic (PLEG): container finished" podID="09c00568-2483-43ee-b406-28f3b60dd660" containerID="7b0b0ee9a39768b33b6beb3ec1ceca199b5bf272227f3f316ec4f4db5476d95b" exitCode=0 Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.878037 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2" event={"ID":"09c00568-2483-43ee-b406-28f3b60dd660","Type":"ContainerDied","Data":"7b0b0ee9a39768b33b6beb3ec1ceca199b5bf272227f3f316ec4f4db5476d95b"} Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.878066 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2" event={"ID":"09c00568-2483-43ee-b406-28f3b60dd660","Type":"ContainerDied","Data":"1f81aaaeec789a5a9b323b41c3596b2e8f28868f7a23558a727f75e117e413db"} Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.878083 4971 scope.go:117] "RemoveContainer" containerID="7b0b0ee9a39768b33b6beb3ec1ceca199b5bf272227f3f316ec4f4db5476d95b" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.878246 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc779bffc-k8jn2" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.898188 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:46 crc kubenswrapper[4971]: I1127 09:04:46.934710 4971 scope.go:117] "RemoveContainer" containerID="54ffa50611ffa0eec5961247c221e99066bb00cb56e1de4b812bdaa2ad8ed220" Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.015761 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-ovsdbserver-nb\") pod \"09c00568-2483-43ee-b406-28f3b60dd660\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.016384 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-config\") pod \"09c00568-2483-43ee-b406-28f3b60dd660\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.016447 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-ovsdbserver-sb\") pod \"09c00568-2483-43ee-b406-28f3b60dd660\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.016562 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-dns-svc\") pod \"09c00568-2483-43ee-b406-28f3b60dd660\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.016680 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btk2m\" (UniqueName: \"kubernetes.io/projected/09c00568-2483-43ee-b406-28f3b60dd660-kube-api-access-btk2m\") pod \"09c00568-2483-43ee-b406-28f3b60dd660\" (UID: \"09c00568-2483-43ee-b406-28f3b60dd660\") " Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.021048 4971 scope.go:117] "RemoveContainer" containerID="7b0b0ee9a39768b33b6beb3ec1ceca199b5bf272227f3f316ec4f4db5476d95b" Nov 27 09:04:47 crc kubenswrapper[4971]: E1127 09:04:47.022198 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b0b0ee9a39768b33b6beb3ec1ceca199b5bf272227f3f316ec4f4db5476d95b\": container with ID starting with 7b0b0ee9a39768b33b6beb3ec1ceca199b5bf272227f3f316ec4f4db5476d95b not found: ID does not exist" containerID="7b0b0ee9a39768b33b6beb3ec1ceca199b5bf272227f3f316ec4f4db5476d95b" Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.022440 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b0b0ee9a39768b33b6beb3ec1ceca199b5bf272227f3f316ec4f4db5476d95b"} err="failed to get container status \"7b0b0ee9a39768b33b6beb3ec1ceca199b5bf272227f3f316ec4f4db5476d95b\": rpc error: code = NotFound desc = could not find container \"7b0b0ee9a39768b33b6beb3ec1ceca199b5bf272227f3f316ec4f4db5476d95b\": container with ID starting with 7b0b0ee9a39768b33b6beb3ec1ceca199b5bf272227f3f316ec4f4db5476d95b not found: ID does not exist" Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.022580 4971 scope.go:117] "RemoveContainer" containerID="54ffa50611ffa0eec5961247c221e99066bb00cb56e1de4b812bdaa2ad8ed220" Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.023382 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/projected/09c00568-2483-43ee-b406-28f3b60dd660-kube-api-access-btk2m" (OuterVolumeSpecName: "kube-api-access-btk2m") pod "09c00568-2483-43ee-b406-28f3b60dd660" (UID: "09c00568-2483-43ee-b406-28f3b60dd660"). InnerVolumeSpecName "kube-api-access-btk2m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:04:47 crc kubenswrapper[4971]: E1127 09:04:47.023414 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54ffa50611ffa0eec5961247c221e99066bb00cb56e1de4b812bdaa2ad8ed220\": container with ID starting with 54ffa50611ffa0eec5961247c221e99066bb00cb56e1de4b812bdaa2ad8ed220 not found: ID does not exist" containerID="54ffa50611ffa0eec5961247c221e99066bb00cb56e1de4b812bdaa2ad8ed220" Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.023473 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54ffa50611ffa0eec5961247c221e99066bb00cb56e1de4b812bdaa2ad8ed220"} err="failed to get container status \"54ffa50611ffa0eec5961247c221e99066bb00cb56e1de4b812bdaa2ad8ed220\": rpc error: code = NotFound desc = could not find container \"54ffa50611ffa0eec5961247c221e99066bb00cb56e1de4b812bdaa2ad8ed220\": container with ID starting with 54ffa50611ffa0eec5961247c221e99066bb00cb56e1de4b812bdaa2ad8ed220 not found: ID does not exist" Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.078701 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "09c00568-2483-43ee-b406-28f3b60dd660" (UID: "09c00568-2483-43ee-b406-28f3b60dd660"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.092321 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-config" (OuterVolumeSpecName: "config") pod "09c00568-2483-43ee-b406-28f3b60dd660" (UID: "09c00568-2483-43ee-b406-28f3b60dd660"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.100807 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "09c00568-2483-43ee-b406-28f3b60dd660" (UID: "09c00568-2483-43ee-b406-28f3b60dd660"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.102685 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "09c00568-2483-43ee-b406-28f3b60dd660" (UID: "09c00568-2483-43ee-b406-28f3b60dd660"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.122996 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.123033 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-config\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.123045 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.123061 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09c00568-2483-43ee-b406-28f3b60dd660-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.123097 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btk2m\" (UniqueName: \"kubernetes.io/projected/09c00568-2483-43ee-b406-28f3b60dd660-kube-api-access-btk2m\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.231387 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc779bffc-k8jn2"] Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.241102 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bc779bffc-k8jn2"] Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.534411 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79c966c8f9-zl9w6"] Nov 27 09:04:47 crc kubenswrapper[4971]: W1127 09:04:47.542718 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3464217_b299_4e96_bab3_c00569d46839.slice/crio-dec39dbd694ae0db28913d8b95b5ccdae8684ecc1de382958f8a6183730a34c5 WatchSource:0}: Error finding container dec39dbd694ae0db28913d8b95b5ccdae8684ecc1de382958f8a6183730a34c5: Status 404 returned error can't find the container with id dec39dbd694ae0db28913d8b95b5ccdae8684ecc1de382958f8a6183730a34c5 Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.889880 4971 generic.go:334] "Generic (PLEG): container finished" podID="d3464217-b299-4e96-bab3-c00569d46839" containerID="f97c5f2916be13d8b165d83e68f23c242f1c5696ec04cd687c74e1df9f3de711" exitCode=0 Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.890021 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" event={"ID":"d3464217-b299-4e96-bab3-c00569d46839","Type":"ContainerDied","Data":"f97c5f2916be13d8b165d83e68f23c242f1c5696ec04cd687c74e1df9f3de711"} Nov 27 09:04:47 crc kubenswrapper[4971]: I1127 09:04:47.890600 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" event={"ID":"d3464217-b299-4e96-bab3-c00569d46839","Type":"ContainerStarted","Data":"dec39dbd694ae0db28913d8b95b5ccdae8684ecc1de382958f8a6183730a34c5"} Nov 27 09:04:48 crc kubenswrapper[4971]: I1127 09:04:48.562832 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09c00568-2483-43ee-b406-28f3b60dd660" path="/var/lib/kubelet/pods/09c00568-2483-43ee-b406-28f3b60dd660/volumes" Nov 27 
09:04:48 crc kubenswrapper[4971]: I1127 09:04:48.907080 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" event={"ID":"d3464217-b299-4e96-bab3-c00569d46839","Type":"ContainerStarted","Data":"7f71a55bf8e1e3c77c04001fac4c6b9ec61bb0e92962d28c6e3f022f611ad688"} Nov 27 09:04:48 crc kubenswrapper[4971]: I1127 09:04:48.907645 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:48 crc kubenswrapper[4971]: I1127 09:04:48.934114 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" podStartSLOduration=2.9340917859999998 podStartE2EDuration="2.934091786s" podCreationTimestamp="2025-11-27 09:04:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 09:04:48.924549412 +0000 UTC m=+7927.116593330" watchObservedRunningTime="2025-11-27 09:04:48.934091786 +0000 UTC m=+7927.126135704" Nov 27 09:04:52 crc kubenswrapper[4971]: I1127 09:04:52.557181 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168" Nov 27 09:04:52 crc kubenswrapper[4971]: E1127 09:04:52.557951 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:04:56 crc kubenswrapper[4971]: I1127 09:04:56.194931 4971 scope.go:117] "RemoveContainer" containerID="cc2916b73b97ece859f8abc68826e4215470d807b9ae4e2c8ceaa6a7a4790447" Nov 27 09:04:56 crc kubenswrapper[4971]: I1127 09:04:56.225286 4971 scope.go:117] "RemoveContainer" containerID="217b88d2bffc8b52d0946665d723fe0f2c3589500d8debe45c7d281749be052a" Nov 27 09:04:56 crc kubenswrapper[4971]: I1127 09:04:56.280862 4971 scope.go:117] "RemoveContainer" containerID="5dbfca667d556d21fb7bd2060f1eb017e32c820a35886d5269913cf3d92fa360" Nov 27 09:04:56 crc kubenswrapper[4971]: I1127 09:04:56.352859 4971 scope.go:117] "RemoveContainer" containerID="cc757a4e86a535473ae72e0025a96aa9a3c01ef23b0bd305016b8230903dfaeb" Nov 27 09:04:56 crc kubenswrapper[4971]: I1127 09:04:56.383316 4971 scope.go:117] "RemoveContainer" containerID="f0e915b9be57e48e3b5aee1002fae8d3fc94328f723583ceb5e063155efd5cea" Nov 27 09:04:56 crc kubenswrapper[4971]: I1127 09:04:56.435662 4971 scope.go:117] "RemoveContainer" containerID="60e0950e080fd373e5c08cf73708afaa541cacd867fe60c4cdcb8a8650af61d1" Nov 27 09:04:56 crc kubenswrapper[4971]: I1127 09:04:56.497126 4971 scope.go:117] "RemoveContainer" containerID="06b8e541e148417e34aa29d66e1a98adc537fb510ea0fa8574487380a2988ada" Nov 27 09:04:56 crc kubenswrapper[4971]: I1127 09:04:56.901746 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-79c966c8f9-zl9w6" Nov 27 09:04:56 crc kubenswrapper[4971]: I1127 09:04:56.964976 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-599c85c499-qv6wx"] Nov 27 09:04:56 crc kubenswrapper[4971]: I1127 09:04:56.965264 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-599c85c499-qv6wx" 
podUID="7771659e-e6ad-48ad-b05e-d868c4e882f5" containerName="dnsmasq-dns" containerID="cri-o://e22c4baa305eb134779c1def51928ef09092aec7642aa877a967f8e8e0921b7d" gracePeriod=10 Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.532973 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.716693 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-openstack-networker\") pod \"7771659e-e6ad-48ad-b05e-d868c4e882f5\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.717630 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-openstack-cell1\") pod \"7771659e-e6ad-48ad-b05e-d868c4e882f5\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.717868 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-dns-svc\") pod \"7771659e-e6ad-48ad-b05e-d868c4e882f5\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.718110 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-config\") pod \"7771659e-e6ad-48ad-b05e-d868c4e882f5\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.718306 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sx4nv\" (UniqueName: \"kubernetes.io/projected/7771659e-e6ad-48ad-b05e-d868c4e882f5-kube-api-access-sx4nv\") pod \"7771659e-e6ad-48ad-b05e-d868c4e882f5\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.718431 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-ovsdbserver-sb\") pod \"7771659e-e6ad-48ad-b05e-d868c4e882f5\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.718758 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-ovsdbserver-nb\") pod \"7771659e-e6ad-48ad-b05e-d868c4e882f5\" (UID: \"7771659e-e6ad-48ad-b05e-d868c4e882f5\") " Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.748114 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7771659e-e6ad-48ad-b05e-d868c4e882f5-kube-api-access-sx4nv" (OuterVolumeSpecName: "kube-api-access-sx4nv") pod "7771659e-e6ad-48ad-b05e-d868c4e882f5" (UID: "7771659e-e6ad-48ad-b05e-d868c4e882f5"). InnerVolumeSpecName "kube-api-access-sx4nv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.782779 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-config" (OuterVolumeSpecName: "config") pod "7771659e-e6ad-48ad-b05e-d868c4e882f5" (UID: "7771659e-e6ad-48ad-b05e-d868c4e882f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.786291 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-openstack-cell1" (OuterVolumeSpecName: "openstack-cell1") pod "7771659e-e6ad-48ad-b05e-d868c4e882f5" (UID: "7771659e-e6ad-48ad-b05e-d868c4e882f5"). InnerVolumeSpecName "openstack-cell1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.791930 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7771659e-e6ad-48ad-b05e-d868c4e882f5" (UID: "7771659e-e6ad-48ad-b05e-d868c4e882f5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.792041 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7771659e-e6ad-48ad-b05e-d868c4e882f5" (UID: "7771659e-e6ad-48ad-b05e-d868c4e882f5"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.799957 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7771659e-e6ad-48ad-b05e-d868c4e882f5" (UID: "7771659e-e6ad-48ad-b05e-d868c4e882f5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.816427 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-openstack-networker" (OuterVolumeSpecName: "openstack-networker") pod "7771659e-e6ad-48ad-b05e-d868c4e882f5" (UID: "7771659e-e6ad-48ad-b05e-d868c4e882f5"). InnerVolumeSpecName "openstack-networker". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.824623 4971 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.824674 4971 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-config\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.824687 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sx4nv\" (UniqueName: \"kubernetes.io/projected/7771659e-e6ad-48ad-b05e-d868c4e882f5-kube-api-access-sx4nv\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.824703 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.824722 4971 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.824732 4971 reconciler_common.go:293] "Volume detached for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-openstack-networker\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:57 crc kubenswrapper[4971]: I1127 09:04:57.824741 4971 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/7771659e-e6ad-48ad-b05e-d868c4e882f5-openstack-cell1\") on node \"crc\" DevicePath \"\"" Nov 27 09:04:58 crc kubenswrapper[4971]: I1127 09:04:58.013553 4971 generic.go:334] "Generic (PLEG): container finished" podID="7771659e-e6ad-48ad-b05e-d868c4e882f5" containerID="e22c4baa305eb134779c1def51928ef09092aec7642aa877a967f8e8e0921b7d" exitCode=0 Nov 27 09:04:58 crc kubenswrapper[4971]: I1127 09:04:58.013612 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-599c85c499-qv6wx" event={"ID":"7771659e-e6ad-48ad-b05e-d868c4e882f5","Type":"ContainerDied","Data":"e22c4baa305eb134779c1def51928ef09092aec7642aa877a967f8e8e0921b7d"} Nov 27 09:04:58 crc kubenswrapper[4971]: I1127 09:04:58.013700 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-599c85c499-qv6wx" event={"ID":"7771659e-e6ad-48ad-b05e-d868c4e882f5","Type":"ContainerDied","Data":"ddfc68957c4723cd18ac3bca29e175677f1b88e556693d220ea8059a259d608e"} Nov 27 09:04:58 crc kubenswrapper[4971]: I1127 09:04:58.013726 4971 scope.go:117] "RemoveContainer" containerID="e22c4baa305eb134779c1def51928ef09092aec7642aa877a967f8e8e0921b7d" Nov 27 09:04:58 crc kubenswrapper[4971]: I1127 09:04:58.013947 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-599c85c499-qv6wx" Nov 27 09:04:58 crc kubenswrapper[4971]: I1127 09:04:58.075333 4971 scope.go:117] "RemoveContainer" containerID="26d7591779302d8fc12337571ede5b5ef73f0dfe5f95dc3de26bf0a4b62aabb4" Nov 27 09:04:58 crc kubenswrapper[4971]: I1127 09:04:58.075476 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7dg5v"] Nov 27 09:04:58 crc kubenswrapper[4971]: I1127 09:04:58.090495 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7dg5v"] Nov 27 09:04:58 crc kubenswrapper[4971]: I1127 09:04:58.100953 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-599c85c499-qv6wx"] Nov 27 09:04:58 crc kubenswrapper[4971]: I1127 09:04:58.115193 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-599c85c499-qv6wx"] Nov 27 09:04:58 crc kubenswrapper[4971]: I1127 09:04:58.131301 4971 scope.go:117] "RemoveContainer" containerID="e22c4baa305eb134779c1def51928ef09092aec7642aa877a967f8e8e0921b7d" Nov 27 09:04:58 crc kubenswrapper[4971]: E1127 09:04:58.131842 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e22c4baa305eb134779c1def51928ef09092aec7642aa877a967f8e8e0921b7d\": container with ID starting with e22c4baa305eb134779c1def51928ef09092aec7642aa877a967f8e8e0921b7d not found: ID does not exist" containerID="e22c4baa305eb134779c1def51928ef09092aec7642aa877a967f8e8e0921b7d" Nov 27 09:04:58 crc kubenswrapper[4971]: I1127 09:04:58.131875 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e22c4baa305eb134779c1def51928ef09092aec7642aa877a967f8e8e0921b7d"} err="failed to get container status \"e22c4baa305eb134779c1def51928ef09092aec7642aa877a967f8e8e0921b7d\": rpc error: code = NotFound desc = could not find container \"e22c4baa305eb134779c1def51928ef09092aec7642aa877a967f8e8e0921b7d\": container with ID starting with e22c4baa305eb134779c1def51928ef09092aec7642aa877a967f8e8e0921b7d not found: ID does not exist" Nov 27 09:04:58 crc kubenswrapper[4971]: I1127 09:04:58.131898 4971 scope.go:117] "RemoveContainer" containerID="26d7591779302d8fc12337571ede5b5ef73f0dfe5f95dc3de26bf0a4b62aabb4" Nov 27 09:04:58 crc kubenswrapper[4971]: E1127 09:04:58.132099 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26d7591779302d8fc12337571ede5b5ef73f0dfe5f95dc3de26bf0a4b62aabb4\": container with ID starting with 26d7591779302d8fc12337571ede5b5ef73f0dfe5f95dc3de26bf0a4b62aabb4 not found: ID does not exist" containerID="26d7591779302d8fc12337571ede5b5ef73f0dfe5f95dc3de26bf0a4b62aabb4" Nov 27 09:04:58 crc kubenswrapper[4971]: I1127 09:04:58.132120 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26d7591779302d8fc12337571ede5b5ef73f0dfe5f95dc3de26bf0a4b62aabb4"} err="failed to get container status \"26d7591779302d8fc12337571ede5b5ef73f0dfe5f95dc3de26bf0a4b62aabb4\": rpc error: code = NotFound desc = could not find container \"26d7591779302d8fc12337571ede5b5ef73f0dfe5f95dc3de26bf0a4b62aabb4\": container with ID starting with 26d7591779302d8fc12337571ede5b5ef73f0dfe5f95dc3de26bf0a4b62aabb4 not found: ID does not exist" Nov 27 09:04:58 crc kubenswrapper[4971]: I1127 09:04:58.778578 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="7771659e-e6ad-48ad-b05e-d868c4e882f5" path="/var/lib/kubelet/pods/7771659e-e6ad-48ad-b05e-d868c4e882f5/volumes" Nov 27 09:04:58 crc kubenswrapper[4971]: I1127 09:04:58.780116 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be8ebf05-06da-4453-a63a-60a42cbe569f" path="/var/lib/kubelet/pods/be8ebf05-06da-4453-a63a-60a42cbe569f/volumes" Nov 27 09:04:59 crc kubenswrapper[4971]: I1127 09:04:59.041146 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-f6m7q"] Nov 27 09:04:59 crc kubenswrapper[4971]: I1127 09:04:59.053276 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-f6m7q"] Nov 27 09:05:00 crc kubenswrapper[4971]: I1127 09:05:00.563909 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4cb90f92-2547-47f8-adc6-7c7033567f90" path="/var/lib/kubelet/pods/4cb90f92-2547-47f8-adc6-7c7033567f90/volumes" Nov 27 09:05:05 crc kubenswrapper[4971]: I1127 09:05:05.550548 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168" Nov 27 09:05:05 crc kubenswrapper[4971]: E1127 09:05:05.551547 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.224690 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89"] Nov 27 09:05:07 crc kubenswrapper[4971]: E1127 09:05:07.225570 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09c00568-2483-43ee-b406-28f3b60dd660" containerName="init" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.225585 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="09c00568-2483-43ee-b406-28f3b60dd660" containerName="init" Nov 27 09:05:07 crc kubenswrapper[4971]: E1127 09:05:07.225610 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09c00568-2483-43ee-b406-28f3b60dd660" containerName="dnsmasq-dns" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.225616 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="09c00568-2483-43ee-b406-28f3b60dd660" containerName="dnsmasq-dns" Nov 27 09:05:07 crc kubenswrapper[4971]: E1127 09:05:07.225634 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7771659e-e6ad-48ad-b05e-d868c4e882f5" containerName="dnsmasq-dns" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.225640 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="7771659e-e6ad-48ad-b05e-d868c4e882f5" containerName="dnsmasq-dns" Nov 27 09:05:07 crc kubenswrapper[4971]: E1127 09:05:07.225655 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7771659e-e6ad-48ad-b05e-d868c4e882f5" containerName="init" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.225661 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="7771659e-e6ad-48ad-b05e-d868c4e882f5" containerName="init" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.225916 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="09c00568-2483-43ee-b406-28f3b60dd660" containerName="dnsmasq-dns" Nov 27 
09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.225951 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="7771659e-e6ad-48ad-b05e-d868c4e882f5" containerName="dnsmasq-dns" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.226932 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.234900 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.235313 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-jnkbm" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.235518 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.235931 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.246722 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89"] Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.262634 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh"] Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.264662 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.271984 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.274033 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-hbhvx" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.275714 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.275792 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.275982 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" Nov 27 09:05:07 crc 
kubenswrapper[4971]: I1127 09:05:07.276138 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmdzp\" (UniqueName: \"kubernetes.io/projected/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-kube-api-access-wmdzp\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.276293 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.297234 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh"] Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.379429 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.379583 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.379636 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-467zt\" (UniqueName: \"kubernetes.io/projected/2da7d353-1007-422a-aebb-e80704a87fba-kube-api-access-467zt\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh\" (UID: \"2da7d353-1007-422a-aebb-e80704a87fba\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.379680 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmdzp\" (UniqueName: \"kubernetes.io/projected/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-kube-api-access-wmdzp\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.379822 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2da7d353-1007-422a-aebb-e80704a87fba-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh\" (UID: \"2da7d353-1007-422a-aebb-e80704a87fba\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" Nov 27 09:05:07 crc 
kubenswrapper[4971]: I1127 09:05:07.380119 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.380199 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2da7d353-1007-422a-aebb-e80704a87fba-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh\" (UID: \"2da7d353-1007-422a-aebb-e80704a87fba\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.380297 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2da7d353-1007-422a-aebb-e80704a87fba-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh\" (UID: \"2da7d353-1007-422a-aebb-e80704a87fba\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.380483 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.386601 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.386706 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.395877 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.398878 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmdzp\" (UniqueName: \"kubernetes.io/projected/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-kube-api-access-wmdzp\") pod 
\"pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.402078 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.481961 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-467zt\" (UniqueName: \"kubernetes.io/projected/2da7d353-1007-422a-aebb-e80704a87fba-kube-api-access-467zt\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh\" (UID: \"2da7d353-1007-422a-aebb-e80704a87fba\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.482352 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2da7d353-1007-422a-aebb-e80704a87fba-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh\" (UID: \"2da7d353-1007-422a-aebb-e80704a87fba\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.482510 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2da7d353-1007-422a-aebb-e80704a87fba-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh\" (UID: \"2da7d353-1007-422a-aebb-e80704a87fba\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.482654 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2da7d353-1007-422a-aebb-e80704a87fba-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh\" (UID: \"2da7d353-1007-422a-aebb-e80704a87fba\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.489025 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2da7d353-1007-422a-aebb-e80704a87fba-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh\" (UID: \"2da7d353-1007-422a-aebb-e80704a87fba\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.490210 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2da7d353-1007-422a-aebb-e80704a87fba-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh\" (UID: \"2da7d353-1007-422a-aebb-e80704a87fba\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.492092 4971 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2da7d353-1007-422a-aebb-e80704a87fba-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh\" (UID: \"2da7d353-1007-422a-aebb-e80704a87fba\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.503589 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-467zt\" (UniqueName: \"kubernetes.io/projected/2da7d353-1007-422a-aebb-e80704a87fba-kube-api-access-467zt\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh\" (UID: \"2da7d353-1007-422a-aebb-e80704a87fba\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.551168 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" Nov 27 09:05:07 crc kubenswrapper[4971]: I1127 09:05:07.599128 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" Nov 27 09:05:08 crc kubenswrapper[4971]: I1127 09:05:08.162727 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89"] Nov 27 09:05:08 crc kubenswrapper[4971]: I1127 09:05:08.960697 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh"] Nov 27 09:05:09 crc kubenswrapper[4971]: I1127 09:05:09.135150 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" event={"ID":"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9","Type":"ContainerStarted","Data":"ed4c1a333f613d860552213b8f042a729ee31ad53857dfe75ba01b2c29104926"} Nov 27 09:05:09 crc kubenswrapper[4971]: I1127 09:05:09.139919 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" event={"ID":"2da7d353-1007-422a-aebb-e80704a87fba","Type":"ContainerStarted","Data":"3891ad1bfda31f3521ab462cb05ad337322e8850aa4ed3273a70ac236ab35167"} Nov 27 09:05:18 crc kubenswrapper[4971]: I1127 09:05:18.057266 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-zfsxx"] Nov 27 09:05:18 crc kubenswrapper[4971]: I1127 09:05:18.068187 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-zfsxx"] Nov 27 09:05:18 crc kubenswrapper[4971]: I1127 09:05:18.567792 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bea64ae-37fc-465f-9313-83d1f02236f1" path="/var/lib/kubelet/pods/5bea64ae-37fc-465f-9313-83d1f02236f1/volumes" Nov 27 09:05:20 crc kubenswrapper[4971]: I1127 09:05:20.300636 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" event={"ID":"2da7d353-1007-422a-aebb-e80704a87fba","Type":"ContainerStarted","Data":"738635cf1863c683fdc400a57eadf1e90c5112b2308b881ff7b1a8f6a789f5eb"} Nov 27 09:05:20 crc kubenswrapper[4971]: I1127 09:05:20.315366 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" 
event={"ID":"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9","Type":"ContainerStarted","Data":"764fce0e5c28b3ead5aed83a092af2f0913840010ab9574e2860dae1ff33d8f4"} Nov 27 09:05:20 crc kubenswrapper[4971]: I1127 09:05:20.327984 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" podStartSLOduration=3.031126996 podStartE2EDuration="13.327959513s" podCreationTimestamp="2025-11-27 09:05:07 +0000 UTC" firstStartedPulling="2025-11-27 09:05:08.974829831 +0000 UTC m=+7947.166873739" lastFinishedPulling="2025-11-27 09:05:19.271662298 +0000 UTC m=+7957.463706256" observedRunningTime="2025-11-27 09:05:20.323085123 +0000 UTC m=+7958.515129071" watchObservedRunningTime="2025-11-27 09:05:20.327959513 +0000 UTC m=+7958.520003431" Nov 27 09:05:20 crc kubenswrapper[4971]: I1127 09:05:20.347354 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" podStartSLOduration=2.222583432 podStartE2EDuration="13.347335118s" podCreationTimestamp="2025-11-27 09:05:07 +0000 UTC" firstStartedPulling="2025-11-27 09:05:08.164852716 +0000 UTC m=+7946.356896624" lastFinishedPulling="2025-11-27 09:05:19.289604382 +0000 UTC m=+7957.481648310" observedRunningTime="2025-11-27 09:05:20.342285983 +0000 UTC m=+7958.534329921" watchObservedRunningTime="2025-11-27 09:05:20.347335118 +0000 UTC m=+7958.539379036" Nov 27 09:05:20 crc kubenswrapper[4971]: I1127 09:05:20.550713 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168" Nov 27 09:05:20 crc kubenswrapper[4971]: E1127 09:05:20.551267 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:05:30 crc kubenswrapper[4971]: I1127 09:05:30.445694 4971 generic.go:334] "Generic (PLEG): container finished" podID="2da7d353-1007-422a-aebb-e80704a87fba" containerID="738635cf1863c683fdc400a57eadf1e90c5112b2308b881ff7b1a8f6a789f5eb" exitCode=0 Nov 27 09:05:30 crc kubenswrapper[4971]: I1127 09:05:30.445778 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" event={"ID":"2da7d353-1007-422a-aebb-e80704a87fba","Type":"ContainerDied","Data":"738635cf1863c683fdc400a57eadf1e90c5112b2308b881ff7b1a8f6a789f5eb"} Nov 27 09:05:32 crc kubenswrapper[4971]: I1127 09:05:32.010097 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" Nov 27 09:05:32 crc kubenswrapper[4971]: I1127 09:05:32.095022 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2da7d353-1007-422a-aebb-e80704a87fba-inventory\") pod \"2da7d353-1007-422a-aebb-e80704a87fba\" (UID: \"2da7d353-1007-422a-aebb-e80704a87fba\") " Nov 27 09:05:32 crc kubenswrapper[4971]: I1127 09:05:32.095233 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2da7d353-1007-422a-aebb-e80704a87fba-ssh-key\") pod \"2da7d353-1007-422a-aebb-e80704a87fba\" (UID: \"2da7d353-1007-422a-aebb-e80704a87fba\") " Nov 27 09:05:32 crc kubenswrapper[4971]: I1127 09:05:32.095429 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-467zt\" (UniqueName: \"kubernetes.io/projected/2da7d353-1007-422a-aebb-e80704a87fba-kube-api-access-467zt\") pod \"2da7d353-1007-422a-aebb-e80704a87fba\" (UID: \"2da7d353-1007-422a-aebb-e80704a87fba\") " Nov 27 09:05:32 crc kubenswrapper[4971]: I1127 09:05:32.095598 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2da7d353-1007-422a-aebb-e80704a87fba-pre-adoption-validation-combined-ca-bundle\") pod \"2da7d353-1007-422a-aebb-e80704a87fba\" (UID: \"2da7d353-1007-422a-aebb-e80704a87fba\") " Nov 27 09:05:32 crc kubenswrapper[4971]: I1127 09:05:32.102303 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2da7d353-1007-422a-aebb-e80704a87fba-kube-api-access-467zt" (OuterVolumeSpecName: "kube-api-access-467zt") pod "2da7d353-1007-422a-aebb-e80704a87fba" (UID: "2da7d353-1007-422a-aebb-e80704a87fba"). InnerVolumeSpecName "kube-api-access-467zt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:05:32 crc kubenswrapper[4971]: I1127 09:05:32.103303 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2da7d353-1007-422a-aebb-e80704a87fba-pre-adoption-validation-combined-ca-bundle" (OuterVolumeSpecName: "pre-adoption-validation-combined-ca-bundle") pod "2da7d353-1007-422a-aebb-e80704a87fba" (UID: "2da7d353-1007-422a-aebb-e80704a87fba"). InnerVolumeSpecName "pre-adoption-validation-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:05:32 crc kubenswrapper[4971]: I1127 09:05:32.125306 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2da7d353-1007-422a-aebb-e80704a87fba-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2da7d353-1007-422a-aebb-e80704a87fba" (UID: "2da7d353-1007-422a-aebb-e80704a87fba"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:05:32 crc kubenswrapper[4971]: I1127 09:05:32.125911 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2da7d353-1007-422a-aebb-e80704a87fba-inventory" (OuterVolumeSpecName: "inventory") pod "2da7d353-1007-422a-aebb-e80704a87fba" (UID: "2da7d353-1007-422a-aebb-e80704a87fba"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:05:32 crc kubenswrapper[4971]: I1127 09:05:32.199700 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-467zt\" (UniqueName: \"kubernetes.io/projected/2da7d353-1007-422a-aebb-e80704a87fba-kube-api-access-467zt\") on node \"crc\" DevicePath \"\"" Nov 27 09:05:32 crc kubenswrapper[4971]: I1127 09:05:32.199761 4971 reconciler_common.go:293] "Volume detached for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2da7d353-1007-422a-aebb-e80704a87fba-pre-adoption-validation-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:05:32 crc kubenswrapper[4971]: I1127 09:05:32.199797 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2da7d353-1007-422a-aebb-e80704a87fba-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:05:32 crc kubenswrapper[4971]: I1127 09:05:32.199847 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2da7d353-1007-422a-aebb-e80704a87fba-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:05:32 crc kubenswrapper[4971]: I1127 09:05:32.469252 4971 generic.go:334] "Generic (PLEG): container finished" podID="1dbb8cb6-a78a-4bdd-8508-aa1439f484a9" containerID="764fce0e5c28b3ead5aed83a092af2f0913840010ab9574e2860dae1ff33d8f4" exitCode=0 Nov 27 09:05:32 crc kubenswrapper[4971]: I1127 09:05:32.469692 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" event={"ID":"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9","Type":"ContainerDied","Data":"764fce0e5c28b3ead5aed83a092af2f0913840010ab9574e2860dae1ff33d8f4"} Nov 27 09:05:32 crc kubenswrapper[4971]: I1127 09:05:32.472700 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" event={"ID":"2da7d353-1007-422a-aebb-e80704a87fba","Type":"ContainerDied","Data":"3891ad1bfda31f3521ab462cb05ad337322e8850aa4ed3273a70ac236ab35167"} Nov 27 09:05:32 crc kubenswrapper[4971]: I1127 09:05:32.472728 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3891ad1bfda31f3521ab462cb05ad337322e8850aa4ed3273a70ac236ab35167" Nov 27 09:05:32 crc kubenswrapper[4971]: I1127 09:05:32.472768 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh" Nov 27 09:05:33 crc kubenswrapper[4971]: I1127 09:05:33.997169 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" Nov 27 09:05:34 crc kubenswrapper[4971]: I1127 09:05:34.149922 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-pre-adoption-validation-combined-ca-bundle\") pod \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " Nov 27 09:05:34 crc kubenswrapper[4971]: I1127 09:05:34.150163 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-inventory\") pod \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " Nov 27 09:05:34 crc kubenswrapper[4971]: I1127 09:05:34.150203 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmdzp\" (UniqueName: \"kubernetes.io/projected/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-kube-api-access-wmdzp\") pod \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " Nov 27 09:05:34 crc kubenswrapper[4971]: I1127 09:05:34.150287 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-ceph\") pod \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " Nov 27 09:05:34 crc kubenswrapper[4971]: I1127 09:05:34.150319 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-ssh-key\") pod \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\" (UID: \"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9\") " Nov 27 09:05:34 crc kubenswrapper[4971]: I1127 09:05:34.157133 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-pre-adoption-validation-combined-ca-bundle" (OuterVolumeSpecName: "pre-adoption-validation-combined-ca-bundle") pod "1dbb8cb6-a78a-4bdd-8508-aa1439f484a9" (UID: "1dbb8cb6-a78a-4bdd-8508-aa1439f484a9"). InnerVolumeSpecName "pre-adoption-validation-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:05:34 crc kubenswrapper[4971]: I1127 09:05:34.157173 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-ceph" (OuterVolumeSpecName: "ceph") pod "1dbb8cb6-a78a-4bdd-8508-aa1439f484a9" (UID: "1dbb8cb6-a78a-4bdd-8508-aa1439f484a9"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:05:34 crc kubenswrapper[4971]: I1127 09:05:34.157874 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-kube-api-access-wmdzp" (OuterVolumeSpecName: "kube-api-access-wmdzp") pod "1dbb8cb6-a78a-4bdd-8508-aa1439f484a9" (UID: "1dbb8cb6-a78a-4bdd-8508-aa1439f484a9"). InnerVolumeSpecName "kube-api-access-wmdzp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:05:34 crc kubenswrapper[4971]: I1127 09:05:34.180765 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1dbb8cb6-a78a-4bdd-8508-aa1439f484a9" (UID: "1dbb8cb6-a78a-4bdd-8508-aa1439f484a9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:05:34 crc kubenswrapper[4971]: I1127 09:05:34.200724 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-inventory" (OuterVolumeSpecName: "inventory") pod "1dbb8cb6-a78a-4bdd-8508-aa1439f484a9" (UID: "1dbb8cb6-a78a-4bdd-8508-aa1439f484a9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:05:34 crc kubenswrapper[4971]: I1127 09:05:34.253636 4971 reconciler_common.go:293] "Volume detached for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-pre-adoption-validation-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:05:34 crc kubenswrapper[4971]: I1127 09:05:34.253690 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:05:34 crc kubenswrapper[4971]: I1127 09:05:34.253707 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmdzp\" (UniqueName: \"kubernetes.io/projected/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-kube-api-access-wmdzp\") on node \"crc\" DevicePath \"\"" Nov 27 09:05:34 crc kubenswrapper[4971]: I1127 09:05:34.253723 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-ceph\") on node \"crc\" DevicePath \"\"" Nov 27 09:05:34 crc kubenswrapper[4971]: I1127 09:05:34.253735 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1dbb8cb6-a78a-4bdd-8508-aa1439f484a9-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:05:34 crc kubenswrapper[4971]: I1127 09:05:34.499256 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" event={"ID":"1dbb8cb6-a78a-4bdd-8508-aa1439f484a9","Type":"ContainerDied","Data":"ed4c1a333f613d860552213b8f042a729ee31ad53857dfe75ba01b2c29104926"} Nov 27 09:05:34 crc kubenswrapper[4971]: I1127 09:05:34.499311 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed4c1a333f613d860552213b8f042a729ee31ad53857dfe75ba01b2c29104926" Nov 27 09:05:34 crc kubenswrapper[4971]: I1127 09:05:34.499417 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89" Nov 27 09:05:35 crc kubenswrapper[4971]: I1127 09:05:35.552435 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168" Nov 27 09:05:35 crc kubenswrapper[4971]: E1127 09:05:35.553512 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.041349 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd"] Nov 27 09:05:40 crc kubenswrapper[4971]: E1127 09:05:40.042708 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dbb8cb6-a78a-4bdd-8508-aa1439f484a9" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.042728 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dbb8cb6-a78a-4bdd-8508-aa1439f484a9" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Nov 27 09:05:40 crc kubenswrapper[4971]: E1127 09:05:40.042757 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2da7d353-1007-422a-aebb-e80704a87fba" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-networ" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.042769 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="2da7d353-1007-422a-aebb-e80704a87fba" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-networ" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.044575 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="2da7d353-1007-422a-aebb-e80704a87fba" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-networ" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.044640 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="1dbb8cb6-a78a-4bdd-8508-aa1439f484a9" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.045692 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.052933 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr"] Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.055079 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.061895 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.069507 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-jnkbm" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.069651 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.069687 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.069525 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd"] Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.069874 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.069905 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-hbhvx" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.078973 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr"] Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.118451 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhwlc\" (UniqueName: \"kubernetes.io/projected/f3bbe788-f3b6-4502-b497-d9aa6937ae74-kube-api-access-qhwlc\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr\" (UID: \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.118496 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.118709 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f3bbe788-f3b6-4502-b497-d9aa6937ae74-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr\" (UID: \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.118814 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45qt8\" (UniqueName: \"kubernetes.io/projected/ea8298a5-6819-4be7-a1d9-1af470a81021-kube-api-access-45qt8\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.118837 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" 
(UniqueName: \"kubernetes.io/secret/f3bbe788-f3b6-4502-b497-d9aa6937ae74-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr\" (UID: \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.118892 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.118986 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.119056 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.119230 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3bbe788-f3b6-4502-b497-d9aa6937ae74-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr\" (UID: \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.221286 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3bbe788-f3b6-4502-b497-d9aa6937ae74-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr\" (UID: \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.221354 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhwlc\" (UniqueName: \"kubernetes.io/projected/f3bbe788-f3b6-4502-b497-d9aa6937ae74-kube-api-access-qhwlc\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr\" (UID: \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.221393 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" Nov 27 09:05:40 crc 
kubenswrapper[4971]: I1127 09:05:40.221497 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f3bbe788-f3b6-4502-b497-d9aa6937ae74-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr\" (UID: \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.221565 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45qt8\" (UniqueName: \"kubernetes.io/projected/ea8298a5-6819-4be7-a1d9-1af470a81021-kube-api-access-45qt8\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.221595 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f3bbe788-f3b6-4502-b497-d9aa6937ae74-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr\" (UID: \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.221633 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.221752 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.221812 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.227962 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.228172 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 
09:05:40.228460 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3bbe788-f3b6-4502-b497-d9aa6937ae74-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr\" (UID: \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.228658 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f3bbe788-f3b6-4502-b497-d9aa6937ae74-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr\" (UID: \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.230085 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.230918 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.236369 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhwlc\" (UniqueName: \"kubernetes.io/projected/f3bbe788-f3b6-4502-b497-d9aa6937ae74-kube-api-access-qhwlc\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr\" (UID: \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.238333 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f3bbe788-f3b6-4502-b497-d9aa6937ae74-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr\" (UID: \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.244289 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45qt8\" (UniqueName: \"kubernetes.io/projected/ea8298a5-6819-4be7-a1d9-1af470a81021-kube-api-access-45qt8\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.392232 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" Nov 27 09:05:40 crc kubenswrapper[4971]: I1127 09:05:40.408616 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" Nov 27 09:05:41 crc kubenswrapper[4971]: I1127 09:05:41.044386 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd"] Nov 27 09:05:41 crc kubenswrapper[4971]: I1127 09:05:41.177430 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr"] Nov 27 09:05:41 crc kubenswrapper[4971]: I1127 09:05:41.586018 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" event={"ID":"ea8298a5-6819-4be7-a1d9-1af470a81021","Type":"ContainerStarted","Data":"7bd643b13cc4af49985312cdbae70261da24f69456472e8b43fa421a544b0913"} Nov 27 09:05:41 crc kubenswrapper[4971]: I1127 09:05:41.587572 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" event={"ID":"f3bbe788-f3b6-4502-b497-d9aa6937ae74","Type":"ContainerStarted","Data":"9b56f04aee9f2979a492e1d72eda2bce0ac7f719c53199e091f55293b3af11c5"} Nov 27 09:05:42 crc kubenswrapper[4971]: I1127 09:05:42.601590 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" event={"ID":"f3bbe788-f3b6-4502-b497-d9aa6937ae74","Type":"ContainerStarted","Data":"73829276cbd766b978e41cb1bec0cf4c1135ed1b697e15c6e1536424b781c561"} Nov 27 09:05:42 crc kubenswrapper[4971]: I1127 09:05:42.611167 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" event={"ID":"ea8298a5-6819-4be7-a1d9-1af470a81021","Type":"ContainerStarted","Data":"51990f92580c866971e8beb94e4862bcddb43aee1b2755f05bfd1335e3085026"} Nov 27 09:05:42 crc kubenswrapper[4971]: I1127 09:05:42.642659 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" podStartSLOduration=1.984078157 podStartE2EDuration="2.642637122s" podCreationTimestamp="2025-11-27 09:05:40 +0000 UTC" firstStartedPulling="2025-11-27 09:05:41.165141587 +0000 UTC m=+7979.357185505" lastFinishedPulling="2025-11-27 09:05:41.823700552 +0000 UTC m=+7980.015744470" observedRunningTime="2025-11-27 09:05:42.632222004 +0000 UTC m=+7980.824265922" watchObservedRunningTime="2025-11-27 09:05:42.642637122 +0000 UTC m=+7980.834681040" Nov 27 09:05:42 crc kubenswrapper[4971]: I1127 09:05:42.672280 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" podStartSLOduration=2.160410311 podStartE2EDuration="2.67222616s" podCreationTimestamp="2025-11-27 09:05:40 +0000 UTC" firstStartedPulling="2025-11-27 09:05:41.060351343 +0000 UTC m=+7979.252395261" lastFinishedPulling="2025-11-27 09:05:41.572167192 +0000 UTC m=+7979.764211110" observedRunningTime="2025-11-27 09:05:42.652830404 +0000 UTC m=+7980.844874342" watchObservedRunningTime="2025-11-27 09:05:42.67222616 +0000 UTC m=+7980.864270078" Nov 27 09:05:48 crc kubenswrapper[4971]: I1127 09:05:48.550921 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168" Nov 27 09:05:48 crc kubenswrapper[4971]: E1127 09:05:48.552092 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:05:56 crc kubenswrapper[4971]: I1127 09:05:56.714976 4971 scope.go:117] "RemoveContainer" containerID="e842f2ce0f923c6341eb67d260fd16876582af19ecbe87c4db886e8fda5a6d84" Nov 27 09:05:56 crc kubenswrapper[4971]: I1127 09:05:56.750917 4971 scope.go:117] "RemoveContainer" containerID="dfb1afa4f1ab9e1635b8900422da78d85a3b7c2515e7be13b7f15cb1a1a60cd8" Nov 27 09:05:56 crc kubenswrapper[4971]: I1127 09:05:56.799866 4971 scope.go:117] "RemoveContainer" containerID="16a37aa11cc69085e1f1d0086e25071c7aa52d629323f75d4e6a60c2876ff5d6" Nov 27 09:05:56 crc kubenswrapper[4971]: I1127 09:05:56.868435 4971 scope.go:117] "RemoveContainer" containerID="1419e8c3813c3f1f54d513cf61c79e8c86406d1c6fdfc6d27d1ad512294f5e94" Nov 27 09:05:57 crc kubenswrapper[4971]: I1127 09:05:57.091946 4971 scope.go:117] "RemoveContainer" containerID="196658c0cde7d522b136ba2b922f7123a780e5b94965ec4244d4a8f6a129d110" Nov 27 09:06:01 crc kubenswrapper[4971]: I1127 09:06:01.057090 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-87c3-account-create-update-wwwnv"] Nov 27 09:06:01 crc kubenswrapper[4971]: I1127 09:06:01.072844 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-87c3-account-create-update-wwwnv"] Nov 27 09:06:01 crc kubenswrapper[4971]: I1127 09:06:01.085778 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-t7qgx"] Nov 27 09:06:01 crc kubenswrapper[4971]: I1127 09:06:01.098878 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-t7qgx"] Nov 27 09:06:02 crc kubenswrapper[4971]: I1127 09:06:02.557204 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168" Nov 27 09:06:02 crc kubenswrapper[4971]: E1127 09:06:02.558017 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:06:02 crc kubenswrapper[4971]: I1127 09:06:02.564944 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="adaff279-cded-4a90-9cb8-d6e29cc2b11e" path="/var/lib/kubelet/pods/adaff279-cded-4a90-9cb8-d6e29cc2b11e/volumes" Nov 27 09:06:02 crc kubenswrapper[4971]: I1127 09:06:02.565552 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2d8103f-0a96-40d7-afe9-2d87072e305c" path="/var/lib/kubelet/pods/c2d8103f-0a96-40d7-afe9-2d87072e305c/volumes" Nov 27 09:06:16 crc kubenswrapper[4971]: I1127 09:06:16.557481 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168" Nov 27 09:06:16 crc kubenswrapper[4971]: E1127 09:06:16.563246 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:06:26 crc kubenswrapper[4971]: I1127 09:06:26.040597 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-hxk8s"] Nov 27 09:06:26 crc kubenswrapper[4971]: I1127 09:06:26.053474 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-hxk8s"] Nov 27 09:06:26 crc kubenswrapper[4971]: I1127 09:06:26.568727 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd" path="/var/lib/kubelet/pods/d7b1abd6-ac27-47ca-ae0b-f7b5cf8f9dfd/volumes" Nov 27 09:06:29 crc kubenswrapper[4971]: I1127 09:06:29.552135 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168" Nov 27 09:06:29 crc kubenswrapper[4971]: E1127 09:06:29.553720 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:06:41 crc kubenswrapper[4971]: I1127 09:06:41.550902 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168" Nov 27 09:06:41 crc kubenswrapper[4971]: E1127 09:06:41.551761 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:06:56 crc kubenswrapper[4971]: I1127 09:06:56.551480 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168" Nov 27 09:06:57 crc kubenswrapper[4971]: I1127 09:06:57.248676 4971 scope.go:117] "RemoveContainer" containerID="ce2a5274d731d2ead02a7bc5663e52e170b92ce82f26bd63930a767849a94564" Nov 27 09:06:57 crc kubenswrapper[4971]: I1127 09:06:57.273147 4971 scope.go:117] "RemoveContainer" containerID="79e1cff6761b5ec238e16d9670cf9df84497397534cd528b4b9c76fbd10352bb" Nov 27 09:06:57 crc kubenswrapper[4971]: I1127 09:06:57.334367 4971 scope.go:117] "RemoveContainer" containerID="10577e750b4cd111d9891dc9d5b4720c0e5bd96f687fa9efca1a269f543ca1b0" Nov 27 09:06:57 crc kubenswrapper[4971]: I1127 09:06:57.483853 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"a6e0ac0b7d8b5de8ba640136d7618fa0300e44b75a6d4515b5a7d9cedb252962"} Nov 27 09:07:43 crc kubenswrapper[4971]: I1127 09:07:43.840462 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kbdvc"] Nov 27 09:07:43 crc kubenswrapper[4971]: I1127 09:07:43.844140 4971 util.go:30] "No sandbox for pod can be found. 
Nov 27 09:07:43 crc kubenswrapper[4971]: I1127 09:07:43.853905 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kbdvc"]
Nov 27 09:07:43 crc kubenswrapper[4971]: I1127 09:07:43.997145 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76a354b8-798d-4b51-bc38-06bc9bb516c3-utilities\") pod \"community-operators-kbdvc\" (UID: \"76a354b8-798d-4b51-bc38-06bc9bb516c3\") " pod="openshift-marketplace/community-operators-kbdvc"
Nov 27 09:07:43 crc kubenswrapper[4971]: I1127 09:07:43.997551 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76a354b8-798d-4b51-bc38-06bc9bb516c3-catalog-content\") pod \"community-operators-kbdvc\" (UID: \"76a354b8-798d-4b51-bc38-06bc9bb516c3\") " pod="openshift-marketplace/community-operators-kbdvc"
Nov 27 09:07:43 crc kubenswrapper[4971]: I1127 09:07:43.997599 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtpc7\" (UniqueName: \"kubernetes.io/projected/76a354b8-798d-4b51-bc38-06bc9bb516c3-kube-api-access-qtpc7\") pod \"community-operators-kbdvc\" (UID: \"76a354b8-798d-4b51-bc38-06bc9bb516c3\") " pod="openshift-marketplace/community-operators-kbdvc"
Nov 27 09:07:44 crc kubenswrapper[4971]: I1127 09:07:44.099796 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76a354b8-798d-4b51-bc38-06bc9bb516c3-catalog-content\") pod \"community-operators-kbdvc\" (UID: \"76a354b8-798d-4b51-bc38-06bc9bb516c3\") " pod="openshift-marketplace/community-operators-kbdvc"
Nov 27 09:07:44 crc kubenswrapper[4971]: I1127 09:07:44.099915 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtpc7\" (UniqueName: \"kubernetes.io/projected/76a354b8-798d-4b51-bc38-06bc9bb516c3-kube-api-access-qtpc7\") pod \"community-operators-kbdvc\" (UID: \"76a354b8-798d-4b51-bc38-06bc9bb516c3\") " pod="openshift-marketplace/community-operators-kbdvc"
Nov 27 09:07:44 crc kubenswrapper[4971]: I1127 09:07:44.100071 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76a354b8-798d-4b51-bc38-06bc9bb516c3-utilities\") pod \"community-operators-kbdvc\" (UID: \"76a354b8-798d-4b51-bc38-06bc9bb516c3\") " pod="openshift-marketplace/community-operators-kbdvc"
Nov 27 09:07:44 crc kubenswrapper[4971]: I1127 09:07:44.100386 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76a354b8-798d-4b51-bc38-06bc9bb516c3-catalog-content\") pod \"community-operators-kbdvc\" (UID: \"76a354b8-798d-4b51-bc38-06bc9bb516c3\") " pod="openshift-marketplace/community-operators-kbdvc"
Nov 27 09:07:44 crc kubenswrapper[4971]: I1127 09:07:44.100664 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76a354b8-798d-4b51-bc38-06bc9bb516c3-utilities\") pod \"community-operators-kbdvc\" (UID: \"76a354b8-798d-4b51-bc38-06bc9bb516c3\") " pod="openshift-marketplace/community-operators-kbdvc"
Nov 27 09:07:44 crc kubenswrapper[4971]: I1127 09:07:44.123292 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtpc7\" (UniqueName: \"kubernetes.io/projected/76a354b8-798d-4b51-bc38-06bc9bb516c3-kube-api-access-qtpc7\") pod \"community-operators-kbdvc\" (UID: \"76a354b8-798d-4b51-bc38-06bc9bb516c3\") " pod="openshift-marketplace/community-operators-kbdvc"
Nov 27 09:07:44 crc kubenswrapper[4971]: I1127 09:07:44.169593 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kbdvc"
Nov 27 09:07:44 crc kubenswrapper[4971]: I1127 09:07:44.683122 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kbdvc"]
Nov 27 09:07:45 crc kubenswrapper[4971]: I1127 09:07:45.009493 4971 generic.go:334] "Generic (PLEG): container finished" podID="76a354b8-798d-4b51-bc38-06bc9bb516c3" containerID="91998d6b80a8d7f0308c6ffd030fa2633de4d68ec10b36ac7334ed78d7c56665" exitCode=0
Nov 27 09:07:45 crc kubenswrapper[4971]: I1127 09:07:45.009587 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kbdvc" event={"ID":"76a354b8-798d-4b51-bc38-06bc9bb516c3","Type":"ContainerDied","Data":"91998d6b80a8d7f0308c6ffd030fa2633de4d68ec10b36ac7334ed78d7c56665"}
Nov 27 09:07:45 crc kubenswrapper[4971]: I1127 09:07:45.009927 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kbdvc" event={"ID":"76a354b8-798d-4b51-bc38-06bc9bb516c3","Type":"ContainerStarted","Data":"f6e799c8595a6cd20bf9ed354fee159b4c777264c45d19e67b5e7c15fdef030b"}
Nov 27 09:07:45 crc kubenswrapper[4971]: I1127 09:07:45.011838 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 27 09:07:47 crc kubenswrapper[4971]: I1127 09:07:47.032682 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kbdvc" event={"ID":"76a354b8-798d-4b51-bc38-06bc9bb516c3","Type":"ContainerStarted","Data":"007c3f29c47765515272633cc5fa9e1ad4ef8a9aaa45d21e35ac53e629d4fc8f"}
Nov 27 09:07:48 crc kubenswrapper[4971]: I1127 09:07:48.051842 4971 generic.go:334] "Generic (PLEG): container finished" podID="76a354b8-798d-4b51-bc38-06bc9bb516c3" containerID="007c3f29c47765515272633cc5fa9e1ad4ef8a9aaa45d21e35ac53e629d4fc8f" exitCode=0
Nov 27 09:07:48 crc kubenswrapper[4971]: I1127 09:07:48.051945 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kbdvc" event={"ID":"76a354b8-798d-4b51-bc38-06bc9bb516c3","Type":"ContainerDied","Data":"007c3f29c47765515272633cc5fa9e1ad4ef8a9aaa45d21e35ac53e629d4fc8f"}
Nov 27 09:07:50 crc kubenswrapper[4971]: I1127 09:07:50.077865 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kbdvc" event={"ID":"76a354b8-798d-4b51-bc38-06bc9bb516c3","Type":"ContainerStarted","Data":"5226d999272887ce96a4ed1a0e3d5f01c9dc7c78c685ad969136982a3b47bd3f"}
Nov 27 09:07:54 crc kubenswrapper[4971]: I1127 09:07:54.169874 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kbdvc"
Nov 27 09:07:54 crc kubenswrapper[4971]: I1127 09:07:54.170457 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kbdvc"
Nov 27 09:07:54 crc kubenswrapper[4971]: I1127 09:07:54.218056 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kbdvc"
Nov 27 09:07:54 crc kubenswrapper[4971]: I1127 09:07:54.242525 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kbdvc" podStartSLOduration=6.95267856 podStartE2EDuration="11.242506291s" podCreationTimestamp="2025-11-27 09:07:43 +0000 UTC" firstStartedPulling="2025-11-27 09:07:45.011408179 +0000 UTC m=+8103.203452097" lastFinishedPulling="2025-11-27 09:07:49.30123589 +0000 UTC m=+8107.493279828" observedRunningTime="2025-11-27 09:07:50.106121919 +0000 UTC m=+8108.298165847" watchObservedRunningTime="2025-11-27 09:07:54.242506291 +0000 UTC m=+8112.434550209"
Nov 27 09:07:55 crc kubenswrapper[4971]: I1127 09:07:55.177951 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kbdvc"
Nov 27 09:07:55 crc kubenswrapper[4971]: I1127 09:07:55.232940 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kbdvc"]
Nov 27 09:07:57 crc kubenswrapper[4971]: I1127 09:07:57.150942 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kbdvc" podUID="76a354b8-798d-4b51-bc38-06bc9bb516c3" containerName="registry-server" containerID="cri-o://5226d999272887ce96a4ed1a0e3d5f01c9dc7c78c685ad969136982a3b47bd3f" gracePeriod=2
Nov 27 09:07:59 crc kubenswrapper[4971]: I1127 09:07:59.179501 4971 generic.go:334] "Generic (PLEG): container finished" podID="76a354b8-798d-4b51-bc38-06bc9bb516c3" containerID="5226d999272887ce96a4ed1a0e3d5f01c9dc7c78c685ad969136982a3b47bd3f" exitCode=0
Nov 27 09:07:59 crc kubenswrapper[4971]: I1127 09:07:59.180279 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kbdvc" event={"ID":"76a354b8-798d-4b51-bc38-06bc9bb516c3","Type":"ContainerDied","Data":"5226d999272887ce96a4ed1a0e3d5f01c9dc7c78c685ad969136982a3b47bd3f"}
Nov 27 09:07:59 crc kubenswrapper[4971]: I1127 09:07:59.396326 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kbdvc"
Nov 27 09:07:59 crc kubenswrapper[4971]: I1127 09:07:59.595975 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76a354b8-798d-4b51-bc38-06bc9bb516c3-catalog-content\") pod \"76a354b8-798d-4b51-bc38-06bc9bb516c3\" (UID: \"76a354b8-798d-4b51-bc38-06bc9bb516c3\") "
Nov 27 09:07:59 crc kubenswrapper[4971]: I1127 09:07:59.596264 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76a354b8-798d-4b51-bc38-06bc9bb516c3-utilities\") pod \"76a354b8-798d-4b51-bc38-06bc9bb516c3\" (UID: \"76a354b8-798d-4b51-bc38-06bc9bb516c3\") "
Nov 27 09:07:59 crc kubenswrapper[4971]: I1127 09:07:59.596307 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qtpc7\" (UniqueName: \"kubernetes.io/projected/76a354b8-798d-4b51-bc38-06bc9bb516c3-kube-api-access-qtpc7\") pod \"76a354b8-798d-4b51-bc38-06bc9bb516c3\" (UID: \"76a354b8-798d-4b51-bc38-06bc9bb516c3\") "
Nov 27 09:07:59 crc kubenswrapper[4971]: I1127 09:07:59.596998 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76a354b8-798d-4b51-bc38-06bc9bb516c3-utilities" (OuterVolumeSpecName: "utilities") pod "76a354b8-798d-4b51-bc38-06bc9bb516c3" (UID: "76a354b8-798d-4b51-bc38-06bc9bb516c3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:07:59 crc kubenswrapper[4971]: I1127 09:07:59.598801 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76a354b8-798d-4b51-bc38-06bc9bb516c3-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 09:07:59 crc kubenswrapper[4971]: I1127 09:07:59.603814 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76a354b8-798d-4b51-bc38-06bc9bb516c3-kube-api-access-qtpc7" (OuterVolumeSpecName: "kube-api-access-qtpc7") pod "76a354b8-798d-4b51-bc38-06bc9bb516c3" (UID: "76a354b8-798d-4b51-bc38-06bc9bb516c3"). InnerVolumeSpecName "kube-api-access-qtpc7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:07:59 crc kubenswrapper[4971]: I1127 09:07:59.649908 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76a354b8-798d-4b51-bc38-06bc9bb516c3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "76a354b8-798d-4b51-bc38-06bc9bb516c3" (UID: "76a354b8-798d-4b51-bc38-06bc9bb516c3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:07:59 crc kubenswrapper[4971]: I1127 09:07:59.699948 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76a354b8-798d-4b51-bc38-06bc9bb516c3-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 09:07:59 crc kubenswrapper[4971]: I1127 09:07:59.699986 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qtpc7\" (UniqueName: \"kubernetes.io/projected/76a354b8-798d-4b51-bc38-06bc9bb516c3-kube-api-access-qtpc7\") on node \"crc\" DevicePath \"\"" Nov 27 09:08:00 crc kubenswrapper[4971]: I1127 09:08:00.201790 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kbdvc" event={"ID":"76a354b8-798d-4b51-bc38-06bc9bb516c3","Type":"ContainerDied","Data":"f6e799c8595a6cd20bf9ed354fee159b4c777264c45d19e67b5e7c15fdef030b"} Nov 27 09:08:00 crc kubenswrapper[4971]: I1127 09:08:00.202127 4971 scope.go:117] "RemoveContainer" containerID="5226d999272887ce96a4ed1a0e3d5f01c9dc7c78c685ad969136982a3b47bd3f" Nov 27 09:08:00 crc kubenswrapper[4971]: I1127 09:08:00.201867 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kbdvc" Nov 27 09:08:00 crc kubenswrapper[4971]: I1127 09:08:00.244819 4971 scope.go:117] "RemoveContainer" containerID="007c3f29c47765515272633cc5fa9e1ad4ef8a9aaa45d21e35ac53e629d4fc8f" Nov 27 09:08:00 crc kubenswrapper[4971]: I1127 09:08:00.253346 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kbdvc"] Nov 27 09:08:00 crc kubenswrapper[4971]: I1127 09:08:00.269377 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kbdvc"] Nov 27 09:08:00 crc kubenswrapper[4971]: I1127 09:08:00.275702 4971 scope.go:117] "RemoveContainer" containerID="91998d6b80a8d7f0308c6ffd030fa2633de4d68ec10b36ac7334ed78d7c56665" Nov 27 09:08:00 crc kubenswrapper[4971]: I1127 09:08:00.561863 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76a354b8-798d-4b51-bc38-06bc9bb516c3" path="/var/lib/kubelet/pods/76a354b8-798d-4b51-bc38-06bc9bb516c3/volumes" Nov 27 09:08:01 crc kubenswrapper[4971]: I1127 09:08:01.643679 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8qnwq"] Nov 27 09:08:01 crc kubenswrapper[4971]: E1127 09:08:01.644644 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76a354b8-798d-4b51-bc38-06bc9bb516c3" containerName="extract-utilities" Nov 27 09:08:01 crc kubenswrapper[4971]: I1127 09:08:01.644664 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="76a354b8-798d-4b51-bc38-06bc9bb516c3" containerName="extract-utilities" Nov 27 09:08:01 crc kubenswrapper[4971]: E1127 09:08:01.644715 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76a354b8-798d-4b51-bc38-06bc9bb516c3" containerName="extract-content" Nov 27 09:08:01 crc kubenswrapper[4971]: I1127 09:08:01.644724 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="76a354b8-798d-4b51-bc38-06bc9bb516c3" containerName="extract-content" Nov 27 09:08:01 crc kubenswrapper[4971]: E1127 09:08:01.644754 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76a354b8-798d-4b51-bc38-06bc9bb516c3" containerName="registry-server" Nov 27 09:08:01 crc kubenswrapper[4971]: I1127 09:08:01.644763 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="76a354b8-798d-4b51-bc38-06bc9bb516c3" containerName="registry-server" Nov 27 09:08:01 crc kubenswrapper[4971]: I1127 09:08:01.645048 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="76a354b8-798d-4b51-bc38-06bc9bb516c3" containerName="registry-server" Nov 27 09:08:01 crc kubenswrapper[4971]: I1127 09:08:01.646897 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8qnwq" Nov 27 09:08:01 crc kubenswrapper[4971]: I1127 09:08:01.657120 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8qnwq"] Nov 27 09:08:01 crc kubenswrapper[4971]: I1127 09:08:01.744577 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61fcbc06-3bea-4954-9c9b-572bb4b9d61d-catalog-content\") pod \"redhat-marketplace-8qnwq\" (UID: \"61fcbc06-3bea-4954-9c9b-572bb4b9d61d\") " pod="openshift-marketplace/redhat-marketplace-8qnwq" Nov 27 09:08:01 crc kubenswrapper[4971]: I1127 09:08:01.744674 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61fcbc06-3bea-4954-9c9b-572bb4b9d61d-utilities\") pod \"redhat-marketplace-8qnwq\" (UID: \"61fcbc06-3bea-4954-9c9b-572bb4b9d61d\") " pod="openshift-marketplace/redhat-marketplace-8qnwq" Nov 27 09:08:01 crc kubenswrapper[4971]: I1127 09:08:01.744741 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2dfx\" (UniqueName: \"kubernetes.io/projected/61fcbc06-3bea-4954-9c9b-572bb4b9d61d-kube-api-access-s2dfx\") pod \"redhat-marketplace-8qnwq\" (UID: \"61fcbc06-3bea-4954-9c9b-572bb4b9d61d\") " pod="openshift-marketplace/redhat-marketplace-8qnwq" Nov 27 09:08:01 crc kubenswrapper[4971]: I1127 09:08:01.847136 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61fcbc06-3bea-4954-9c9b-572bb4b9d61d-catalog-content\") pod \"redhat-marketplace-8qnwq\" (UID: \"61fcbc06-3bea-4954-9c9b-572bb4b9d61d\") " pod="openshift-marketplace/redhat-marketplace-8qnwq" Nov 27 09:08:01 crc kubenswrapper[4971]: I1127 09:08:01.847210 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61fcbc06-3bea-4954-9c9b-572bb4b9d61d-utilities\") pod \"redhat-marketplace-8qnwq\" (UID: \"61fcbc06-3bea-4954-9c9b-572bb4b9d61d\") " pod="openshift-marketplace/redhat-marketplace-8qnwq" Nov 27 09:08:01 crc kubenswrapper[4971]: I1127 09:08:01.847249 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dfx\" (UniqueName: \"kubernetes.io/projected/61fcbc06-3bea-4954-9c9b-572bb4b9d61d-kube-api-access-s2dfx\") pod \"redhat-marketplace-8qnwq\" (UID: \"61fcbc06-3bea-4954-9c9b-572bb4b9d61d\") " pod="openshift-marketplace/redhat-marketplace-8qnwq" Nov 27 09:08:01 crc kubenswrapper[4971]: I1127 09:08:01.847951 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61fcbc06-3bea-4954-9c9b-572bb4b9d61d-catalog-content\") pod \"redhat-marketplace-8qnwq\" (UID: \"61fcbc06-3bea-4954-9c9b-572bb4b9d61d\") " pod="openshift-marketplace/redhat-marketplace-8qnwq" Nov 27 09:08:01 crc kubenswrapper[4971]: I1127 09:08:01.848060 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61fcbc06-3bea-4954-9c9b-572bb4b9d61d-utilities\") pod \"redhat-marketplace-8qnwq\" (UID: \"61fcbc06-3bea-4954-9c9b-572bb4b9d61d\") " pod="openshift-marketplace/redhat-marketplace-8qnwq" Nov 27 09:08:01 crc kubenswrapper[4971]: I1127 09:08:01.869063 4971 operation_generator.go:637] "MountVolume.SetUp 
Nov 27 09:08:01 crc kubenswrapper[4971]: I1127 09:08:01.974481 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8qnwq"
Nov 27 09:08:02 crc kubenswrapper[4971]: I1127 09:08:02.470581 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8qnwq"]
Nov 27 09:08:02 crc kubenswrapper[4971]: W1127 09:08:02.477717 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61fcbc06_3bea_4954_9c9b_572bb4b9d61d.slice/crio-e29984508712294440ac96345c058d98f8b489f180551766a5315d16d558b911 WatchSource:0}: Error finding container e29984508712294440ac96345c058d98f8b489f180551766a5315d16d558b911: Status 404 returned error can't find the container with id e29984508712294440ac96345c058d98f8b489f180551766a5315d16d558b911
Nov 27 09:08:03 crc kubenswrapper[4971]: I1127 09:08:03.249095 4971 generic.go:334] "Generic (PLEG): container finished" podID="61fcbc06-3bea-4954-9c9b-572bb4b9d61d" containerID="1e509ebaa4890add96d9fa88e057c3a0fa960a3d217484be825d2b4af38f2060" exitCode=0
Nov 27 09:08:03 crc kubenswrapper[4971]: I1127 09:08:03.249151 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8qnwq" event={"ID":"61fcbc06-3bea-4954-9c9b-572bb4b9d61d","Type":"ContainerDied","Data":"1e509ebaa4890add96d9fa88e057c3a0fa960a3d217484be825d2b4af38f2060"}
Nov 27 09:08:03 crc kubenswrapper[4971]: I1127 09:08:03.249505 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8qnwq" event={"ID":"61fcbc06-3bea-4954-9c9b-572bb4b9d61d","Type":"ContainerStarted","Data":"e29984508712294440ac96345c058d98f8b489f180551766a5315d16d558b911"}
Nov 27 09:08:05 crc kubenswrapper[4971]: I1127 09:08:05.274078 4971 generic.go:334] "Generic (PLEG): container finished" podID="61fcbc06-3bea-4954-9c9b-572bb4b9d61d" containerID="71421fda05d3151121c7b96eb17e484a6e9274088dfa79394441b614477771ff" exitCode=0
Nov 27 09:08:05 crc kubenswrapper[4971]: I1127 09:08:05.274138 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8qnwq" event={"ID":"61fcbc06-3bea-4954-9c9b-572bb4b9d61d","Type":"ContainerDied","Data":"71421fda05d3151121c7b96eb17e484a6e9274088dfa79394441b614477771ff"}
Nov 27 09:08:06 crc kubenswrapper[4971]: I1127 09:08:06.292249 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8qnwq" event={"ID":"61fcbc06-3bea-4954-9c9b-572bb4b9d61d","Type":"ContainerStarted","Data":"6c46d6ce88f67c29b44514d772aaae1c9c895d766fe0c29a257c05dd12a85702"}
Nov 27 09:08:06 crc kubenswrapper[4971]: I1127 09:08:06.318835 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8qnwq" podStartSLOduration=2.822484201 podStartE2EDuration="5.318816539s" podCreationTimestamp="2025-11-27 09:08:01 +0000 UTC" firstStartedPulling="2025-11-27 09:08:03.252550737 +0000 UTC m=+8121.444594655" lastFinishedPulling="2025-11-27 09:08:05.748883075 +0000 UTC m=+8123.940926993" observedRunningTime="2025-11-27 09:08:06.310318055 +0000 UTC m=+8124.502361983" watchObservedRunningTime="2025-11-27 09:08:06.318816539 +0000 UTC m=+8124.510860457"
Nov 27 09:08:11 crc kubenswrapper[4971]: I1127 09:08:11.974762 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8qnwq"
Nov 27 09:08:11 crc kubenswrapper[4971]: I1127 09:08:11.975344 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8qnwq"
Nov 27 09:08:12 crc kubenswrapper[4971]: I1127 09:08:12.044268 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8qnwq"
Nov 27 09:08:12 crc kubenswrapper[4971]: I1127 09:08:12.411585 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8qnwq"
Nov 27 09:08:12 crc kubenswrapper[4971]: I1127 09:08:12.467925 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8qnwq"]
Nov 27 09:08:14 crc kubenswrapper[4971]: I1127 09:08:14.384876 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8qnwq" podUID="61fcbc06-3bea-4954-9c9b-572bb4b9d61d" containerName="registry-server" containerID="cri-o://6c46d6ce88f67c29b44514d772aaae1c9c895d766fe0c29a257c05dd12a85702" gracePeriod=2
Nov 27 09:08:14 crc kubenswrapper[4971]: I1127 09:08:14.913569 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8qnwq"
Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.074201 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61fcbc06-3bea-4954-9c9b-572bb4b9d61d-catalog-content\") pod \"61fcbc06-3bea-4954-9c9b-572bb4b9d61d\" (UID: \"61fcbc06-3bea-4954-9c9b-572bb4b9d61d\") "
Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.074337 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2dfx\" (UniqueName: \"kubernetes.io/projected/61fcbc06-3bea-4954-9c9b-572bb4b9d61d-kube-api-access-s2dfx\") pod \"61fcbc06-3bea-4954-9c9b-572bb4b9d61d\" (UID: \"61fcbc06-3bea-4954-9c9b-572bb4b9d61d\") "
Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.074468 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61fcbc06-3bea-4954-9c9b-572bb4b9d61d-utilities\") pod \"61fcbc06-3bea-4954-9c9b-572bb4b9d61d\" (UID: \"61fcbc06-3bea-4954-9c9b-572bb4b9d61d\") "
Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.075240 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61fcbc06-3bea-4954-9c9b-572bb4b9d61d-utilities" (OuterVolumeSpecName: "utilities") pod "61fcbc06-3bea-4954-9c9b-572bb4b9d61d" (UID: "61fcbc06-3bea-4954-9c9b-572bb4b9d61d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.081839 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61fcbc06-3bea-4954-9c9b-572bb4b9d61d-kube-api-access-s2dfx" (OuterVolumeSpecName: "kube-api-access-s2dfx") pod "61fcbc06-3bea-4954-9c9b-572bb4b9d61d" (UID: "61fcbc06-3bea-4954-9c9b-572bb4b9d61d"). InnerVolumeSpecName "kube-api-access-s2dfx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.092116 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61fcbc06-3bea-4954-9c9b-572bb4b9d61d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "61fcbc06-3bea-4954-9c9b-572bb4b9d61d" (UID: "61fcbc06-3bea-4954-9c9b-572bb4b9d61d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.177021 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61fcbc06-3bea-4954-9c9b-572bb4b9d61d-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.177074 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2dfx\" (UniqueName: \"kubernetes.io/projected/61fcbc06-3bea-4954-9c9b-572bb4b9d61d-kube-api-access-s2dfx\") on node \"crc\" DevicePath \"\""
Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.177096 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61fcbc06-3bea-4954-9c9b-572bb4b9d61d-utilities\") on node \"crc\" DevicePath \"\""
Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.396434 4971 generic.go:334] "Generic (PLEG): container finished" podID="61fcbc06-3bea-4954-9c9b-572bb4b9d61d" containerID="6c46d6ce88f67c29b44514d772aaae1c9c895d766fe0c29a257c05dd12a85702" exitCode=0
Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.396512 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8qnwq"
Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.396528 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8qnwq" event={"ID":"61fcbc06-3bea-4954-9c9b-572bb4b9d61d","Type":"ContainerDied","Data":"6c46d6ce88f67c29b44514d772aaae1c9c895d766fe0c29a257c05dd12a85702"}
Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.396695 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8qnwq" event={"ID":"61fcbc06-3bea-4954-9c9b-572bb4b9d61d","Type":"ContainerDied","Data":"e29984508712294440ac96345c058d98f8b489f180551766a5315d16d558b911"}
Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.396768 4971 scope.go:117] "RemoveContainer" containerID="6c46d6ce88f67c29b44514d772aaae1c9c895d766fe0c29a257c05dd12a85702"
Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.434346 4971 scope.go:117] "RemoveContainer" containerID="71421fda05d3151121c7b96eb17e484a6e9274088dfa79394441b614477771ff"
Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.445045 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8qnwq"]
Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.454655 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8qnwq"]
Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.459984 4971 scope.go:117] "RemoveContainer" containerID="1e509ebaa4890add96d9fa88e057c3a0fa960a3d217484be825d2b4af38f2060"
Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.517659 4971 scope.go:117] "RemoveContainer" containerID="6c46d6ce88f67c29b44514d772aaae1c9c895d766fe0c29a257c05dd12a85702"
Nov 27 09:08:15 crc kubenswrapper[4971]: E1127 09:08:15.518330 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c46d6ce88f67c29b44514d772aaae1c9c895d766fe0c29a257c05dd12a85702\": container with ID starting with 6c46d6ce88f67c29b44514d772aaae1c9c895d766fe0c29a257c05dd12a85702 not found: ID does not exist" containerID="6c46d6ce88f67c29b44514d772aaae1c9c895d766fe0c29a257c05dd12a85702"
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c46d6ce88f67c29b44514d772aaae1c9c895d766fe0c29a257c05dd12a85702\": container with ID starting with 6c46d6ce88f67c29b44514d772aaae1c9c895d766fe0c29a257c05dd12a85702 not found: ID does not exist" containerID="6c46d6ce88f67c29b44514d772aaae1c9c895d766fe0c29a257c05dd12a85702" Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.518363 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c46d6ce88f67c29b44514d772aaae1c9c895d766fe0c29a257c05dd12a85702"} err="failed to get container status \"6c46d6ce88f67c29b44514d772aaae1c9c895d766fe0c29a257c05dd12a85702\": rpc error: code = NotFound desc = could not find container \"6c46d6ce88f67c29b44514d772aaae1c9c895d766fe0c29a257c05dd12a85702\": container with ID starting with 6c46d6ce88f67c29b44514d772aaae1c9c895d766fe0c29a257c05dd12a85702 not found: ID does not exist" Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.518387 4971 scope.go:117] "RemoveContainer" containerID="71421fda05d3151121c7b96eb17e484a6e9274088dfa79394441b614477771ff" Nov 27 09:08:15 crc kubenswrapper[4971]: E1127 09:08:15.518797 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71421fda05d3151121c7b96eb17e484a6e9274088dfa79394441b614477771ff\": container with ID starting with 71421fda05d3151121c7b96eb17e484a6e9274088dfa79394441b614477771ff not found: ID does not exist" containerID="71421fda05d3151121c7b96eb17e484a6e9274088dfa79394441b614477771ff" Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.518842 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71421fda05d3151121c7b96eb17e484a6e9274088dfa79394441b614477771ff"} err="failed to get container status \"71421fda05d3151121c7b96eb17e484a6e9274088dfa79394441b614477771ff\": rpc error: code = NotFound desc = could not find container \"71421fda05d3151121c7b96eb17e484a6e9274088dfa79394441b614477771ff\": container with ID starting with 71421fda05d3151121c7b96eb17e484a6e9274088dfa79394441b614477771ff not found: ID does not exist" Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.518877 4971 scope.go:117] "RemoveContainer" containerID="1e509ebaa4890add96d9fa88e057c3a0fa960a3d217484be825d2b4af38f2060" Nov 27 09:08:15 crc kubenswrapper[4971]: E1127 09:08:15.519178 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e509ebaa4890add96d9fa88e057c3a0fa960a3d217484be825d2b4af38f2060\": container with ID starting with 1e509ebaa4890add96d9fa88e057c3a0fa960a3d217484be825d2b4af38f2060 not found: ID does not exist" containerID="1e509ebaa4890add96d9fa88e057c3a0fa960a3d217484be825d2b4af38f2060" Nov 27 09:08:15 crc kubenswrapper[4971]: I1127 09:08:15.519204 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e509ebaa4890add96d9fa88e057c3a0fa960a3d217484be825d2b4af38f2060"} err="failed to get container status \"1e509ebaa4890add96d9fa88e057c3a0fa960a3d217484be825d2b4af38f2060\": rpc error: code = NotFound desc = could not find container \"1e509ebaa4890add96d9fa88e057c3a0fa960a3d217484be825d2b4af38f2060\": container with ID starting with 1e509ebaa4890add96d9fa88e057c3a0fa960a3d217484be825d2b4af38f2060 not found: ID does not exist" Nov 27 09:08:16 crc kubenswrapper[4971]: I1127 09:08:16.563204 4971 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="61fcbc06-3bea-4954-9c9b-572bb4b9d61d" path="/var/lib/kubelet/pods/61fcbc06-3bea-4954-9c9b-572bb4b9d61d/volumes" Nov 27 09:08:23 crc kubenswrapper[4971]: I1127 09:08:23.003462 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-q9bwx"] Nov 27 09:08:23 crc kubenswrapper[4971]: E1127 09:08:23.004428 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61fcbc06-3bea-4954-9c9b-572bb4b9d61d" containerName="extract-content" Nov 27 09:08:23 crc kubenswrapper[4971]: I1127 09:08:23.004445 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="61fcbc06-3bea-4954-9c9b-572bb4b9d61d" containerName="extract-content" Nov 27 09:08:23 crc kubenswrapper[4971]: E1127 09:08:23.004477 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61fcbc06-3bea-4954-9c9b-572bb4b9d61d" containerName="registry-server" Nov 27 09:08:23 crc kubenswrapper[4971]: I1127 09:08:23.004485 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="61fcbc06-3bea-4954-9c9b-572bb4b9d61d" containerName="registry-server" Nov 27 09:08:23 crc kubenswrapper[4971]: E1127 09:08:23.004506 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61fcbc06-3bea-4954-9c9b-572bb4b9d61d" containerName="extract-utilities" Nov 27 09:08:23 crc kubenswrapper[4971]: I1127 09:08:23.004513 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="61fcbc06-3bea-4954-9c9b-572bb4b9d61d" containerName="extract-utilities" Nov 27 09:08:23 crc kubenswrapper[4971]: I1127 09:08:23.004758 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="61fcbc06-3bea-4954-9c9b-572bb4b9d61d" containerName="registry-server" Nov 27 09:08:23 crc kubenswrapper[4971]: I1127 09:08:23.006267 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-q9bwx" Nov 27 09:08:23 crc kubenswrapper[4971]: I1127 09:08:23.034222 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q9bwx"] Nov 27 09:08:23 crc kubenswrapper[4971]: I1127 09:08:23.152877 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6-utilities\") pod \"certified-operators-q9bwx\" (UID: \"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6\") " pod="openshift-marketplace/certified-operators-q9bwx" Nov 27 09:08:23 crc kubenswrapper[4971]: I1127 09:08:23.152967 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6-catalog-content\") pod \"certified-operators-q9bwx\" (UID: \"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6\") " pod="openshift-marketplace/certified-operators-q9bwx" Nov 27 09:08:23 crc kubenswrapper[4971]: I1127 09:08:23.153402 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxz2s\" (UniqueName: \"kubernetes.io/projected/b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6-kube-api-access-bxz2s\") pod \"certified-operators-q9bwx\" (UID: \"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6\") " pod="openshift-marketplace/certified-operators-q9bwx" Nov 27 09:08:23 crc kubenswrapper[4971]: I1127 09:08:23.255597 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6-utilities\") pod \"certified-operators-q9bwx\" (UID: \"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6\") " pod="openshift-marketplace/certified-operators-q9bwx" Nov 27 09:08:23 crc kubenswrapper[4971]: I1127 09:08:23.255697 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6-catalog-content\") pod \"certified-operators-q9bwx\" (UID: \"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6\") " pod="openshift-marketplace/certified-operators-q9bwx" Nov 27 09:08:23 crc kubenswrapper[4971]: I1127 09:08:23.255792 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxz2s\" (UniqueName: \"kubernetes.io/projected/b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6-kube-api-access-bxz2s\") pod \"certified-operators-q9bwx\" (UID: \"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6\") " pod="openshift-marketplace/certified-operators-q9bwx" Nov 27 09:08:23 crc kubenswrapper[4971]: I1127 09:08:23.256263 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6-utilities\") pod \"certified-operators-q9bwx\" (UID: \"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6\") " pod="openshift-marketplace/certified-operators-q9bwx" Nov 27 09:08:23 crc kubenswrapper[4971]: I1127 09:08:23.256616 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6-catalog-content\") pod \"certified-operators-q9bwx\" (UID: \"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6\") " pod="openshift-marketplace/certified-operators-q9bwx" Nov 27 09:08:23 crc kubenswrapper[4971]: I1127 09:08:23.276876 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-bxz2s\" (UniqueName: \"kubernetes.io/projected/b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6-kube-api-access-bxz2s\") pod \"certified-operators-q9bwx\" (UID: \"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6\") " pod="openshift-marketplace/certified-operators-q9bwx" Nov 27 09:08:23 crc kubenswrapper[4971]: I1127 09:08:23.326732 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q9bwx" Nov 27 09:08:23 crc kubenswrapper[4971]: I1127 09:08:23.793106 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q9bwx"] Nov 27 09:08:24 crc kubenswrapper[4971]: I1127 09:08:24.487482 4971 generic.go:334] "Generic (PLEG): container finished" podID="b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6" containerID="32ec60dc40297160fd459c4992d707f5fdb585e2db94c1549df1d7968c592651" exitCode=0 Nov 27 09:08:24 crc kubenswrapper[4971]: I1127 09:08:24.487571 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9bwx" event={"ID":"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6","Type":"ContainerDied","Data":"32ec60dc40297160fd459c4992d707f5fdb585e2db94c1549df1d7968c592651"} Nov 27 09:08:24 crc kubenswrapper[4971]: I1127 09:08:24.487875 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9bwx" event={"ID":"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6","Type":"ContainerStarted","Data":"ce3b68d54dc05eaf2ad48956bb3bddd0732b1fafbb42bad638c30523ba21932d"} Nov 27 09:08:26 crc kubenswrapper[4971]: I1127 09:08:26.510478 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9bwx" event={"ID":"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6","Type":"ContainerStarted","Data":"468cfad75f8ac56b63fcb67f6541d89201041fd2cacfd7b2bfdb03a21d424295"} Nov 27 09:08:27 crc kubenswrapper[4971]: I1127 09:08:27.534928 4971 generic.go:334] "Generic (PLEG): container finished" podID="b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6" containerID="468cfad75f8ac56b63fcb67f6541d89201041fd2cacfd7b2bfdb03a21d424295" exitCode=0 Nov 27 09:08:27 crc kubenswrapper[4971]: I1127 09:08:27.535337 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9bwx" event={"ID":"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6","Type":"ContainerDied","Data":"468cfad75f8ac56b63fcb67f6541d89201041fd2cacfd7b2bfdb03a21d424295"} Nov 27 09:08:28 crc kubenswrapper[4971]: I1127 09:08:28.568025 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9bwx" event={"ID":"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6","Type":"ContainerStarted","Data":"28eecbad4cc9ddc4f118a9a73fa5be0bf837db394c0d4a6dd4f4b66437793b64"} Nov 27 09:08:28 crc kubenswrapper[4971]: I1127 09:08:28.583594 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-q9bwx" podStartSLOduration=3.053934365 podStartE2EDuration="6.583564517s" podCreationTimestamp="2025-11-27 09:08:22 +0000 UTC" firstStartedPulling="2025-11-27 09:08:24.490772674 +0000 UTC m=+8142.682816592" lastFinishedPulling="2025-11-27 09:08:28.020402826 +0000 UTC m=+8146.212446744" observedRunningTime="2025-11-27 09:08:28.579547932 +0000 UTC m=+8146.771591860" watchObservedRunningTime="2025-11-27 09:08:28.583564517 +0000 UTC m=+8146.775608435" Nov 27 09:08:33 crc kubenswrapper[4971]: I1127 09:08:33.327711 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-q9bwx" Nov 27 09:08:33 crc kubenswrapper[4971]: I1127 09:08:33.328361 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-q9bwx" Nov 27 09:08:34 crc kubenswrapper[4971]: I1127 09:08:34.391611 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-q9bwx" podUID="b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6" containerName="registry-server" probeResult="failure" output=< Nov 27 09:08:34 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 09:08:34 crc kubenswrapper[4971]: > Nov 27 09:08:43 crc kubenswrapper[4971]: I1127 09:08:43.371775 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-q9bwx" Nov 27 09:08:43 crc kubenswrapper[4971]: I1127 09:08:43.424517 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-q9bwx" Nov 27 09:08:43 crc kubenswrapper[4971]: I1127 09:08:43.616755 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q9bwx"] Nov 27 09:08:44 crc kubenswrapper[4971]: I1127 09:08:44.703803 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-q9bwx" podUID="b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6" containerName="registry-server" containerID="cri-o://28eecbad4cc9ddc4f118a9a73fa5be0bf837db394c0d4a6dd4f4b66437793b64" gracePeriod=2 Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.193403 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q9bwx" Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.277344 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6-catalog-content\") pod \"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6\" (UID: \"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6\") " Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.277635 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6-utilities\") pod \"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6\" (UID: \"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6\") " Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.277835 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bxz2s\" (UniqueName: \"kubernetes.io/projected/b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6-kube-api-access-bxz2s\") pod \"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6\" (UID: \"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6\") " Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.278491 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6-utilities" (OuterVolumeSpecName: "utilities") pod "b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6" (UID: "b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.284391 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6-kube-api-access-bxz2s" (OuterVolumeSpecName: "kube-api-access-bxz2s") pod "b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6" (UID: "b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6"). InnerVolumeSpecName "kube-api-access-bxz2s". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.343945 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6" (UID: "b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.380491 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bxz2s\" (UniqueName: \"kubernetes.io/projected/b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6-kube-api-access-bxz2s\") on node \"crc\" DevicePath \"\"" Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.380550 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.380561 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.720369 4971 generic.go:334] "Generic (PLEG): container finished" podID="b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6" containerID="28eecbad4cc9ddc4f118a9a73fa5be0bf837db394c0d4a6dd4f4b66437793b64" exitCode=0 Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.720417 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9bwx" event={"ID":"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6","Type":"ContainerDied","Data":"28eecbad4cc9ddc4f118a9a73fa5be0bf837db394c0d4a6dd4f4b66437793b64"} Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.720456 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9bwx" event={"ID":"b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6","Type":"ContainerDied","Data":"ce3b68d54dc05eaf2ad48956bb3bddd0732b1fafbb42bad638c30523ba21932d"} Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.720490 4971 scope.go:117] "RemoveContainer" containerID="28eecbad4cc9ddc4f118a9a73fa5be0bf837db394c0d4a6dd4f4b66437793b64" Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.720486 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-q9bwx" Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.768458 4971 scope.go:117] "RemoveContainer" containerID="468cfad75f8ac56b63fcb67f6541d89201041fd2cacfd7b2bfdb03a21d424295" Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.805367 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q9bwx"] Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.819262 4971 scope.go:117] "RemoveContainer" containerID="32ec60dc40297160fd459c4992d707f5fdb585e2db94c1549df1d7968c592651" Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.856793 4971 scope.go:117] "RemoveContainer" containerID="28eecbad4cc9ddc4f118a9a73fa5be0bf837db394c0d4a6dd4f4b66437793b64" Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.857353 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-q9bwx"] Nov 27 09:08:45 crc kubenswrapper[4971]: E1127 09:08:45.857458 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28eecbad4cc9ddc4f118a9a73fa5be0bf837db394c0d4a6dd4f4b66437793b64\": container with ID starting with 28eecbad4cc9ddc4f118a9a73fa5be0bf837db394c0d4a6dd4f4b66437793b64 not found: ID does not exist" containerID="28eecbad4cc9ddc4f118a9a73fa5be0bf837db394c0d4a6dd4f4b66437793b64" Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.857487 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28eecbad4cc9ddc4f118a9a73fa5be0bf837db394c0d4a6dd4f4b66437793b64"} err="failed to get container status \"28eecbad4cc9ddc4f118a9a73fa5be0bf837db394c0d4a6dd4f4b66437793b64\": rpc error: code = NotFound desc = could not find container \"28eecbad4cc9ddc4f118a9a73fa5be0bf837db394c0d4a6dd4f4b66437793b64\": container with ID starting with 28eecbad4cc9ddc4f118a9a73fa5be0bf837db394c0d4a6dd4f4b66437793b64 not found: ID does not exist" Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.857510 4971 scope.go:117] "RemoveContainer" containerID="468cfad75f8ac56b63fcb67f6541d89201041fd2cacfd7b2bfdb03a21d424295" Nov 27 09:08:45 crc kubenswrapper[4971]: E1127 09:08:45.857878 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"468cfad75f8ac56b63fcb67f6541d89201041fd2cacfd7b2bfdb03a21d424295\": container with ID starting with 468cfad75f8ac56b63fcb67f6541d89201041fd2cacfd7b2bfdb03a21d424295 not found: ID does not exist" containerID="468cfad75f8ac56b63fcb67f6541d89201041fd2cacfd7b2bfdb03a21d424295" Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.857902 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"468cfad75f8ac56b63fcb67f6541d89201041fd2cacfd7b2bfdb03a21d424295"} err="failed to get container status \"468cfad75f8ac56b63fcb67f6541d89201041fd2cacfd7b2bfdb03a21d424295\": rpc error: code = NotFound desc = could not find container \"468cfad75f8ac56b63fcb67f6541d89201041fd2cacfd7b2bfdb03a21d424295\": container with ID starting with 468cfad75f8ac56b63fcb67f6541d89201041fd2cacfd7b2bfdb03a21d424295 not found: ID does not exist" Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.857916 4971 scope.go:117] "RemoveContainer" containerID="32ec60dc40297160fd459c4992d707f5fdb585e2db94c1549df1d7968c592651" Nov 27 09:08:45 crc kubenswrapper[4971]: E1127 09:08:45.858134 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"32ec60dc40297160fd459c4992d707f5fdb585e2db94c1549df1d7968c592651\": container with ID starting with 32ec60dc40297160fd459c4992d707f5fdb585e2db94c1549df1d7968c592651 not found: ID does not exist" containerID="32ec60dc40297160fd459c4992d707f5fdb585e2db94c1549df1d7968c592651" Nov 27 09:08:45 crc kubenswrapper[4971]: I1127 09:08:45.858161 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32ec60dc40297160fd459c4992d707f5fdb585e2db94c1549df1d7968c592651"} err="failed to get container status \"32ec60dc40297160fd459c4992d707f5fdb585e2db94c1549df1d7968c592651\": rpc error: code = NotFound desc = could not find container \"32ec60dc40297160fd459c4992d707f5fdb585e2db94c1549df1d7968c592651\": container with ID starting with 32ec60dc40297160fd459c4992d707f5fdb585e2db94c1549df1d7968c592651 not found: ID does not exist" Nov 27 09:08:46 crc kubenswrapper[4971]: I1127 09:08:46.566001 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6" path="/var/lib/kubelet/pods/b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6/volumes" Nov 27 09:08:57 crc kubenswrapper[4971]: I1127 09:08:57.476420 4971 scope.go:117] "RemoveContainer" containerID="63272d2818dcccba3530122d6f603b6771cb13cbc7f37e32162bfe5a28a3dcc5" Nov 27 09:08:57 crc kubenswrapper[4971]: I1127 09:08:57.505943 4971 scope.go:117] "RemoveContainer" containerID="4065a9ac93d96eeb972bc92cf37d5e337d28e952f7ac822cf5d232d62605fbde" Nov 27 09:08:57 crc kubenswrapper[4971]: I1127 09:08:57.548562 4971 scope.go:117] "RemoveContainer" containerID="9b0932496ad0b5c4f42ab6838e895192cc87b874f748ae91e9e107da2e2d7df9" Nov 27 09:08:57 crc kubenswrapper[4971]: I1127 09:08:57.878885 4971 scope.go:117] "RemoveContainer" containerID="fdb0cc0273b2b0254c9fbc363a767f4fca5e9dbbfc2550d5a83796409b5c24c4" Nov 27 09:09:26 crc kubenswrapper[4971]: I1127 09:09:26.413632 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 09:09:26 crc kubenswrapper[4971]: I1127 09:09:26.414180 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 09:09:56 crc kubenswrapper[4971]: I1127 09:09:56.412945 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 09:09:56 crc kubenswrapper[4971]: I1127 09:09:56.413605 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 09:10:01 crc kubenswrapper[4971]: I1127 09:10:01.054346 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/heat-db-create-js7nf"] Nov 27 09:10:01 crc kubenswrapper[4971]: I1127 09:10:01.068357 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-5376-account-create-update-cg684"] Nov 27 09:10:01 crc kubenswrapper[4971]: I1127 09:10:01.079716 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-js7nf"] Nov 27 09:10:01 crc kubenswrapper[4971]: I1127 09:10:01.090017 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-5376-account-create-update-cg684"] Nov 27 09:10:02 crc kubenswrapper[4971]: I1127 09:10:02.564404 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26816754-07f0-4343-aade-0fa746b266f5" path="/var/lib/kubelet/pods/26816754-07f0-4343-aade-0fa746b266f5/volumes" Nov 27 09:10:02 crc kubenswrapper[4971]: I1127 09:10:02.565095 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbf1aa5f-efca-4f73-a186-3f02978ce830" path="/var/lib/kubelet/pods/cbf1aa5f-efca-4f73-a186-3f02978ce830/volumes" Nov 27 09:10:04 crc kubenswrapper[4971]: I1127 09:10:04.536084 4971 generic.go:334] "Generic (PLEG): container finished" podID="f3bbe788-f3b6-4502-b497-d9aa6937ae74" containerID="73829276cbd766b978e41cb1bec0cf4c1135ed1b697e15c6e1536424b781c561" exitCode=0 Nov 27 09:10:04 crc kubenswrapper[4971]: I1127 09:10:04.536152 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" event={"ID":"f3bbe788-f3b6-4502-b497-d9aa6937ae74","Type":"ContainerDied","Data":"73829276cbd766b978e41cb1bec0cf4c1135ed1b697e15c6e1536424b781c561"} Nov 27 09:10:06 crc kubenswrapper[4971]: I1127 09:10:06.053332 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" Nov 27 09:10:06 crc kubenswrapper[4971]: I1127 09:10:06.194041 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f3bbe788-f3b6-4502-b497-d9aa6937ae74-ssh-key\") pod \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\" (UID: \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\") " Nov 27 09:10:06 crc kubenswrapper[4971]: I1127 09:10:06.194298 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f3bbe788-f3b6-4502-b497-d9aa6937ae74-inventory\") pod \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\" (UID: \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\") " Nov 27 09:10:06 crc kubenswrapper[4971]: I1127 09:10:06.194389 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhwlc\" (UniqueName: \"kubernetes.io/projected/f3bbe788-f3b6-4502-b497-d9aa6937ae74-kube-api-access-qhwlc\") pod \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\" (UID: \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\") " Nov 27 09:10:06 crc kubenswrapper[4971]: I1127 09:10:06.194632 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3bbe788-f3b6-4502-b497-d9aa6937ae74-tripleo-cleanup-combined-ca-bundle\") pod \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\" (UID: \"f3bbe788-f3b6-4502-b497-d9aa6937ae74\") " Nov 27 09:10:06 crc kubenswrapper[4971]: I1127 09:10:06.201330 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3bbe788-f3b6-4502-b497-d9aa6937ae74-tripleo-cleanup-combined-ca-bundle" (OuterVolumeSpecName: 
"tripleo-cleanup-combined-ca-bundle") pod "f3bbe788-f3b6-4502-b497-d9aa6937ae74" (UID: "f3bbe788-f3b6-4502-b497-d9aa6937ae74"). InnerVolumeSpecName "tripleo-cleanup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:10:06 crc kubenswrapper[4971]: I1127 09:10:06.201894 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3bbe788-f3b6-4502-b497-d9aa6937ae74-kube-api-access-qhwlc" (OuterVolumeSpecName: "kube-api-access-qhwlc") pod "f3bbe788-f3b6-4502-b497-d9aa6937ae74" (UID: "f3bbe788-f3b6-4502-b497-d9aa6937ae74"). InnerVolumeSpecName "kube-api-access-qhwlc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:10:06 crc kubenswrapper[4971]: I1127 09:10:06.226156 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3bbe788-f3b6-4502-b497-d9aa6937ae74-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f3bbe788-f3b6-4502-b497-d9aa6937ae74" (UID: "f3bbe788-f3b6-4502-b497-d9aa6937ae74"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:10:06 crc kubenswrapper[4971]: I1127 09:10:06.226489 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3bbe788-f3b6-4502-b497-d9aa6937ae74-inventory" (OuterVolumeSpecName: "inventory") pod "f3bbe788-f3b6-4502-b497-d9aa6937ae74" (UID: "f3bbe788-f3b6-4502-b497-d9aa6937ae74"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:10:06 crc kubenswrapper[4971]: I1127 09:10:06.298143 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f3bbe788-f3b6-4502-b497-d9aa6937ae74-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:10:06 crc kubenswrapper[4971]: I1127 09:10:06.298181 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f3bbe788-f3b6-4502-b497-d9aa6937ae74-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:10:06 crc kubenswrapper[4971]: I1127 09:10:06.298192 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhwlc\" (UniqueName: \"kubernetes.io/projected/f3bbe788-f3b6-4502-b497-d9aa6937ae74-kube-api-access-qhwlc\") on node \"crc\" DevicePath \"\"" Nov 27 09:10:06 crc kubenswrapper[4971]: I1127 09:10:06.298206 4971 reconciler_common.go:293] "Volume detached for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3bbe788-f3b6-4502-b497-d9aa6937ae74-tripleo-cleanup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:10:06 crc kubenswrapper[4971]: I1127 09:10:06.564766 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" Nov 27 09:10:06 crc kubenswrapper[4971]: I1127 09:10:06.565931 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr" event={"ID":"f3bbe788-f3b6-4502-b497-d9aa6937ae74","Type":"ContainerDied","Data":"9b56f04aee9f2979a492e1d72eda2bce0ac7f719c53199e091f55293b3af11c5"} Nov 27 09:10:06 crc kubenswrapper[4971]: I1127 09:10:06.565993 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b56f04aee9f2979a492e1d72eda2bce0ac7f719c53199e091f55293b3af11c5" Nov 27 09:10:06 crc kubenswrapper[4971]: E1127 09:10:06.893742 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf3bbe788_f3b6_4502_b497_d9aa6937ae74.slice/crio-9b56f04aee9f2979a492e1d72eda2bce0ac7f719c53199e091f55293b3af11c5\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf3bbe788_f3b6_4502_b497_d9aa6937ae74.slice\": RecentStats: unable to find data in memory cache]" Nov 27 09:10:14 crc kubenswrapper[4971]: I1127 09:10:14.038594 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-4mph7"] Nov 27 09:10:14 crc kubenswrapper[4971]: I1127 09:10:14.052041 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-4mph7"] Nov 27 09:10:14 crc kubenswrapper[4971]: I1127 09:10:14.569974 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aeb9e021-f0d3-4d35-92f6-22e3c934f8f1" path="/var/lib/kubelet/pods/aeb9e021-f0d3-4d35-92f6-22e3c934f8f1/volumes" Nov 27 09:10:26 crc kubenswrapper[4971]: I1127 09:10:26.413462 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 09:10:26 crc kubenswrapper[4971]: I1127 09:10:26.414287 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 09:10:26 crc kubenswrapper[4971]: I1127 09:10:26.414351 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 09:10:26 crc kubenswrapper[4971]: I1127 09:10:26.415612 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a6e0ac0b7d8b5de8ba640136d7618fa0300e44b75a6d4515b5a7d9cedb252962"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 09:10:26 crc kubenswrapper[4971]: I1127 09:10:26.415685 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://a6e0ac0b7d8b5de8ba640136d7618fa0300e44b75a6d4515b5a7d9cedb252962" 
Nov 27 09:10:26 crc kubenswrapper[4971]: I1127 09:10:26.415685 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://a6e0ac0b7d8b5de8ba640136d7618fa0300e44b75a6d4515b5a7d9cedb252962" gracePeriod=600
Nov 27 09:10:26 crc kubenswrapper[4971]: I1127 09:10:26.789329 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="a6e0ac0b7d8b5de8ba640136d7618fa0300e44b75a6d4515b5a7d9cedb252962" exitCode=0
Nov 27 09:10:26 crc kubenswrapper[4971]: I1127 09:10:26.790265 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"a6e0ac0b7d8b5de8ba640136d7618fa0300e44b75a6d4515b5a7d9cedb252962"}
Nov 27 09:10:26 crc kubenswrapper[4971]: I1127 09:10:26.790344 4971 scope.go:117] "RemoveContainer" containerID="994a193d8aefca1605e71d1a50a8591da0b0ae972faf08604e8f68555feb1168"
Nov 27 09:10:27 crc kubenswrapper[4971]: I1127 09:10:27.804038 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c"}
Nov 27 09:10:57 crc kubenswrapper[4971]: I1127 09:10:57.577041 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-92g6p"]
Nov 27 09:10:57 crc kubenswrapper[4971]: E1127 09:10:57.578843 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6" containerName="registry-server"
Nov 27 09:10:57 crc kubenswrapper[4971]: I1127 09:10:57.578871 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6" containerName="registry-server"
Nov 27 09:10:57 crc kubenswrapper[4971]: E1127 09:10:57.578902 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6" containerName="extract-content"
Nov 27 09:10:57 crc kubenswrapper[4971]: I1127 09:10:57.578911 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6" containerName="extract-content"
Nov 27 09:10:57 crc kubenswrapper[4971]: E1127 09:10:57.578942 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3bbe788-f3b6-4502-b497-d9aa6937ae74" containerName="tripleo-cleanup-tripleo-cleanup-openstack-networker"
Nov 27 09:10:57 crc kubenswrapper[4971]: I1127 09:10:57.578975 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3bbe788-f3b6-4502-b497-d9aa6937ae74" containerName="tripleo-cleanup-tripleo-cleanup-openstack-networker"
Nov 27 09:10:57 crc kubenswrapper[4971]: E1127 09:10:57.579013 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6" containerName="extract-utilities"
Nov 27 09:10:57 crc kubenswrapper[4971]: I1127 09:10:57.579025 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6" containerName="extract-utilities"
Nov 27 09:10:57 crc kubenswrapper[4971]: I1127 09:10:57.579326 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3bbe788-f3b6-4502-b497-d9aa6937ae74" containerName="tripleo-cleanup-tripleo-cleanup-openstack-networker"
Nov 27 09:10:57 crc kubenswrapper[4971]: I1127 09:10:57.579350 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7576cc4-b6d2-4ae5-8332-a10ae9a8b0b6" containerName="registry-server"
Nov 27 09:10:57 crc kubenswrapper[4971]: I1127 09:10:57.581720 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-92g6p"
Nov 27 09:10:57 crc kubenswrapper[4971]: I1127 09:10:57.595148 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-92g6p"]
Nov 27 09:10:57 crc kubenswrapper[4971]: I1127 09:10:57.624904 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mzrr\" (UniqueName: \"kubernetes.io/projected/e1dd1642-197d-4b73-a54b-6911a36d37f2-kube-api-access-7mzrr\") pod \"redhat-operators-92g6p\" (UID: \"e1dd1642-197d-4b73-a54b-6911a36d37f2\") " pod="openshift-marketplace/redhat-operators-92g6p"
Nov 27 09:10:57 crc kubenswrapper[4971]: I1127 09:10:57.625083 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1dd1642-197d-4b73-a54b-6911a36d37f2-utilities\") pod \"redhat-operators-92g6p\" (UID: \"e1dd1642-197d-4b73-a54b-6911a36d37f2\") " pod="openshift-marketplace/redhat-operators-92g6p"
Nov 27 09:10:57 crc kubenswrapper[4971]: I1127 09:10:57.625146 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1dd1642-197d-4b73-a54b-6911a36d37f2-catalog-content\") pod \"redhat-operators-92g6p\" (UID: \"e1dd1642-197d-4b73-a54b-6911a36d37f2\") " pod="openshift-marketplace/redhat-operators-92g6p"
Nov 27 09:10:57 crc kubenswrapper[4971]: I1127 09:10:57.728798 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1dd1642-197d-4b73-a54b-6911a36d37f2-utilities\") pod \"redhat-operators-92g6p\" (UID: \"e1dd1642-197d-4b73-a54b-6911a36d37f2\") " pod="openshift-marketplace/redhat-operators-92g6p"
Nov 27 09:10:57 crc kubenswrapper[4971]: I1127 09:10:57.728203 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1dd1642-197d-4b73-a54b-6911a36d37f2-utilities\") pod \"redhat-operators-92g6p\" (UID: \"e1dd1642-197d-4b73-a54b-6911a36d37f2\") " pod="openshift-marketplace/redhat-operators-92g6p"
Nov 27 09:10:57 crc kubenswrapper[4971]: I1127 09:10:57.731340 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1dd1642-197d-4b73-a54b-6911a36d37f2-catalog-content\") pod \"redhat-operators-92g6p\" (UID: \"e1dd1642-197d-4b73-a54b-6911a36d37f2\") " pod="openshift-marketplace/redhat-operators-92g6p"
Nov 27 09:10:57 crc kubenswrapper[4971]: I1127 09:10:57.731659 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mzrr\" (UniqueName: \"kubernetes.io/projected/e1dd1642-197d-4b73-a54b-6911a36d37f2-kube-api-access-7mzrr\") pod \"redhat-operators-92g6p\" (UID: \"e1dd1642-197d-4b73-a54b-6911a36d37f2\") " pod="openshift-marketplace/redhat-operators-92g6p"
Nov 27 09:10:57 crc kubenswrapper[4971]: I1127 09:10:57.732491 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1dd1642-197d-4b73-a54b-6911a36d37f2-catalog-content\") pod \"redhat-operators-92g6p\" (UID: \"e1dd1642-197d-4b73-a54b-6911a36d37f2\") " pod="openshift-marketplace/redhat-operators-92g6p"
Nov 27 09:10:57 crc kubenswrapper[4971]: I1127 09:10:57.762446 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mzrr\" (UniqueName: \"kubernetes.io/projected/e1dd1642-197d-4b73-a54b-6911a36d37f2-kube-api-access-7mzrr\") pod \"redhat-operators-92g6p\" (UID: \"e1dd1642-197d-4b73-a54b-6911a36d37f2\") " pod="openshift-marketplace/redhat-operators-92g6p"
Nov 27 09:10:57 crc kubenswrapper[4971]: I1127 09:10:57.907457 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-92g6p"
Nov 27 09:10:58 crc kubenswrapper[4971]: I1127 09:10:58.567987 4971 scope.go:117] "RemoveContainer" containerID="aa10c907aab40ea2c7653e0033c009d4ba39daf586efcf20be088ba5896c59b7"
Nov 27 09:10:58 crc kubenswrapper[4971]: I1127 09:10:58.678099 4971 scope.go:117] "RemoveContainer" containerID="ae9393d475c10e39cddc53af063afbfafcacad911f9526112f0f0f8efd3f32b9"
Nov 27 09:10:58 crc kubenswrapper[4971]: I1127 09:10:58.743013 4971 scope.go:117] "RemoveContainer" containerID="c871e74c68f49246cab2e0749c25bb770dffb1977912d8659a0ae4800495e697"
Nov 27 09:10:58 crc kubenswrapper[4971]: I1127 09:10:58.809362 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-92g6p"]
Nov 27 09:10:58 crc kubenswrapper[4971]: W1127 09:10:58.833806 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1dd1642_197d_4b73_a54b_6911a36d37f2.slice/crio-1404570fd98993e74902e24061d06ec83f4aedcce9b8bfb50d6a658fb48e453a WatchSource:0}: Error finding container 1404570fd98993e74902e24061d06ec83f4aedcce9b8bfb50d6a658fb48e453a: Status 404 returned error can't find the container with id 1404570fd98993e74902e24061d06ec83f4aedcce9b8bfb50d6a658fb48e453a
Nov 27 09:10:59 crc kubenswrapper[4971]: I1127 09:10:59.596860 4971 generic.go:334] "Generic (PLEG): container finished" podID="e1dd1642-197d-4b73-a54b-6911a36d37f2" containerID="e362f07dfe0e00179b19baeeaa3f2fc2ba0d12dc2145f76c1546c20571672416" exitCode=0
Nov 27 09:10:59 crc kubenswrapper[4971]: I1127 09:10:59.597044 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-92g6p" event={"ID":"e1dd1642-197d-4b73-a54b-6911a36d37f2","Type":"ContainerDied","Data":"e362f07dfe0e00179b19baeeaa3f2fc2ba0d12dc2145f76c1546c20571672416"}
Nov 27 09:10:59 crc kubenswrapper[4971]: I1127 09:10:59.597779 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-92g6p" event={"ID":"e1dd1642-197d-4b73-a54b-6911a36d37f2","Type":"ContainerStarted","Data":"1404570fd98993e74902e24061d06ec83f4aedcce9b8bfb50d6a658fb48e453a"}
Nov 27 09:11:00 crc kubenswrapper[4971]: I1127 09:11:00.627331 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-92g6p" event={"ID":"e1dd1642-197d-4b73-a54b-6911a36d37f2","Type":"ContainerStarted","Data":"693d9412dd72d2e1a4b2843e2ca8caec37216c9afa29ed8120957c41697ef07c"}
Nov 27 09:11:05 crc kubenswrapper[4971]: I1127 09:11:05.686976 4971 generic.go:334] "Generic (PLEG): container finished" podID="e1dd1642-197d-4b73-a54b-6911a36d37f2" containerID="693d9412dd72d2e1a4b2843e2ca8caec37216c9afa29ed8120957c41697ef07c" exitCode=0
Nov 27 09:11:05 crc kubenswrapper[4971]: I1127 09:11:05.687047 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-92g6p" event={"ID":"e1dd1642-197d-4b73-a54b-6911a36d37f2","Type":"ContainerDied","Data":"693d9412dd72d2e1a4b2843e2ca8caec37216c9afa29ed8120957c41697ef07c"}
Nov 27 09:11:07 crc kubenswrapper[4971]: I1127 09:11:07.713017 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-92g6p" event={"ID":"e1dd1642-197d-4b73-a54b-6911a36d37f2","Type":"ContainerStarted","Data":"4c576fec1e88263a21ea40c6acd907ae001e8a57c158bedc4e52738d61e3d487"}
Nov 27 09:11:07 crc kubenswrapper[4971]: I1127 09:11:07.909807 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-92g6p"
Nov 27 09:11:07 crc kubenswrapper[4971]: I1127 09:11:07.909866 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-92g6p"
Nov 27 09:11:08 crc kubenswrapper[4971]: I1127 09:11:08.957758 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-92g6p" podUID="e1dd1642-197d-4b73-a54b-6911a36d37f2" containerName="registry-server" probeResult="failure" output=<
Nov 27 09:11:08 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s
Nov 27 09:11:08 crc kubenswrapper[4971]: >
Nov 27 09:11:17 crc kubenswrapper[4971]: I1127 09:11:17.964832 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-92g6p"
Nov 27 09:11:17 crc kubenswrapper[4971]: I1127 09:11:17.997619 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-92g6p" podStartSLOduration=14.109781192 podStartE2EDuration="20.997588653s" podCreationTimestamp="2025-11-27 09:10:57 +0000 UTC" firstStartedPulling="2025-11-27 09:10:59.601263778 +0000 UTC m=+8297.793307696" lastFinishedPulling="2025-11-27 09:11:06.489071239 +0000 UTC m=+8304.681115157" observedRunningTime="2025-11-27 09:11:07.73607576 +0000 UTC m=+8305.928119698" watchObservedRunningTime="2025-11-27 09:11:17.997588653 +0000 UTC m=+8316.189632571"
Nov 27 09:11:18 crc kubenswrapper[4971]: I1127 09:11:18.023214 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-92g6p"
Nov 27 09:11:18 crc kubenswrapper[4971]: I1127 09:11:18.213548 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-92g6p"]
Nov 27 09:11:19 crc kubenswrapper[4971]: I1127 09:11:19.844716 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-92g6p" podUID="e1dd1642-197d-4b73-a54b-6911a36d37f2" containerName="registry-server" containerID="cri-o://4c576fec1e88263a21ea40c6acd907ae001e8a57c158bedc4e52738d61e3d487" gracePeriod=2
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.348858 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-92g6p"
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.518676 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1dd1642-197d-4b73-a54b-6911a36d37f2-utilities\") pod \"e1dd1642-197d-4b73-a54b-6911a36d37f2\" (UID: \"e1dd1642-197d-4b73-a54b-6911a36d37f2\") "
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.518780 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1dd1642-197d-4b73-a54b-6911a36d37f2-catalog-content\") pod \"e1dd1642-197d-4b73-a54b-6911a36d37f2\" (UID: \"e1dd1642-197d-4b73-a54b-6911a36d37f2\") "
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.518829 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mzrr\" (UniqueName: \"kubernetes.io/projected/e1dd1642-197d-4b73-a54b-6911a36d37f2-kube-api-access-7mzrr\") pod \"e1dd1642-197d-4b73-a54b-6911a36d37f2\" (UID: \"e1dd1642-197d-4b73-a54b-6911a36d37f2\") "
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.520314 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1dd1642-197d-4b73-a54b-6911a36d37f2-utilities" (OuterVolumeSpecName: "utilities") pod "e1dd1642-197d-4b73-a54b-6911a36d37f2" (UID: "e1dd1642-197d-4b73-a54b-6911a36d37f2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.528667 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1dd1642-197d-4b73-a54b-6911a36d37f2-kube-api-access-7mzrr" (OuterVolumeSpecName: "kube-api-access-7mzrr") pod "e1dd1642-197d-4b73-a54b-6911a36d37f2" (UID: "e1dd1642-197d-4b73-a54b-6911a36d37f2"). InnerVolumeSpecName "kube-api-access-7mzrr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.615199 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1dd1642-197d-4b73-a54b-6911a36d37f2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e1dd1642-197d-4b73-a54b-6911a36d37f2" (UID: "e1dd1642-197d-4b73-a54b-6911a36d37f2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.622949 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mzrr\" (UniqueName: \"kubernetes.io/projected/e1dd1642-197d-4b73-a54b-6911a36d37f2-kube-api-access-7mzrr\") on node \"crc\" DevicePath \"\""
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.623107 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1dd1642-197d-4b73-a54b-6911a36d37f2-utilities\") on node \"crc\" DevicePath \"\""
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.623133 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1dd1642-197d-4b73-a54b-6911a36d37f2-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.860449 4971 generic.go:334] "Generic (PLEG): container finished" podID="e1dd1642-197d-4b73-a54b-6911a36d37f2" containerID="4c576fec1e88263a21ea40c6acd907ae001e8a57c158bedc4e52738d61e3d487" exitCode=0
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.860515 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-92g6p" event={"ID":"e1dd1642-197d-4b73-a54b-6911a36d37f2","Type":"ContainerDied","Data":"4c576fec1e88263a21ea40c6acd907ae001e8a57c158bedc4e52738d61e3d487"}
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.860569 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-92g6p"
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.860602 4971 scope.go:117] "RemoveContainer" containerID="4c576fec1e88263a21ea40c6acd907ae001e8a57c158bedc4e52738d61e3d487"
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.860582 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-92g6p" event={"ID":"e1dd1642-197d-4b73-a54b-6911a36d37f2","Type":"ContainerDied","Data":"1404570fd98993e74902e24061d06ec83f4aedcce9b8bfb50d6a658fb48e453a"}
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.910376 4971 scope.go:117] "RemoveContainer" containerID="693d9412dd72d2e1a4b2843e2ca8caec37216c9afa29ed8120957c41697ef07c"
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.912412 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-92g6p"]
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.929606 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-92g6p"]
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.937415 4971 scope.go:117] "RemoveContainer" containerID="e362f07dfe0e00179b19baeeaa3f2fc2ba0d12dc2145f76c1546c20571672416"
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.977929 4971 scope.go:117] "RemoveContainer" containerID="4c576fec1e88263a21ea40c6acd907ae001e8a57c158bedc4e52738d61e3d487"
Nov 27 09:11:20 crc kubenswrapper[4971]: E1127 09:11:20.978332 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c576fec1e88263a21ea40c6acd907ae001e8a57c158bedc4e52738d61e3d487\": container with ID starting with 4c576fec1e88263a21ea40c6acd907ae001e8a57c158bedc4e52738d61e3d487 not found: ID does not exist" containerID="4c576fec1e88263a21ea40c6acd907ae001e8a57c158bedc4e52738d61e3d487"
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.978374 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c576fec1e88263a21ea40c6acd907ae001e8a57c158bedc4e52738d61e3d487"} err="failed to get container status \"4c576fec1e88263a21ea40c6acd907ae001e8a57c158bedc4e52738d61e3d487\": rpc error: code = NotFound desc = could not find container \"4c576fec1e88263a21ea40c6acd907ae001e8a57c158bedc4e52738d61e3d487\": container with ID starting with 4c576fec1e88263a21ea40c6acd907ae001e8a57c158bedc4e52738d61e3d487 not found: ID does not exist"
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.978404 4971 scope.go:117] "RemoveContainer" containerID="693d9412dd72d2e1a4b2843e2ca8caec37216c9afa29ed8120957c41697ef07c"
Nov 27 09:11:20 crc kubenswrapper[4971]: E1127 09:11:20.978739 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"693d9412dd72d2e1a4b2843e2ca8caec37216c9afa29ed8120957c41697ef07c\": container with ID starting with 693d9412dd72d2e1a4b2843e2ca8caec37216c9afa29ed8120957c41697ef07c not found: ID does not exist" containerID="693d9412dd72d2e1a4b2843e2ca8caec37216c9afa29ed8120957c41697ef07c"
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.978774 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"693d9412dd72d2e1a4b2843e2ca8caec37216c9afa29ed8120957c41697ef07c"} err="failed to get container status \"693d9412dd72d2e1a4b2843e2ca8caec37216c9afa29ed8120957c41697ef07c\": rpc error: code = NotFound desc = could not find container \"693d9412dd72d2e1a4b2843e2ca8caec37216c9afa29ed8120957c41697ef07c\": container with ID starting with 693d9412dd72d2e1a4b2843e2ca8caec37216c9afa29ed8120957c41697ef07c not found: ID does not exist"
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.978796 4971 scope.go:117] "RemoveContainer" containerID="e362f07dfe0e00179b19baeeaa3f2fc2ba0d12dc2145f76c1546c20571672416"
Nov 27 09:11:20 crc kubenswrapper[4971]: E1127 09:11:20.979089 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e362f07dfe0e00179b19baeeaa3f2fc2ba0d12dc2145f76c1546c20571672416\": container with ID starting with e362f07dfe0e00179b19baeeaa3f2fc2ba0d12dc2145f76c1546c20571672416 not found: ID does not exist" containerID="e362f07dfe0e00179b19baeeaa3f2fc2ba0d12dc2145f76c1546c20571672416"
Nov 27 09:11:20 crc kubenswrapper[4971]: I1127 09:11:20.979118 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e362f07dfe0e00179b19baeeaa3f2fc2ba0d12dc2145f76c1546c20571672416"} err="failed to get container status \"e362f07dfe0e00179b19baeeaa3f2fc2ba0d12dc2145f76c1546c20571672416\": rpc error: code = NotFound desc = could not find container \"e362f07dfe0e00179b19baeeaa3f2fc2ba0d12dc2145f76c1546c20571672416\": container with ID starting with e362f07dfe0e00179b19baeeaa3f2fc2ba0d12dc2145f76c1546c20571672416 not found: ID does not exist"
Nov 27 09:11:22 crc kubenswrapper[4971]: I1127 09:11:22.605818 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1dd1642-197d-4b73-a54b-6911a36d37f2" path="/var/lib/kubelet/pods/e1dd1642-197d-4b73-a54b-6911a36d37f2/volumes"
Nov 27 09:12:26 crc kubenswrapper[4971]: I1127 09:12:26.413511 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 09:12:26 crc kubenswrapper[4971]: I1127 09:12:26.414504 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 09:12:40 crc kubenswrapper[4971]: I1127 09:12:40.082772 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-6b4a-account-create-update-bdh9t"]
Nov 27 09:12:40 crc kubenswrapper[4971]: I1127 09:12:40.099363 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-create-87nq8"]
Nov 27 09:12:40 crc kubenswrapper[4971]: I1127 09:12:40.112824 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-6b4a-account-create-update-bdh9t"]
Nov 27 09:12:40 crc kubenswrapper[4971]: I1127 09:12:40.123143 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-87nq8"]
Nov 27 09:12:40 crc kubenswrapper[4971]: I1127 09:12:40.565754 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d7182a5-9bfd-4767-916e-a83a2a32da61" path="/var/lib/kubelet/pods/2d7182a5-9bfd-4767-916e-a83a2a32da61/volumes"
Nov 27 09:12:40 crc kubenswrapper[4971]: I1127 09:12:40.566726 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77cb726e-bcde-40f2-8aee-100180685d40" path="/var/lib/kubelet/pods/77cb726e-bcde-40f2-8aee-100180685d40/volumes"
Nov 27 09:12:53 crc kubenswrapper[4971]: I1127 09:12:53.037854 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-48fx9"]
Nov 27 09:12:53 crc kubenswrapper[4971]: I1127 09:12:53.046520 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-48fx9"]
Nov 27 09:12:54 crc kubenswrapper[4971]: I1127 09:12:54.565911 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecce1511-2855-4bd4-be9b-408f300f0507" path="/var/lib/kubelet/pods/ecce1511-2855-4bd4-be9b-408f300f0507/volumes"
Nov 27 09:12:56 crc kubenswrapper[4971]: I1127 09:12:56.412883 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 09:12:56 crc kubenswrapper[4971]: I1127 09:12:56.413219 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 09:12:59 crc kubenswrapper[4971]: I1127 09:12:59.019428 4971 scope.go:117] "RemoveContainer" containerID="61a4fe1884f6b1d01f71b712b67ad345b23c4270373d14be5e9e9937103dbb01"
Nov 27 09:12:59 crc kubenswrapper[4971]: I1127 09:12:59.072864 4971 scope.go:117] "RemoveContainer" containerID="bef5104008fc165d01339a6f002625a99127c121b3efb2dbc980ae7c2bd7dbfd"
Nov 27 09:12:59 crc kubenswrapper[4971]: I1127 09:12:59.150270 4971 scope.go:117] "RemoveContainer" containerID="5374cad01698933203dda93f1df8ae23bea849c645b6c57bcea55da85e84d865"
Nov 27 09:13:14 crc kubenswrapper[4971]: I1127 09:13:14.054632 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-6269-account-create-update-9ww84"]
Nov 27 09:13:14 crc kubenswrapper[4971]: I1127 09:13:14.069219 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-5vg66"]
Nov 27 09:13:14 crc kubenswrapper[4971]: I1127 09:13:14.079409 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-6269-account-create-update-9ww84"]
Nov 27 09:13:14 crc kubenswrapper[4971]: I1127 09:13:14.090596 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-5vg66"]
Nov 27 09:13:14 crc kubenswrapper[4971]: I1127 09:13:14.569486 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c543498-8218-435c-9dc7-783a4b91f163" path="/var/lib/kubelet/pods/5c543498-8218-435c-9dc7-783a4b91f163/volumes"
Nov 27 09:13:14 crc kubenswrapper[4971]: I1127 09:13:14.570302 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="942471a2-354d-46eb-8043-a0b3de2d23ab" path="/var/lib/kubelet/pods/942471a2-354d-46eb-8043-a0b3de2d23ab/volumes"
Nov 27 09:13:26 crc kubenswrapper[4971]: I1127 09:13:26.413437 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 09:13:26 crc kubenswrapper[4971]: I1127 09:13:26.414060 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 09:13:26 crc kubenswrapper[4971]: I1127 09:13:26.414130 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h"
Nov 27 09:13:26 crc kubenswrapper[4971]: I1127 09:13:26.415307 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 27 09:13:26 crc kubenswrapper[4971]: I1127 09:13:26.415384 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" gracePeriod=600
Nov 27 09:13:26 crc kubenswrapper[4971]: E1127 09:13:26.547479 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:13:26 crc kubenswrapper[4971]: I1127 09:13:26.963020 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" exitCode=0
Nov 27 09:13:26 crc kubenswrapper[4971]: I1127 09:13:26.963063 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c"}
Nov 27 09:13:26 crc kubenswrapper[4971]: I1127 09:13:26.963099 4971 scope.go:117] "RemoveContainer" containerID="a6e0ac0b7d8b5de8ba640136d7618fa0300e44b75a6d4515b5a7d9cedb252962"
Nov 27 09:13:26 crc kubenswrapper[4971]: I1127 09:13:26.963810 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c"
Nov 27 09:13:26 crc kubenswrapper[4971]: E1127 09:13:26.964180 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:13:27 crc kubenswrapper[4971]: I1127 09:13:27.041837 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-npkfx"]
Nov 27 09:13:27 crc kubenswrapper[4971]: I1127 09:13:27.052565 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-npkfx"]
Nov 27 09:13:28 crc kubenswrapper[4971]: I1127 09:13:28.575170 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0c4f901-c67c-4848-8631-02c515ba7c7b" path="/var/lib/kubelet/pods/a0c4f901-c67c-4848-8631-02c515ba7c7b/volumes"
Nov 27 09:13:40 crc kubenswrapper[4971]: I1127 09:13:40.550690 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c"
Nov 27 09:13:40 crc kubenswrapper[4971]: E1127 09:13:40.551502 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:13:52 crc kubenswrapper[4971]: I1127 09:13:52.558462 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c"
Nov 27 09:13:52 crc kubenswrapper[4971]: E1127 09:13:52.559725 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:13:54 crc kubenswrapper[4971]: I1127 09:13:54.239299 4971 generic.go:334] "Generic (PLEG): container finished" podID="ea8298a5-6819-4be7-a1d9-1af470a81021" containerID="51990f92580c866971e8beb94e4862bcddb43aee1b2755f05bfd1335e3085026" exitCode=0
pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" event={"ID":"ea8298a5-6819-4be7-a1d9-1af470a81021","Type":"ContainerDied","Data":"51990f92580c866971e8beb94e4862bcddb43aee1b2755f05bfd1335e3085026"} Nov 27 09:13:55 crc kubenswrapper[4971]: I1127 09:13:55.732566 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" Nov 27 09:13:55 crc kubenswrapper[4971]: I1127 09:13:55.854160 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-inventory\") pod \"ea8298a5-6819-4be7-a1d9-1af470a81021\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " Nov 27 09:13:55 crc kubenswrapper[4971]: I1127 09:13:55.854806 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-ceph\") pod \"ea8298a5-6819-4be7-a1d9-1af470a81021\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " Nov 27 09:13:55 crc kubenswrapper[4971]: I1127 09:13:55.854869 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-ssh-key\") pod \"ea8298a5-6819-4be7-a1d9-1af470a81021\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " Nov 27 09:13:55 crc kubenswrapper[4971]: I1127 09:13:55.854943 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-tripleo-cleanup-combined-ca-bundle\") pod \"ea8298a5-6819-4be7-a1d9-1af470a81021\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " Nov 27 09:13:55 crc kubenswrapper[4971]: I1127 09:13:55.855054 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45qt8\" (UniqueName: \"kubernetes.io/projected/ea8298a5-6819-4be7-a1d9-1af470a81021-kube-api-access-45qt8\") pod \"ea8298a5-6819-4be7-a1d9-1af470a81021\" (UID: \"ea8298a5-6819-4be7-a1d9-1af470a81021\") " Nov 27 09:13:55 crc kubenswrapper[4971]: I1127 09:13:55.861403 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea8298a5-6819-4be7-a1d9-1af470a81021-kube-api-access-45qt8" (OuterVolumeSpecName: "kube-api-access-45qt8") pod "ea8298a5-6819-4be7-a1d9-1af470a81021" (UID: "ea8298a5-6819-4be7-a1d9-1af470a81021"). InnerVolumeSpecName "kube-api-access-45qt8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:13:55 crc kubenswrapper[4971]: I1127 09:13:55.861820 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-ceph" (OuterVolumeSpecName: "ceph") pod "ea8298a5-6819-4be7-a1d9-1af470a81021" (UID: "ea8298a5-6819-4be7-a1d9-1af470a81021"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:13:55 crc kubenswrapper[4971]: I1127 09:13:55.862152 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-tripleo-cleanup-combined-ca-bundle" (OuterVolumeSpecName: "tripleo-cleanup-combined-ca-bundle") pod "ea8298a5-6819-4be7-a1d9-1af470a81021" (UID: "ea8298a5-6819-4be7-a1d9-1af470a81021"). InnerVolumeSpecName "tripleo-cleanup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:13:55 crc kubenswrapper[4971]: I1127 09:13:55.890387 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-inventory" (OuterVolumeSpecName: "inventory") pod "ea8298a5-6819-4be7-a1d9-1af470a81021" (UID: "ea8298a5-6819-4be7-a1d9-1af470a81021"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:13:55 crc kubenswrapper[4971]: I1127 09:13:55.890514 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ea8298a5-6819-4be7-a1d9-1af470a81021" (UID: "ea8298a5-6819-4be7-a1d9-1af470a81021"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:13:55 crc kubenswrapper[4971]: I1127 09:13:55.957779 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45qt8\" (UniqueName: \"kubernetes.io/projected/ea8298a5-6819-4be7-a1d9-1af470a81021-kube-api-access-45qt8\") on node \"crc\" DevicePath \"\"" Nov 27 09:13:55 crc kubenswrapper[4971]: I1127 09:13:55.957832 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:13:55 crc kubenswrapper[4971]: I1127 09:13:55.957842 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-ceph\") on node \"crc\" DevicePath \"\"" Nov 27 09:13:55 crc kubenswrapper[4971]: I1127 09:13:55.957852 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:13:55 crc kubenswrapper[4971]: I1127 09:13:55.957864 4971 reconciler_common.go:293] "Volume detached for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea8298a5-6819-4be7-a1d9-1af470a81021-tripleo-cleanup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:13:56 crc kubenswrapper[4971]: I1127 09:13:56.269126 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" event={"ID":"ea8298a5-6819-4be7-a1d9-1af470a81021","Type":"ContainerDied","Data":"7bd643b13cc4af49985312cdbae70261da24f69456472e8b43fa421a544b0913"} Nov 27 09:13:56 crc kubenswrapper[4971]: I1127 09:13:56.271148 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7bd643b13cc4af49985312cdbae70261da24f69456472e8b43fa421a544b0913" Nov 27 09:13:56 crc kubenswrapper[4971]: I1127 09:13:56.269557 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd" Nov 27 09:13:59 crc kubenswrapper[4971]: I1127 09:13:59.273912 4971 scope.go:117] "RemoveContainer" containerID="5a3ed4d856abc168aab78d8010ca707928a2e589e2c24300a1fe6ddd9854305f" Nov 27 09:13:59 crc kubenswrapper[4971]: I1127 09:13:59.316155 4971 scope.go:117] "RemoveContainer" containerID="a298280c9bf1965ef19a615dfe5785582991b165ad7f08075f4ad09950a29373" Nov 27 09:13:59 crc kubenswrapper[4971]: I1127 09:13:59.364595 4971 scope.go:117] "RemoveContainer" containerID="ee730af8dcb48497458691a1dce04d169dd5dfb16c04fcd662f3f8016435faa6" Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.499150 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-qbnqs"] Nov 27 09:14:04 crc kubenswrapper[4971]: E1127 09:14:04.500050 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1dd1642-197d-4b73-a54b-6911a36d37f2" containerName="registry-server" Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.500078 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1dd1642-197d-4b73-a54b-6911a36d37f2" containerName="registry-server" Nov 27 09:14:04 crc kubenswrapper[4971]: E1127 09:14:04.500122 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1dd1642-197d-4b73-a54b-6911a36d37f2" containerName="extract-utilities" Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.500131 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1dd1642-197d-4b73-a54b-6911a36d37f2" containerName="extract-utilities" Nov 27 09:14:04 crc kubenswrapper[4971]: E1127 09:14:04.500165 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea8298a5-6819-4be7-a1d9-1af470a81021" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.500175 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea8298a5-6819-4be7-a1d9-1af470a81021" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Nov 27 09:14:04 crc kubenswrapper[4971]: E1127 09:14:04.500204 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1dd1642-197d-4b73-a54b-6911a36d37f2" containerName="extract-content" Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.500213 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1dd1642-197d-4b73-a54b-6911a36d37f2" containerName="extract-content" Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.500469 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1dd1642-197d-4b73-a54b-6911a36d37f2" containerName="registry-server" Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.500508 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea8298a5-6819-4be7-a1d9-1af470a81021" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.501645 4971 util.go:30] "No sandbox for pod can be found. 
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.501645 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.504330 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.504370 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.504853 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.514957 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-openstack-openstack-networker-l9thw"]
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.516713 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-jnkbm"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.516880 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-networker-l9thw"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.520017 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-hbhvx"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.530147 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.540249 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-qbnqs"]
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.550339 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-networker-l9thw"]
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.596078 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-qbnqs\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.596127 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2rr5\" (UniqueName: \"kubernetes.io/projected/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-kube-api-access-r2rr5\") pod \"bootstrap-openstack-openstack-cell1-qbnqs\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.596174 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-inventory\") pod \"bootstrap-openstack-openstack-cell1-qbnqs\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.596242 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-qbnqs\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.596389 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-ceph\") pod \"bootstrap-openstack-openstack-cell1-qbnqs\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.698612 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/370dcf34-1b59-47fb-9dbc-92e31c352718-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-networker-l9thw\" (UID: \"370dcf34-1b59-47fb-9dbc-92e31c352718\") " pod="openstack/bootstrap-openstack-openstack-networker-l9thw"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.698704 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-ceph\") pod \"bootstrap-openstack-openstack-cell1-qbnqs\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.698730 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/370dcf34-1b59-47fb-9dbc-92e31c352718-inventory\") pod \"bootstrap-openstack-openstack-networker-l9thw\" (UID: \"370dcf34-1b59-47fb-9dbc-92e31c352718\") " pod="openstack/bootstrap-openstack-openstack-networker-l9thw"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.699730 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2rr5\" (UniqueName: \"kubernetes.io/projected/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-kube-api-access-r2rr5\") pod \"bootstrap-openstack-openstack-cell1-qbnqs\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.699766 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-qbnqs\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.699800 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-inventory\") pod \"bootstrap-openstack-openstack-cell1-qbnqs\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.699890 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-qbnqs\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.699953 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/370dcf34-1b59-47fb-9dbc-92e31c352718-ssh-key\") pod \"bootstrap-openstack-openstack-networker-l9thw\" (UID: \"370dcf34-1b59-47fb-9dbc-92e31c352718\") " pod="openstack/bootstrap-openstack-openstack-networker-l9thw"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.700079 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktwkv\" (UniqueName: \"kubernetes.io/projected/370dcf34-1b59-47fb-9dbc-92e31c352718-kube-api-access-ktwkv\") pod \"bootstrap-openstack-openstack-networker-l9thw\" (UID: \"370dcf34-1b59-47fb-9dbc-92e31c352718\") " pod="openstack/bootstrap-openstack-openstack-networker-l9thw"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.705645 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-inventory\") pod \"bootstrap-openstack-openstack-cell1-qbnqs\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.705726 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-qbnqs\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.708157 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-qbnqs\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.712154 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-ceph\") pod \"bootstrap-openstack-openstack-cell1-qbnqs\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.717024 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2rr5\" (UniqueName: \"kubernetes.io/projected/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-kube-api-access-r2rr5\") pod \"bootstrap-openstack-openstack-cell1-qbnqs\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.803221 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/370dcf34-1b59-47fb-9dbc-92e31c352718-inventory\") pod \"bootstrap-openstack-openstack-networker-l9thw\" (UID: \"370dcf34-1b59-47fb-9dbc-92e31c352718\") " pod="openstack/bootstrap-openstack-openstack-networker-l9thw"
Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.803718 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/370dcf34-1b59-47fb-9dbc-92e31c352718-ssh-key\") pod \"bootstrap-openstack-openstack-networker-l9thw\" (UID:
\"370dcf34-1b59-47fb-9dbc-92e31c352718\") " pod="openstack/bootstrap-openstack-openstack-networker-l9thw" Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.803913 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktwkv\" (UniqueName: \"kubernetes.io/projected/370dcf34-1b59-47fb-9dbc-92e31c352718-kube-api-access-ktwkv\") pod \"bootstrap-openstack-openstack-networker-l9thw\" (UID: \"370dcf34-1b59-47fb-9dbc-92e31c352718\") " pod="openstack/bootstrap-openstack-openstack-networker-l9thw" Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.804077 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/370dcf34-1b59-47fb-9dbc-92e31c352718-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-networker-l9thw\" (UID: \"370dcf34-1b59-47fb-9dbc-92e31c352718\") " pod="openstack/bootstrap-openstack-openstack-networker-l9thw" Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.806459 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/370dcf34-1b59-47fb-9dbc-92e31c352718-inventory\") pod \"bootstrap-openstack-openstack-networker-l9thw\" (UID: \"370dcf34-1b59-47fb-9dbc-92e31c352718\") " pod="openstack/bootstrap-openstack-openstack-networker-l9thw" Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.806656 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/370dcf34-1b59-47fb-9dbc-92e31c352718-ssh-key\") pod \"bootstrap-openstack-openstack-networker-l9thw\" (UID: \"370dcf34-1b59-47fb-9dbc-92e31c352718\") " pod="openstack/bootstrap-openstack-openstack-networker-l9thw" Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.809335 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/370dcf34-1b59-47fb-9dbc-92e31c352718-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-networker-l9thw\" (UID: \"370dcf34-1b59-47fb-9dbc-92e31c352718\") " pod="openstack/bootstrap-openstack-openstack-networker-l9thw" Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.820614 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktwkv\" (UniqueName: \"kubernetes.io/projected/370dcf34-1b59-47fb-9dbc-92e31c352718-kube-api-access-ktwkv\") pod \"bootstrap-openstack-openstack-networker-l9thw\" (UID: \"370dcf34-1b59-47fb-9dbc-92e31c352718\") " pod="openstack/bootstrap-openstack-openstack-networker-l9thw" Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.833871 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs" Nov 27 09:14:04 crc kubenswrapper[4971]: I1127 09:14:04.846204 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-networker-l9thw" Nov 27 09:14:05 crc kubenswrapper[4971]: I1127 09:14:05.444853 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-qbnqs"] Nov 27 09:14:05 crc kubenswrapper[4971]: I1127 09:14:05.456312 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 27 09:14:06 crc kubenswrapper[4971]: I1127 09:14:06.401146 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs" event={"ID":"978c7516-1e2e-4d58-a4da-8d85ec47dcd9","Type":"ContainerStarted","Data":"bd19b67067c9bee93c79f576f0975eca70db6148a2347a270e18423921eb1844"} Nov 27 09:14:06 crc kubenswrapper[4971]: I1127 09:14:06.542184 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-networker-l9thw"] Nov 27 09:14:07 crc kubenswrapper[4971]: I1127 09:14:07.434616 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs" event={"ID":"978c7516-1e2e-4d58-a4da-8d85ec47dcd9","Type":"ContainerStarted","Data":"afe5cc894afa78799c4684d43bb79dfeb3ce99d9138fbdfe1b12148259b7d27d"} Nov 27 09:14:07 crc kubenswrapper[4971]: I1127 09:14:07.443273 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-networker-l9thw" event={"ID":"370dcf34-1b59-47fb-9dbc-92e31c352718","Type":"ContainerStarted","Data":"e03ba5e4a0681547dfe692a26829633f2fa1d8a1ebeedf61a77e8fe6e35adf47"} Nov 27 09:14:07 crc kubenswrapper[4971]: I1127 09:14:07.468760 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs" podStartSLOduration=2.621335701 podStartE2EDuration="3.468738899s" podCreationTimestamp="2025-11-27 09:14:04 +0000 UTC" firstStartedPulling="2025-11-27 09:14:05.456016223 +0000 UTC m=+8483.648060141" lastFinishedPulling="2025-11-27 09:14:06.303419421 +0000 UTC m=+8484.495463339" observedRunningTime="2025-11-27 09:14:07.458215838 +0000 UTC m=+8485.650259766" watchObservedRunningTime="2025-11-27 09:14:07.468738899 +0000 UTC m=+8485.660782817" Nov 27 09:14:07 crc kubenswrapper[4971]: I1127 09:14:07.550236 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:14:07 crc kubenswrapper[4971]: E1127 09:14:07.551305 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:14:08 crc kubenswrapper[4971]: I1127 09:14:08.453258 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-networker-l9thw" event={"ID":"370dcf34-1b59-47fb-9dbc-92e31c352718","Type":"ContainerStarted","Data":"cb15287e98cdfb23ec7b85c742532a744b23b0aa6a1f06924c7ed96de9364360"} Nov 27 09:14:08 crc kubenswrapper[4971]: I1127 09:14:08.476320 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-openstack-openstack-networker-l9thw" podStartSLOduration=3.719789085 podStartE2EDuration="4.476301827s" podCreationTimestamp="2025-11-27 09:14:04 +0000 
UTC" firstStartedPulling="2025-11-27 09:14:06.549167494 +0000 UTC m=+8484.741211412" lastFinishedPulling="2025-11-27 09:14:07.305680246 +0000 UTC m=+8485.497724154" observedRunningTime="2025-11-27 09:14:08.472735805 +0000 UTC m=+8486.664779723" watchObservedRunningTime="2025-11-27 09:14:08.476301827 +0000 UTC m=+8486.668345755" Nov 27 09:14:21 crc kubenswrapper[4971]: I1127 09:14:21.550310 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:14:21 crc kubenswrapper[4971]: E1127 09:14:21.552129 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:14:33 crc kubenswrapper[4971]: I1127 09:14:33.551820 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:14:33 crc kubenswrapper[4971]: E1127 09:14:33.552866 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:14:47 crc kubenswrapper[4971]: I1127 09:14:47.550526 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:14:47 crc kubenswrapper[4971]: E1127 09:14:47.551359 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:15:00 crc kubenswrapper[4971]: I1127 09:15:00.168041 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2"] Nov 27 09:15:00 crc kubenswrapper[4971]: I1127 09:15:00.170103 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2" Nov 27 09:15:00 crc kubenswrapper[4971]: I1127 09:15:00.173729 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 27 09:15:00 crc kubenswrapper[4971]: I1127 09:15:00.174274 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 27 09:15:00 crc kubenswrapper[4971]: I1127 09:15:00.208963 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2"] Nov 27 09:15:00 crc kubenswrapper[4971]: I1127 09:15:00.270737 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90e15afa-3845-4e37-bd64-c7be4048bb7e-config-volume\") pod \"collect-profiles-29403915-6gkn2\" (UID: \"90e15afa-3845-4e37-bd64-c7be4048bb7e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2" Nov 27 09:15:00 crc kubenswrapper[4971]: I1127 09:15:00.270823 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8pt7\" (UniqueName: \"kubernetes.io/projected/90e15afa-3845-4e37-bd64-c7be4048bb7e-kube-api-access-c8pt7\") pod \"collect-profiles-29403915-6gkn2\" (UID: \"90e15afa-3845-4e37-bd64-c7be4048bb7e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2" Nov 27 09:15:00 crc kubenswrapper[4971]: I1127 09:15:00.271311 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90e15afa-3845-4e37-bd64-c7be4048bb7e-secret-volume\") pod \"collect-profiles-29403915-6gkn2\" (UID: \"90e15afa-3845-4e37-bd64-c7be4048bb7e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2" Nov 27 09:15:00 crc kubenswrapper[4971]: I1127 09:15:00.373241 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8pt7\" (UniqueName: \"kubernetes.io/projected/90e15afa-3845-4e37-bd64-c7be4048bb7e-kube-api-access-c8pt7\") pod \"collect-profiles-29403915-6gkn2\" (UID: \"90e15afa-3845-4e37-bd64-c7be4048bb7e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2" Nov 27 09:15:00 crc kubenswrapper[4971]: I1127 09:15:00.373427 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90e15afa-3845-4e37-bd64-c7be4048bb7e-secret-volume\") pod \"collect-profiles-29403915-6gkn2\" (UID: \"90e15afa-3845-4e37-bd64-c7be4048bb7e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2" Nov 27 09:15:00 crc kubenswrapper[4971]: I1127 09:15:00.373503 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90e15afa-3845-4e37-bd64-c7be4048bb7e-config-volume\") pod \"collect-profiles-29403915-6gkn2\" (UID: \"90e15afa-3845-4e37-bd64-c7be4048bb7e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2" Nov 27 09:15:00 crc kubenswrapper[4971]: I1127 09:15:00.374598 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90e15afa-3845-4e37-bd64-c7be4048bb7e-config-volume\") pod 
\"collect-profiles-29403915-6gkn2\" (UID: \"90e15afa-3845-4e37-bd64-c7be4048bb7e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2" Nov 27 09:15:00 crc kubenswrapper[4971]: I1127 09:15:00.381395 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90e15afa-3845-4e37-bd64-c7be4048bb7e-secret-volume\") pod \"collect-profiles-29403915-6gkn2\" (UID: \"90e15afa-3845-4e37-bd64-c7be4048bb7e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2" Nov 27 09:15:00 crc kubenswrapper[4971]: I1127 09:15:00.392839 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8pt7\" (UniqueName: \"kubernetes.io/projected/90e15afa-3845-4e37-bd64-c7be4048bb7e-kube-api-access-c8pt7\") pod \"collect-profiles-29403915-6gkn2\" (UID: \"90e15afa-3845-4e37-bd64-c7be4048bb7e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2" Nov 27 09:15:00 crc kubenswrapper[4971]: I1127 09:15:00.499062 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2" Nov 27 09:15:00 crc kubenswrapper[4971]: I1127 09:15:00.987108 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2"] Nov 27 09:15:01 crc kubenswrapper[4971]: I1127 09:15:01.099659 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2" event={"ID":"90e15afa-3845-4e37-bd64-c7be4048bb7e","Type":"ContainerStarted","Data":"3ffe8e6323d0ed326e94676a4ed27b8fb7a2421818517f47a27315586a1c84fc"} Nov 27 09:15:01 crc kubenswrapper[4971]: I1127 09:15:01.551892 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:15:01 crc kubenswrapper[4971]: E1127 09:15:01.552720 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:15:02 crc kubenswrapper[4971]: I1127 09:15:02.112942 4971 generic.go:334] "Generic (PLEG): container finished" podID="90e15afa-3845-4e37-bd64-c7be4048bb7e" containerID="dbcb1396dba14457ae596606fcf861328a602f0d6d7c409228b681b96b433242" exitCode=0 Nov 27 09:15:02 crc kubenswrapper[4971]: I1127 09:15:02.113001 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2" event={"ID":"90e15afa-3845-4e37-bd64-c7be4048bb7e","Type":"ContainerDied","Data":"dbcb1396dba14457ae596606fcf861328a602f0d6d7c409228b681b96b433242"} Nov 27 09:15:03 crc kubenswrapper[4971]: I1127 09:15:03.511918 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2" Nov 27 09:15:03 crc kubenswrapper[4971]: I1127 09:15:03.665355 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90e15afa-3845-4e37-bd64-c7be4048bb7e-secret-volume\") pod \"90e15afa-3845-4e37-bd64-c7be4048bb7e\" (UID: \"90e15afa-3845-4e37-bd64-c7be4048bb7e\") " Nov 27 09:15:03 crc kubenswrapper[4971]: I1127 09:15:03.666038 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8pt7\" (UniqueName: \"kubernetes.io/projected/90e15afa-3845-4e37-bd64-c7be4048bb7e-kube-api-access-c8pt7\") pod \"90e15afa-3845-4e37-bd64-c7be4048bb7e\" (UID: \"90e15afa-3845-4e37-bd64-c7be4048bb7e\") " Nov 27 09:15:03 crc kubenswrapper[4971]: I1127 09:15:03.666347 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90e15afa-3845-4e37-bd64-c7be4048bb7e-config-volume\") pod \"90e15afa-3845-4e37-bd64-c7be4048bb7e\" (UID: \"90e15afa-3845-4e37-bd64-c7be4048bb7e\") " Nov 27 09:15:03 crc kubenswrapper[4971]: I1127 09:15:03.667274 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90e15afa-3845-4e37-bd64-c7be4048bb7e-config-volume" (OuterVolumeSpecName: "config-volume") pod "90e15afa-3845-4e37-bd64-c7be4048bb7e" (UID: "90e15afa-3845-4e37-bd64-c7be4048bb7e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:15:03 crc kubenswrapper[4971]: I1127 09:15:03.673967 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90e15afa-3845-4e37-bd64-c7be4048bb7e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "90e15afa-3845-4e37-bd64-c7be4048bb7e" (UID: "90e15afa-3845-4e37-bd64-c7be4048bb7e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:15:03 crc kubenswrapper[4971]: I1127 09:15:03.675134 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90e15afa-3845-4e37-bd64-c7be4048bb7e-kube-api-access-c8pt7" (OuterVolumeSpecName: "kube-api-access-c8pt7") pod "90e15afa-3845-4e37-bd64-c7be4048bb7e" (UID: "90e15afa-3845-4e37-bd64-c7be4048bb7e"). InnerVolumeSpecName "kube-api-access-c8pt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:15:03 crc kubenswrapper[4971]: I1127 09:15:03.768796 4971 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90e15afa-3845-4e37-bd64-c7be4048bb7e-config-volume\") on node \"crc\" DevicePath \"\"" Nov 27 09:15:03 crc kubenswrapper[4971]: I1127 09:15:03.768838 4971 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90e15afa-3845-4e37-bd64-c7be4048bb7e-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 27 09:15:03 crc kubenswrapper[4971]: I1127 09:15:03.768850 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8pt7\" (UniqueName: \"kubernetes.io/projected/90e15afa-3845-4e37-bd64-c7be4048bb7e-kube-api-access-c8pt7\") on node \"crc\" DevicePath \"\"" Nov 27 09:15:04 crc kubenswrapper[4971]: I1127 09:15:04.136040 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2" event={"ID":"90e15afa-3845-4e37-bd64-c7be4048bb7e","Type":"ContainerDied","Data":"3ffe8e6323d0ed326e94676a4ed27b8fb7a2421818517f47a27315586a1c84fc"} Nov 27 09:15:04 crc kubenswrapper[4971]: I1127 09:15:04.136111 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ffe8e6323d0ed326e94676a4ed27b8fb7a2421818517f47a27315586a1c84fc" Nov 27 09:15:04 crc kubenswrapper[4971]: I1127 09:15:04.136498 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2" Nov 27 09:15:04 crc kubenswrapper[4971]: I1127 09:15:04.619267 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r"] Nov 27 09:15:04 crc kubenswrapper[4971]: I1127 09:15:04.628785 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403870-pw78r"] Nov 27 09:15:06 crc kubenswrapper[4971]: I1127 09:15:06.563742 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c99677c-3c5d-410f-af02-63380ec76457" path="/var/lib/kubelet/pods/5c99677c-3c5d-410f-af02-63380ec76457/volumes" Nov 27 09:15:13 crc kubenswrapper[4971]: I1127 09:15:13.551685 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:15:13 crc kubenswrapper[4971]: E1127 09:15:13.553245 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:15:25 crc kubenswrapper[4971]: I1127 09:15:25.550899 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:15:25 crc kubenswrapper[4971]: E1127 09:15:25.552351 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:15:36 crc kubenswrapper[4971]: I1127 09:15:36.550896 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:15:36 crc kubenswrapper[4971]: E1127 09:15:36.551674 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:15:49 crc kubenswrapper[4971]: I1127 09:15:49.550956 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:15:49 crc kubenswrapper[4971]: E1127 09:15:49.552171 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:15:59 crc kubenswrapper[4971]: I1127 09:15:59.552360 4971 scope.go:117] "RemoveContainer" containerID="3355ef0013f2cbd8c31f4d2e4c0c166b808ac1a58637fd6f326b2f81ed45e8c4" Nov 27 09:16:00 crc kubenswrapper[4971]: I1127 09:16:00.551457 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:16:00 crc kubenswrapper[4971]: E1127 09:16:00.552326 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:16:12 crc kubenswrapper[4971]: I1127 09:16:12.556397 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:16:12 crc kubenswrapper[4971]: E1127 09:16:12.557167 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:16:25 crc kubenswrapper[4971]: I1127 09:16:25.550027 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:16:25 crc kubenswrapper[4971]: E1127 09:16:25.550849 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:16:36 crc kubenswrapper[4971]: I1127 09:16:36.552357 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:16:36 crc kubenswrapper[4971]: E1127 09:16:36.553474 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:16:48 crc kubenswrapper[4971]: I1127 09:16:48.556575 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:16:48 crc kubenswrapper[4971]: E1127 09:16:48.558045 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:17:02 crc kubenswrapper[4971]: I1127 09:17:02.373165 4971 generic.go:334] "Generic (PLEG): container finished" podID="978c7516-1e2e-4d58-a4da-8d85ec47dcd9" containerID="afe5cc894afa78799c4684d43bb79dfeb3ce99d9138fbdfe1b12148259b7d27d" exitCode=0 Nov 27 09:17:02 crc kubenswrapper[4971]: I1127 09:17:02.373256 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs" event={"ID":"978c7516-1e2e-4d58-a4da-8d85ec47dcd9","Type":"ContainerDied","Data":"afe5cc894afa78799c4684d43bb79dfeb3ce99d9138fbdfe1b12148259b7d27d"} Nov 27 09:17:02 crc kubenswrapper[4971]: I1127 09:17:02.558923 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:17:02 crc kubenswrapper[4971]: E1127 09:17:02.559298 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:17:03 crc kubenswrapper[4971]: I1127 09:17:03.825713 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs" Nov 27 09:17:03 crc kubenswrapper[4971]: I1127 09:17:03.894387 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-bootstrap-combined-ca-bundle\") pod \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " Nov 27 09:17:03 crc kubenswrapper[4971]: I1127 09:17:03.894547 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-ceph\") pod \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " Nov 27 09:17:03 crc kubenswrapper[4971]: I1127 09:17:03.894581 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-ssh-key\") pod \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " Nov 27 09:17:03 crc kubenswrapper[4971]: I1127 09:17:03.894616 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-inventory\") pod \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " Nov 27 09:17:03 crc kubenswrapper[4971]: I1127 09:17:03.894687 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2rr5\" (UniqueName: \"kubernetes.io/projected/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-kube-api-access-r2rr5\") pod \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\" (UID: \"978c7516-1e2e-4d58-a4da-8d85ec47dcd9\") " Nov 27 09:17:03 crc kubenswrapper[4971]: I1127 09:17:03.900854 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-kube-api-access-r2rr5" (OuterVolumeSpecName: "kube-api-access-r2rr5") pod "978c7516-1e2e-4d58-a4da-8d85ec47dcd9" (UID: "978c7516-1e2e-4d58-a4da-8d85ec47dcd9"). InnerVolumeSpecName "kube-api-access-r2rr5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:17:03 crc kubenswrapper[4971]: I1127 09:17:03.902321 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "978c7516-1e2e-4d58-a4da-8d85ec47dcd9" (UID: "978c7516-1e2e-4d58-a4da-8d85ec47dcd9"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:17:03 crc kubenswrapper[4971]: I1127 09:17:03.905636 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-ceph" (OuterVolumeSpecName: "ceph") pod "978c7516-1e2e-4d58-a4da-8d85ec47dcd9" (UID: "978c7516-1e2e-4d58-a4da-8d85ec47dcd9"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:17:03 crc kubenswrapper[4971]: I1127 09:17:03.922938 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "978c7516-1e2e-4d58-a4da-8d85ec47dcd9" (UID: "978c7516-1e2e-4d58-a4da-8d85ec47dcd9"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:17:03 crc kubenswrapper[4971]: I1127 09:17:03.932640 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-inventory" (OuterVolumeSpecName: "inventory") pod "978c7516-1e2e-4d58-a4da-8d85ec47dcd9" (UID: "978c7516-1e2e-4d58-a4da-8d85ec47dcd9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:17:03 crc kubenswrapper[4971]: I1127 09:17:03.997396 4971 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:17:03 crc kubenswrapper[4971]: I1127 09:17:03.997450 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-ceph\") on node \"crc\" DevicePath \"\"" Nov 27 09:17:03 crc kubenswrapper[4971]: I1127 09:17:03.997460 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:17:03 crc kubenswrapper[4971]: I1127 09:17:03.997471 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:17:03 crc kubenswrapper[4971]: I1127 09:17:03.997482 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2rr5\" (UniqueName: \"kubernetes.io/projected/978c7516-1e2e-4d58-a4da-8d85ec47dcd9-kube-api-access-r2rr5\") on node \"crc\" DevicePath \"\"" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.395734 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs" event={"ID":"978c7516-1e2e-4d58-a4da-8d85ec47dcd9","Type":"ContainerDied","Data":"bd19b67067c9bee93c79f576f0975eca70db6148a2347a270e18423921eb1844"} Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.395791 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd19b67067c9bee93c79f576f0975eca70db6148a2347a270e18423921eb1844" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.395848 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-qbnqs" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.491371 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-n7bzc"] Nov 27 09:17:04 crc kubenswrapper[4971]: E1127 09:17:04.492358 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90e15afa-3845-4e37-bd64-c7be4048bb7e" containerName="collect-profiles" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.492391 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="90e15afa-3845-4e37-bd64-c7be4048bb7e" containerName="collect-profiles" Nov 27 09:17:04 crc kubenswrapper[4971]: E1127 09:17:04.492422 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="978c7516-1e2e-4d58-a4da-8d85ec47dcd9" containerName="bootstrap-openstack-openstack-cell1" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.492438 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="978c7516-1e2e-4d58-a4da-8d85ec47dcd9" containerName="bootstrap-openstack-openstack-cell1" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.493090 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="978c7516-1e2e-4d58-a4da-8d85ec47dcd9" containerName="bootstrap-openstack-openstack-cell1" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.493154 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="90e15afa-3845-4e37-bd64-c7be4048bb7e" containerName="collect-profiles" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.494324 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.497888 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-jnkbm" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.497966 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.508177 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-n7bzc"] Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.613147 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48953229-16fe-403a-a919-a6aa7ed6a6bb-inventory\") pod \"download-cache-openstack-openstack-cell1-n7bzc\" (UID: \"48953229-16fe-403a-a919-a6aa7ed6a6bb\") " pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.613723 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/48953229-16fe-403a-a919-a6aa7ed6a6bb-ssh-key\") pod \"download-cache-openstack-openstack-cell1-n7bzc\" (UID: \"48953229-16fe-403a-a919-a6aa7ed6a6bb\") " pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.613981 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2w2j\" (UniqueName: \"kubernetes.io/projected/48953229-16fe-403a-a919-a6aa7ed6a6bb-kube-api-access-l2w2j\") pod \"download-cache-openstack-openstack-cell1-n7bzc\" (UID: \"48953229-16fe-403a-a919-a6aa7ed6a6bb\") " 
pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.614033 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/48953229-16fe-403a-a919-a6aa7ed6a6bb-ceph\") pod \"download-cache-openstack-openstack-cell1-n7bzc\" (UID: \"48953229-16fe-403a-a919-a6aa7ed6a6bb\") " pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.716482 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/48953229-16fe-403a-a919-a6aa7ed6a6bb-ssh-key\") pod \"download-cache-openstack-openstack-cell1-n7bzc\" (UID: \"48953229-16fe-403a-a919-a6aa7ed6a6bb\") " pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.716649 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2w2j\" (UniqueName: \"kubernetes.io/projected/48953229-16fe-403a-a919-a6aa7ed6a6bb-kube-api-access-l2w2j\") pod \"download-cache-openstack-openstack-cell1-n7bzc\" (UID: \"48953229-16fe-403a-a919-a6aa7ed6a6bb\") " pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.716682 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/48953229-16fe-403a-a919-a6aa7ed6a6bb-ceph\") pod \"download-cache-openstack-openstack-cell1-n7bzc\" (UID: \"48953229-16fe-403a-a919-a6aa7ed6a6bb\") " pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.716747 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48953229-16fe-403a-a919-a6aa7ed6a6bb-inventory\") pod \"download-cache-openstack-openstack-cell1-n7bzc\" (UID: \"48953229-16fe-403a-a919-a6aa7ed6a6bb\") " pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.720931 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/48953229-16fe-403a-a919-a6aa7ed6a6bb-ssh-key\") pod \"download-cache-openstack-openstack-cell1-n7bzc\" (UID: \"48953229-16fe-403a-a919-a6aa7ed6a6bb\") " pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.720968 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/48953229-16fe-403a-a919-a6aa7ed6a6bb-ceph\") pod \"download-cache-openstack-openstack-cell1-n7bzc\" (UID: \"48953229-16fe-403a-a919-a6aa7ed6a6bb\") " pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.721469 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48953229-16fe-403a-a919-a6aa7ed6a6bb-inventory\") pod \"download-cache-openstack-openstack-cell1-n7bzc\" (UID: \"48953229-16fe-403a-a919-a6aa7ed6a6bb\") " pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.753956 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2w2j\" (UniqueName: 
\"kubernetes.io/projected/48953229-16fe-403a-a919-a6aa7ed6a6bb-kube-api-access-l2w2j\") pod \"download-cache-openstack-openstack-cell1-n7bzc\" (UID: \"48953229-16fe-403a-a919-a6aa7ed6a6bb\") " pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" Nov 27 09:17:04 crc kubenswrapper[4971]: I1127 09:17:04.822587 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" Nov 27 09:17:05 crc kubenswrapper[4971]: I1127 09:17:05.439392 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-n7bzc"] Nov 27 09:17:06 crc kubenswrapper[4971]: I1127 09:17:06.424468 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" event={"ID":"48953229-16fe-403a-a919-a6aa7ed6a6bb","Type":"ContainerStarted","Data":"644d6eb848e0758edcbdfc276699ef796a3e7ce9f8eeb0bd6eef30b518c4c26f"} Nov 27 09:17:06 crc kubenswrapper[4971]: I1127 09:17:06.424882 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" event={"ID":"48953229-16fe-403a-a919-a6aa7ed6a6bb","Type":"ContainerStarted","Data":"a6ad135e7f1c2445e6acbc0fccb22504a9ed43243ad1d7c1ffde28cb84b0864b"} Nov 27 09:17:06 crc kubenswrapper[4971]: I1127 09:17:06.508821 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" podStartSLOduration=1.879657211 podStartE2EDuration="2.508788857s" podCreationTimestamp="2025-11-27 09:17:04 +0000 UTC" firstStartedPulling="2025-11-27 09:17:05.450976268 +0000 UTC m=+8663.643020186" lastFinishedPulling="2025-11-27 09:17:06.080107914 +0000 UTC m=+8664.272151832" observedRunningTime="2025-11-27 09:17:06.498283158 +0000 UTC m=+8664.690327076" watchObservedRunningTime="2025-11-27 09:17:06.508788857 +0000 UTC m=+8664.700832775" Nov 27 09:17:10 crc kubenswrapper[4971]: I1127 09:17:10.471221 4971 generic.go:334] "Generic (PLEG): container finished" podID="370dcf34-1b59-47fb-9dbc-92e31c352718" containerID="cb15287e98cdfb23ec7b85c742532a744b23b0aa6a1f06924c7ed96de9364360" exitCode=0 Nov 27 09:17:10 crc kubenswrapper[4971]: I1127 09:17:10.471294 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-networker-l9thw" event={"ID":"370dcf34-1b59-47fb-9dbc-92e31c352718","Type":"ContainerDied","Data":"cb15287e98cdfb23ec7b85c742532a744b23b0aa6a1f06924c7ed96de9364360"} Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.026436 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-networker-l9thw" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.144811 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/370dcf34-1b59-47fb-9dbc-92e31c352718-inventory\") pod \"370dcf34-1b59-47fb-9dbc-92e31c352718\" (UID: \"370dcf34-1b59-47fb-9dbc-92e31c352718\") " Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.144970 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/370dcf34-1b59-47fb-9dbc-92e31c352718-bootstrap-combined-ca-bundle\") pod \"370dcf34-1b59-47fb-9dbc-92e31c352718\" (UID: \"370dcf34-1b59-47fb-9dbc-92e31c352718\") " Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.145217 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktwkv\" (UniqueName: \"kubernetes.io/projected/370dcf34-1b59-47fb-9dbc-92e31c352718-kube-api-access-ktwkv\") pod \"370dcf34-1b59-47fb-9dbc-92e31c352718\" (UID: \"370dcf34-1b59-47fb-9dbc-92e31c352718\") " Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.145328 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/370dcf34-1b59-47fb-9dbc-92e31c352718-ssh-key\") pod \"370dcf34-1b59-47fb-9dbc-92e31c352718\" (UID: \"370dcf34-1b59-47fb-9dbc-92e31c352718\") " Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.153704 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/370dcf34-1b59-47fb-9dbc-92e31c352718-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "370dcf34-1b59-47fb-9dbc-92e31c352718" (UID: "370dcf34-1b59-47fb-9dbc-92e31c352718"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.156806 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/370dcf34-1b59-47fb-9dbc-92e31c352718-kube-api-access-ktwkv" (OuterVolumeSpecName: "kube-api-access-ktwkv") pod "370dcf34-1b59-47fb-9dbc-92e31c352718" (UID: "370dcf34-1b59-47fb-9dbc-92e31c352718"). InnerVolumeSpecName "kube-api-access-ktwkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.218450 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/370dcf34-1b59-47fb-9dbc-92e31c352718-inventory" (OuterVolumeSpecName: "inventory") pod "370dcf34-1b59-47fb-9dbc-92e31c352718" (UID: "370dcf34-1b59-47fb-9dbc-92e31c352718"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.234890 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/370dcf34-1b59-47fb-9dbc-92e31c352718-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "370dcf34-1b59-47fb-9dbc-92e31c352718" (UID: "370dcf34-1b59-47fb-9dbc-92e31c352718"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.249364 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/370dcf34-1b59-47fb-9dbc-92e31c352718-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.249448 4971 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/370dcf34-1b59-47fb-9dbc-92e31c352718-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.249471 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktwkv\" (UniqueName: \"kubernetes.io/projected/370dcf34-1b59-47fb-9dbc-92e31c352718-kube-api-access-ktwkv\") on node \"crc\" DevicePath \"\"" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.249483 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/370dcf34-1b59-47fb-9dbc-92e31c352718-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.493304 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-networker-l9thw" event={"ID":"370dcf34-1b59-47fb-9dbc-92e31c352718","Type":"ContainerDied","Data":"e03ba5e4a0681547dfe692a26829633f2fa1d8a1ebeedf61a77e8fe6e35adf47"} Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.493371 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e03ba5e4a0681547dfe692a26829633f2fa1d8a1ebeedf61a77e8fe6e35adf47" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.493473 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-networker-l9thw" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.583269 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-openstack-openstack-networker-pbgpj"] Nov 27 09:17:12 crc kubenswrapper[4971]: E1127 09:17:12.583889 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="370dcf34-1b59-47fb-9dbc-92e31c352718" containerName="bootstrap-openstack-openstack-networker" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.583909 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="370dcf34-1b59-47fb-9dbc-92e31c352718" containerName="bootstrap-openstack-openstack-networker" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.584137 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="370dcf34-1b59-47fb-9dbc-92e31c352718" containerName="bootstrap-openstack-openstack-networker" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.585140 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-networker-pbgpj" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.590943 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.591680 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-hbhvx" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.599504 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-networker-pbgpj"] Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.660947 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thhp4\" (UniqueName: \"kubernetes.io/projected/6c999c8f-a99f-49ec-9899-602fb664ef82-kube-api-access-thhp4\") pod \"download-cache-openstack-openstack-networker-pbgpj\" (UID: \"6c999c8f-a99f-49ec-9899-602fb664ef82\") " pod="openstack/download-cache-openstack-openstack-networker-pbgpj" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.661172 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6c999c8f-a99f-49ec-9899-602fb664ef82-inventory\") pod \"download-cache-openstack-openstack-networker-pbgpj\" (UID: \"6c999c8f-a99f-49ec-9899-602fb664ef82\") " pod="openstack/download-cache-openstack-openstack-networker-pbgpj" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.661521 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6c999c8f-a99f-49ec-9899-602fb664ef82-ssh-key\") pod \"download-cache-openstack-openstack-networker-pbgpj\" (UID: \"6c999c8f-a99f-49ec-9899-602fb664ef82\") " pod="openstack/download-cache-openstack-openstack-networker-pbgpj" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.764326 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6c999c8f-a99f-49ec-9899-602fb664ef82-inventory\") pod \"download-cache-openstack-openstack-networker-pbgpj\" (UID: \"6c999c8f-a99f-49ec-9899-602fb664ef82\") " pod="openstack/download-cache-openstack-openstack-networker-pbgpj" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.764459 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6c999c8f-a99f-49ec-9899-602fb664ef82-ssh-key\") pod \"download-cache-openstack-openstack-networker-pbgpj\" (UID: \"6c999c8f-a99f-49ec-9899-602fb664ef82\") " pod="openstack/download-cache-openstack-openstack-networker-pbgpj" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.764501 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thhp4\" (UniqueName: \"kubernetes.io/projected/6c999c8f-a99f-49ec-9899-602fb664ef82-kube-api-access-thhp4\") pod \"download-cache-openstack-openstack-networker-pbgpj\" (UID: \"6c999c8f-a99f-49ec-9899-602fb664ef82\") " pod="openstack/download-cache-openstack-openstack-networker-pbgpj" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.771396 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6c999c8f-a99f-49ec-9899-602fb664ef82-ssh-key\") pod \"download-cache-openstack-openstack-networker-pbgpj\" 
(UID: \"6c999c8f-a99f-49ec-9899-602fb664ef82\") " pod="openstack/download-cache-openstack-openstack-networker-pbgpj" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.773214 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6c999c8f-a99f-49ec-9899-602fb664ef82-inventory\") pod \"download-cache-openstack-openstack-networker-pbgpj\" (UID: \"6c999c8f-a99f-49ec-9899-602fb664ef82\") " pod="openstack/download-cache-openstack-openstack-networker-pbgpj" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.790274 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thhp4\" (UniqueName: \"kubernetes.io/projected/6c999c8f-a99f-49ec-9899-602fb664ef82-kube-api-access-thhp4\") pod \"download-cache-openstack-openstack-networker-pbgpj\" (UID: \"6c999c8f-a99f-49ec-9899-602fb664ef82\") " pod="openstack/download-cache-openstack-openstack-networker-pbgpj" Nov 27 09:17:12 crc kubenswrapper[4971]: I1127 09:17:12.910126 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-networker-pbgpj" Nov 27 09:17:13 crc kubenswrapper[4971]: I1127 09:17:13.359605 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-networker-pbgpj"] Nov 27 09:17:13 crc kubenswrapper[4971]: I1127 09:17:13.506887 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-networker-pbgpj" event={"ID":"6c999c8f-a99f-49ec-9899-602fb664ef82","Type":"ContainerStarted","Data":"976a966de24c01e1bd8609a6bd1d592797e47e9740c7fdccdde5c134c6227482"} Nov 27 09:17:13 crc kubenswrapper[4971]: I1127 09:17:13.551931 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:17:13 crc kubenswrapper[4971]: E1127 09:17:13.552279 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:17:14 crc kubenswrapper[4971]: I1127 09:17:14.522144 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-networker-pbgpj" event={"ID":"6c999c8f-a99f-49ec-9899-602fb664ef82","Type":"ContainerStarted","Data":"803be98a2789f7b19701a7781dcf32d81613e90b7e37c6e701e7487a059a51dc"} Nov 27 09:17:14 crc kubenswrapper[4971]: I1127 09:17:14.550769 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-openstack-openstack-networker-pbgpj" podStartSLOduration=1.8408194340000001 podStartE2EDuration="2.550748394s" podCreationTimestamp="2025-11-27 09:17:12 +0000 UTC" firstStartedPulling="2025-11-27 09:17:13.370058765 +0000 UTC m=+8671.562102683" lastFinishedPulling="2025-11-27 09:17:14.079987725 +0000 UTC m=+8672.272031643" observedRunningTime="2025-11-27 09:17:14.545458424 +0000 UTC m=+8672.737502362" watchObservedRunningTime="2025-11-27 09:17:14.550748394 +0000 UTC m=+8672.742792312" Nov 27 09:17:27 crc kubenswrapper[4971]: I1127 09:17:27.551321 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 
09:17:27 crc kubenswrapper[4971]: E1127 09:17:27.552572 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:17:42 crc kubenswrapper[4971]: I1127 09:17:42.556744 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:17:42 crc kubenswrapper[4971]: E1127 09:17:42.557925 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:17:55 crc kubenswrapper[4971]: I1127 09:17:55.551428 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:17:55 crc kubenswrapper[4971]: E1127 09:17:55.552824 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:18:08 crc kubenswrapper[4971]: I1127 09:18:08.551943 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:18:08 crc kubenswrapper[4971]: E1127 09:18:08.552898 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:18:19 crc kubenswrapper[4971]: I1127 09:18:19.170618 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tmbf5"] Nov 27 09:18:19 crc kubenswrapper[4971]: I1127 09:18:19.173364 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tmbf5" Nov 27 09:18:19 crc kubenswrapper[4971]: I1127 09:18:19.191311 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tmbf5"] Nov 27 09:18:19 crc kubenswrapper[4971]: I1127 09:18:19.271358 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f91433f-83e0-4cb5-8556-02e268c1f9d2-catalog-content\") pod \"redhat-marketplace-tmbf5\" (UID: \"4f91433f-83e0-4cb5-8556-02e268c1f9d2\") " pod="openshift-marketplace/redhat-marketplace-tmbf5" Nov 27 09:18:19 crc kubenswrapper[4971]: I1127 09:18:19.271458 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f91433f-83e0-4cb5-8556-02e268c1f9d2-utilities\") pod \"redhat-marketplace-tmbf5\" (UID: \"4f91433f-83e0-4cb5-8556-02e268c1f9d2\") " pod="openshift-marketplace/redhat-marketplace-tmbf5" Nov 27 09:18:19 crc kubenswrapper[4971]: I1127 09:18:19.272107 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmd89\" (UniqueName: \"kubernetes.io/projected/4f91433f-83e0-4cb5-8556-02e268c1f9d2-kube-api-access-cmd89\") pod \"redhat-marketplace-tmbf5\" (UID: \"4f91433f-83e0-4cb5-8556-02e268c1f9d2\") " pod="openshift-marketplace/redhat-marketplace-tmbf5" Nov 27 09:18:19 crc kubenswrapper[4971]: I1127 09:18:19.374722 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f91433f-83e0-4cb5-8556-02e268c1f9d2-catalog-content\") pod \"redhat-marketplace-tmbf5\" (UID: \"4f91433f-83e0-4cb5-8556-02e268c1f9d2\") " pod="openshift-marketplace/redhat-marketplace-tmbf5" Nov 27 09:18:19 crc kubenswrapper[4971]: I1127 09:18:19.374806 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f91433f-83e0-4cb5-8556-02e268c1f9d2-utilities\") pod \"redhat-marketplace-tmbf5\" (UID: \"4f91433f-83e0-4cb5-8556-02e268c1f9d2\") " pod="openshift-marketplace/redhat-marketplace-tmbf5" Nov 27 09:18:19 crc kubenswrapper[4971]: I1127 09:18:19.374937 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmd89\" (UniqueName: \"kubernetes.io/projected/4f91433f-83e0-4cb5-8556-02e268c1f9d2-kube-api-access-cmd89\") pod \"redhat-marketplace-tmbf5\" (UID: \"4f91433f-83e0-4cb5-8556-02e268c1f9d2\") " pod="openshift-marketplace/redhat-marketplace-tmbf5" Nov 27 09:18:19 crc kubenswrapper[4971]: I1127 09:18:19.375325 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f91433f-83e0-4cb5-8556-02e268c1f9d2-catalog-content\") pod \"redhat-marketplace-tmbf5\" (UID: \"4f91433f-83e0-4cb5-8556-02e268c1f9d2\") " pod="openshift-marketplace/redhat-marketplace-tmbf5" Nov 27 09:18:19 crc kubenswrapper[4971]: I1127 09:18:19.375345 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f91433f-83e0-4cb5-8556-02e268c1f9d2-utilities\") pod \"redhat-marketplace-tmbf5\" (UID: \"4f91433f-83e0-4cb5-8556-02e268c1f9d2\") " pod="openshift-marketplace/redhat-marketplace-tmbf5" Nov 27 09:18:19 crc kubenswrapper[4971]: I1127 09:18:19.404959 4971 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-cmd89\" (UniqueName: \"kubernetes.io/projected/4f91433f-83e0-4cb5-8556-02e268c1f9d2-kube-api-access-cmd89\") pod \"redhat-marketplace-tmbf5\" (UID: \"4f91433f-83e0-4cb5-8556-02e268c1f9d2\") " pod="openshift-marketplace/redhat-marketplace-tmbf5" Nov 27 09:18:19 crc kubenswrapper[4971]: I1127 09:18:19.527803 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tmbf5" Nov 27 09:18:20 crc kubenswrapper[4971]: I1127 09:18:20.020971 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tmbf5"] Nov 27 09:18:20 crc kubenswrapper[4971]: I1127 09:18:20.263959 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tmbf5" event={"ID":"4f91433f-83e0-4cb5-8556-02e268c1f9d2","Type":"ContainerStarted","Data":"4e6b364949c4cd6d949163527f809f88f091c436d6c5a2936b81fcca3e576466"} Nov 27 09:18:20 crc kubenswrapper[4971]: I1127 09:18:20.264029 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tmbf5" event={"ID":"4f91433f-83e0-4cb5-8556-02e268c1f9d2","Type":"ContainerStarted","Data":"77519f55dcb71862f344f82eb198bc7f7519f7361fed4d1714002e9ef623b9a8"} Nov 27 09:18:21 crc kubenswrapper[4971]: I1127 09:18:21.280004 4971 generic.go:334] "Generic (PLEG): container finished" podID="4f91433f-83e0-4cb5-8556-02e268c1f9d2" containerID="4e6b364949c4cd6d949163527f809f88f091c436d6c5a2936b81fcca3e576466" exitCode=0 Nov 27 09:18:21 crc kubenswrapper[4971]: I1127 09:18:21.280192 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tmbf5" event={"ID":"4f91433f-83e0-4cb5-8556-02e268c1f9d2","Type":"ContainerDied","Data":"4e6b364949c4cd6d949163527f809f88f091c436d6c5a2936b81fcca3e576466"} Nov 27 09:18:22 crc kubenswrapper[4971]: I1127 09:18:22.291931 4971 generic.go:334] "Generic (PLEG): container finished" podID="4f91433f-83e0-4cb5-8556-02e268c1f9d2" containerID="3e0ee92c075f7f7da5244828bef08356e2d7d4ef1f698f029fcff3ed5eb69abc" exitCode=0 Nov 27 09:18:22 crc kubenswrapper[4971]: I1127 09:18:22.291965 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tmbf5" event={"ID":"4f91433f-83e0-4cb5-8556-02e268c1f9d2","Type":"ContainerDied","Data":"3e0ee92c075f7f7da5244828bef08356e2d7d4ef1f698f029fcff3ed5eb69abc"} Nov 27 09:18:22 crc kubenswrapper[4971]: I1127 09:18:22.560421 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:18:22 crc kubenswrapper[4971]: E1127 09:18:22.560952 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:18:24 crc kubenswrapper[4971]: I1127 09:18:24.316691 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tmbf5" event={"ID":"4f91433f-83e0-4cb5-8556-02e268c1f9d2","Type":"ContainerStarted","Data":"7671231c1e8ec06b1c6c49ef46afd831a820b0dbe14ebea41da5b58e0b2136a4"} Nov 27 09:18:24 crc kubenswrapper[4971]: I1127 09:18:24.349149 4971 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openshift-marketplace/redhat-marketplace-tmbf5" podStartSLOduration=2.188987274 podStartE2EDuration="5.349109946s" podCreationTimestamp="2025-11-27 09:18:19 +0000 UTC" firstStartedPulling="2025-11-27 09:18:20.265642983 +0000 UTC m=+8738.457686901" lastFinishedPulling="2025-11-27 09:18:23.425765655 +0000 UTC m=+8741.617809573" observedRunningTime="2025-11-27 09:18:24.342124138 +0000 UTC m=+8742.534168066" watchObservedRunningTime="2025-11-27 09:18:24.349109946 +0000 UTC m=+8742.541153864" Nov 27 09:18:25 crc kubenswrapper[4971]: I1127 09:18:25.326855 4971 generic.go:334] "Generic (PLEG): container finished" podID="6c999c8f-a99f-49ec-9899-602fb664ef82" containerID="803be98a2789f7b19701a7781dcf32d81613e90b7e37c6e701e7487a059a51dc" exitCode=0 Nov 27 09:18:25 crc kubenswrapper[4971]: I1127 09:18:25.326945 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-networker-pbgpj" event={"ID":"6c999c8f-a99f-49ec-9899-602fb664ef82","Type":"ContainerDied","Data":"803be98a2789f7b19701a7781dcf32d81613e90b7e37c6e701e7487a059a51dc"} Nov 27 09:18:26 crc kubenswrapper[4971]: I1127 09:18:26.987552 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-networker-pbgpj" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.157959 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6c999c8f-a99f-49ec-9899-602fb664ef82-inventory\") pod \"6c999c8f-a99f-49ec-9899-602fb664ef82\" (UID: \"6c999c8f-a99f-49ec-9899-602fb664ef82\") " Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.158089 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thhp4\" (UniqueName: \"kubernetes.io/projected/6c999c8f-a99f-49ec-9899-602fb664ef82-kube-api-access-thhp4\") pod \"6c999c8f-a99f-49ec-9899-602fb664ef82\" (UID: \"6c999c8f-a99f-49ec-9899-602fb664ef82\") " Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.158215 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6c999c8f-a99f-49ec-9899-602fb664ef82-ssh-key\") pod \"6c999c8f-a99f-49ec-9899-602fb664ef82\" (UID: \"6c999c8f-a99f-49ec-9899-602fb664ef82\") " Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.164170 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c999c8f-a99f-49ec-9899-602fb664ef82-kube-api-access-thhp4" (OuterVolumeSpecName: "kube-api-access-thhp4") pod "6c999c8f-a99f-49ec-9899-602fb664ef82" (UID: "6c999c8f-a99f-49ec-9899-602fb664ef82"). InnerVolumeSpecName "kube-api-access-thhp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.187052 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c999c8f-a99f-49ec-9899-602fb664ef82-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6c999c8f-a99f-49ec-9899-602fb664ef82" (UID: "6c999c8f-a99f-49ec-9899-602fb664ef82"). InnerVolumeSpecName "ssh-key". 
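The "back-off 5m0s restarting failed container" errors repeated above (09:17:13 through 09:18:22) are kubelet's crash-loop back-off: each sync attempt is refused until the current back-off window expires. The 5m cap is what the messages themselves report; the 10s base and doubling factor below are assumptions about the default progression, so treat this as a sketch of the shape, not the exact schedule:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        const base = 10 * time.Second    // assumed default base delay
        const maxDelay = 5 * time.Minute // the cap the log reports
        d := base
        for i := 1; d < maxDelay; i++ {
            fmt.Printf("restart %d: wait %v\n", i, d)
            d *= 2 // assumed doubling between crashes
            if d > maxDelay {
                d = maxDelay
            }
        }
        fmt.Println("all later restarts wait", maxDelay)
    }

Once a container keeps crashing, every further restart of it waits the full capped window, which is why the same error repeats at each sync for over a minute here.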
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.197209 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c999c8f-a99f-49ec-9899-602fb664ef82-inventory" (OuterVolumeSpecName: "inventory") pod "6c999c8f-a99f-49ec-9899-602fb664ef82" (UID: "6c999c8f-a99f-49ec-9899-602fb664ef82"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.261047 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6c999c8f-a99f-49ec-9899-602fb664ef82-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.261099 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thhp4\" (UniqueName: \"kubernetes.io/projected/6c999c8f-a99f-49ec-9899-602fb664ef82-kube-api-access-thhp4\") on node \"crc\" DevicePath \"\"" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.261111 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6c999c8f-a99f-49ec-9899-602fb664ef82-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.347994 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-networker-pbgpj" event={"ID":"6c999c8f-a99f-49ec-9899-602fb664ef82","Type":"ContainerDied","Data":"976a966de24c01e1bd8609a6bd1d592797e47e9740c7fdccdde5c134c6227482"} Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.348051 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="976a966de24c01e1bd8609a6bd1d592797e47e9740c7fdccdde5c134c6227482" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.348094 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-networker-pbgpj" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.457292 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-openstack-openstack-networker-m6k84"] Nov 27 09:18:27 crc kubenswrapper[4971]: E1127 09:18:27.457916 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c999c8f-a99f-49ec-9899-602fb664ef82" containerName="download-cache-openstack-openstack-networker" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.457940 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c999c8f-a99f-49ec-9899-602fb664ef82" containerName="download-cache-openstack-openstack-networker" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.458200 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c999c8f-a99f-49ec-9899-602fb664ef82" containerName="download-cache-openstack-openstack-networker" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.460865 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-networker-m6k84" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.464711 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-hbhvx" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.465093 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.494646 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-networker-m6k84"] Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.567584 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8j2sg\" (UniqueName: \"kubernetes.io/projected/df91b509-08ef-4038-aaf4-b200f15daad2-kube-api-access-8j2sg\") pod \"configure-network-openstack-openstack-networker-m6k84\" (UID: \"df91b509-08ef-4038-aaf4-b200f15daad2\") " pod="openstack/configure-network-openstack-openstack-networker-m6k84" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.567692 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df91b509-08ef-4038-aaf4-b200f15daad2-inventory\") pod \"configure-network-openstack-openstack-networker-m6k84\" (UID: \"df91b509-08ef-4038-aaf4-b200f15daad2\") " pod="openstack/configure-network-openstack-openstack-networker-m6k84" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.567736 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/df91b509-08ef-4038-aaf4-b200f15daad2-ssh-key\") pod \"configure-network-openstack-openstack-networker-m6k84\" (UID: \"df91b509-08ef-4038-aaf4-b200f15daad2\") " pod="openstack/configure-network-openstack-openstack-networker-m6k84" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.669608 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8j2sg\" (UniqueName: \"kubernetes.io/projected/df91b509-08ef-4038-aaf4-b200f15daad2-kube-api-access-8j2sg\") pod \"configure-network-openstack-openstack-networker-m6k84\" (UID: \"df91b509-08ef-4038-aaf4-b200f15daad2\") " pod="openstack/configure-network-openstack-openstack-networker-m6k84" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.669796 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df91b509-08ef-4038-aaf4-b200f15daad2-inventory\") pod \"configure-network-openstack-openstack-networker-m6k84\" (UID: \"df91b509-08ef-4038-aaf4-b200f15daad2\") " pod="openstack/configure-network-openstack-openstack-networker-m6k84" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.669831 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/df91b509-08ef-4038-aaf4-b200f15daad2-ssh-key\") pod \"configure-network-openstack-openstack-networker-m6k84\" (UID: \"df91b509-08ef-4038-aaf4-b200f15daad2\") " pod="openstack/configure-network-openstack-openstack-networker-m6k84" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.673821 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df91b509-08ef-4038-aaf4-b200f15daad2-inventory\") pod 
\"configure-network-openstack-openstack-networker-m6k84\" (UID: \"df91b509-08ef-4038-aaf4-b200f15daad2\") " pod="openstack/configure-network-openstack-openstack-networker-m6k84" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.678520 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/df91b509-08ef-4038-aaf4-b200f15daad2-ssh-key\") pod \"configure-network-openstack-openstack-networker-m6k84\" (UID: \"df91b509-08ef-4038-aaf4-b200f15daad2\") " pod="openstack/configure-network-openstack-openstack-networker-m6k84" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.689347 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8j2sg\" (UniqueName: \"kubernetes.io/projected/df91b509-08ef-4038-aaf4-b200f15daad2-kube-api-access-8j2sg\") pod \"configure-network-openstack-openstack-networker-m6k84\" (UID: \"df91b509-08ef-4038-aaf4-b200f15daad2\") " pod="openstack/configure-network-openstack-openstack-networker-m6k84" Nov 27 09:18:27 crc kubenswrapper[4971]: I1127 09:18:27.787783 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-networker-m6k84" Nov 27 09:18:28 crc kubenswrapper[4971]: I1127 09:18:28.795474 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-networker-m6k84"] Nov 27 09:18:29 crc kubenswrapper[4971]: I1127 09:18:29.375571 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-networker-m6k84" event={"ID":"df91b509-08ef-4038-aaf4-b200f15daad2","Type":"ContainerStarted","Data":"faa049502ebc25705faf416418e8faa405339607a6e91e5beae6acc2bd28887d"} Nov 27 09:18:29 crc kubenswrapper[4971]: I1127 09:18:29.528275 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tmbf5" Nov 27 09:18:29 crc kubenswrapper[4971]: I1127 09:18:29.528659 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tmbf5" Nov 27 09:18:29 crc kubenswrapper[4971]: I1127 09:18:29.584185 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tmbf5" Nov 27 09:18:30 crc kubenswrapper[4971]: I1127 09:18:30.390896 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-networker-m6k84" event={"ID":"df91b509-08ef-4038-aaf4-b200f15daad2","Type":"ContainerStarted","Data":"5f20d5e35872c64a6f520a82152d4d803be28ad3379524de095a389e286bb5ae"} Nov 27 09:18:30 crc kubenswrapper[4971]: I1127 09:18:30.423723 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-openstack-openstack-networker-m6k84" podStartSLOduration=2.7412197320000002 podStartE2EDuration="3.423704243s" podCreationTimestamp="2025-11-27 09:18:27 +0000 UTC" firstStartedPulling="2025-11-27 09:18:28.78899613 +0000 UTC m=+8746.981040048" lastFinishedPulling="2025-11-27 09:18:29.471480641 +0000 UTC m=+8747.663524559" observedRunningTime="2025-11-27 09:18:30.409103958 +0000 UTC m=+8748.601147876" watchObservedRunningTime="2025-11-27 09:18:30.423704243 +0000 UTC m=+8748.615748161" Nov 27 09:18:30 crc kubenswrapper[4971]: I1127 09:18:30.495840 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tmbf5" Nov 27 09:18:30 crc 
kubenswrapper[4971]: I1127 09:18:30.547799 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tmbf5"] Nov 27 09:18:32 crc kubenswrapper[4971]: I1127 09:18:32.229743 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cdwpg"] Nov 27 09:18:32 crc kubenswrapper[4971]: I1127 09:18:32.232686 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cdwpg" Nov 27 09:18:32 crc kubenswrapper[4971]: I1127 09:18:32.248251 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cdwpg"] Nov 27 09:18:32 crc kubenswrapper[4971]: I1127 09:18:32.380439 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0621c077-a96d-4425-a32f-d4a879a67814-catalog-content\") pod \"certified-operators-cdwpg\" (UID: \"0621c077-a96d-4425-a32f-d4a879a67814\") " pod="openshift-marketplace/certified-operators-cdwpg" Nov 27 09:18:32 crc kubenswrapper[4971]: I1127 09:18:32.380503 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0621c077-a96d-4425-a32f-d4a879a67814-utilities\") pod \"certified-operators-cdwpg\" (UID: \"0621c077-a96d-4425-a32f-d4a879a67814\") " pod="openshift-marketplace/certified-operators-cdwpg" Nov 27 09:18:32 crc kubenswrapper[4971]: I1127 09:18:32.380564 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtbdv\" (UniqueName: \"kubernetes.io/projected/0621c077-a96d-4425-a32f-d4a879a67814-kube-api-access-mtbdv\") pod \"certified-operators-cdwpg\" (UID: \"0621c077-a96d-4425-a32f-d4a879a67814\") " pod="openshift-marketplace/certified-operators-cdwpg" Nov 27 09:18:32 crc kubenswrapper[4971]: I1127 09:18:32.409753 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tmbf5" podUID="4f91433f-83e0-4cb5-8556-02e268c1f9d2" containerName="registry-server" containerID="cri-o://7671231c1e8ec06b1c6c49ef46afd831a820b0dbe14ebea41da5b58e0b2136a4" gracePeriod=2 Nov 27 09:18:32 crc kubenswrapper[4971]: I1127 09:18:32.483053 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0621c077-a96d-4425-a32f-d4a879a67814-catalog-content\") pod \"certified-operators-cdwpg\" (UID: \"0621c077-a96d-4425-a32f-d4a879a67814\") " pod="openshift-marketplace/certified-operators-cdwpg" Nov 27 09:18:32 crc kubenswrapper[4971]: I1127 09:18:32.483123 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0621c077-a96d-4425-a32f-d4a879a67814-utilities\") pod \"certified-operators-cdwpg\" (UID: \"0621c077-a96d-4425-a32f-d4a879a67814\") " pod="openshift-marketplace/certified-operators-cdwpg" Nov 27 09:18:32 crc kubenswrapper[4971]: I1127 09:18:32.483162 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtbdv\" (UniqueName: \"kubernetes.io/projected/0621c077-a96d-4425-a32f-d4a879a67814-kube-api-access-mtbdv\") pod \"certified-operators-cdwpg\" (UID: \"0621c077-a96d-4425-a32f-d4a879a67814\") " pod="openshift-marketplace/certified-operators-cdwpg" Nov 27 09:18:32 crc kubenswrapper[4971]: I1127 
09:18:32.484392 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0621c077-a96d-4425-a32f-d4a879a67814-catalog-content\") pod \"certified-operators-cdwpg\" (UID: \"0621c077-a96d-4425-a32f-d4a879a67814\") " pod="openshift-marketplace/certified-operators-cdwpg" Nov 27 09:18:32 crc kubenswrapper[4971]: I1127 09:18:32.484445 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0621c077-a96d-4425-a32f-d4a879a67814-utilities\") pod \"certified-operators-cdwpg\" (UID: \"0621c077-a96d-4425-a32f-d4a879a67814\") " pod="openshift-marketplace/certified-operators-cdwpg" Nov 27 09:18:32 crc kubenswrapper[4971]: I1127 09:18:32.510398 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtbdv\" (UniqueName: \"kubernetes.io/projected/0621c077-a96d-4425-a32f-d4a879a67814-kube-api-access-mtbdv\") pod \"certified-operators-cdwpg\" (UID: \"0621c077-a96d-4425-a32f-d4a879a67814\") " pod="openshift-marketplace/certified-operators-cdwpg" Nov 27 09:18:32 crc kubenswrapper[4971]: I1127 09:18:32.557975 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cdwpg" Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.188526 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tmbf5" Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.260210 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cdwpg"] Nov 27 09:18:33 crc kubenswrapper[4971]: W1127 09:18:33.281204 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0621c077_a96d_4425_a32f_d4a879a67814.slice/crio-798f993ccb5debf964f37d1cad756f94b5f329f26122fc6006cbceebda387e40 WatchSource:0}: Error finding container 798f993ccb5debf964f37d1cad756f94b5f329f26122fc6006cbceebda387e40: Status 404 returned error can't find the container with id 798f993ccb5debf964f37d1cad756f94b5f329f26122fc6006cbceebda387e40 Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.309068 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f91433f-83e0-4cb5-8556-02e268c1f9d2-catalog-content\") pod \"4f91433f-83e0-4cb5-8556-02e268c1f9d2\" (UID: \"4f91433f-83e0-4cb5-8556-02e268c1f9d2\") " Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.309137 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f91433f-83e0-4cb5-8556-02e268c1f9d2-utilities\") pod \"4f91433f-83e0-4cb5-8556-02e268c1f9d2\" (UID: \"4f91433f-83e0-4cb5-8556-02e268c1f9d2\") " Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.309264 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmd89\" (UniqueName: \"kubernetes.io/projected/4f91433f-83e0-4cb5-8556-02e268c1f9d2-kube-api-access-cmd89\") pod \"4f91433f-83e0-4cb5-8556-02e268c1f9d2\" (UID: \"4f91433f-83e0-4cb5-8556-02e268c1f9d2\") " Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.310460 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f91433f-83e0-4cb5-8556-02e268c1f9d2-utilities" (OuterVolumeSpecName: "utilities") pod 
"4f91433f-83e0-4cb5-8556-02e268c1f9d2" (UID: "4f91433f-83e0-4cb5-8556-02e268c1f9d2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.322844 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f91433f-83e0-4cb5-8556-02e268c1f9d2-kube-api-access-cmd89" (OuterVolumeSpecName: "kube-api-access-cmd89") pod "4f91433f-83e0-4cb5-8556-02e268c1f9d2" (UID: "4f91433f-83e0-4cb5-8556-02e268c1f9d2"). InnerVolumeSpecName "kube-api-access-cmd89". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.331618 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f91433f-83e0-4cb5-8556-02e268c1f9d2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4f91433f-83e0-4cb5-8556-02e268c1f9d2" (UID: "4f91433f-83e0-4cb5-8556-02e268c1f9d2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.411860 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f91433f-83e0-4cb5-8556-02e268c1f9d2-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.412348 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f91433f-83e0-4cb5-8556-02e268c1f9d2-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.412363 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmd89\" (UniqueName: \"kubernetes.io/projected/4f91433f-83e0-4cb5-8556-02e268c1f9d2-kube-api-access-cmd89\") on node \"crc\" DevicePath \"\"" Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.421557 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cdwpg" event={"ID":"0621c077-a96d-4425-a32f-d4a879a67814","Type":"ContainerStarted","Data":"798f993ccb5debf964f37d1cad756f94b5f329f26122fc6006cbceebda387e40"} Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.424548 4971 generic.go:334] "Generic (PLEG): container finished" podID="4f91433f-83e0-4cb5-8556-02e268c1f9d2" containerID="7671231c1e8ec06b1c6c49ef46afd831a820b0dbe14ebea41da5b58e0b2136a4" exitCode=0 Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.424600 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tmbf5" event={"ID":"4f91433f-83e0-4cb5-8556-02e268c1f9d2","Type":"ContainerDied","Data":"7671231c1e8ec06b1c6c49ef46afd831a820b0dbe14ebea41da5b58e0b2136a4"} Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.424639 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tmbf5" event={"ID":"4f91433f-83e0-4cb5-8556-02e268c1f9d2","Type":"ContainerDied","Data":"77519f55dcb71862f344f82eb198bc7f7519f7361fed4d1714002e9ef623b9a8"} Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.424637 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tmbf5" Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.424658 4971 scope.go:117] "RemoveContainer" containerID="7671231c1e8ec06b1c6c49ef46afd831a820b0dbe14ebea41da5b58e0b2136a4" Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.484636 4971 scope.go:117] "RemoveContainer" containerID="3e0ee92c075f7f7da5244828bef08356e2d7d4ef1f698f029fcff3ed5eb69abc" Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.494666 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tmbf5"] Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.508164 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tmbf5"] Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.520015 4971 scope.go:117] "RemoveContainer" containerID="4e6b364949c4cd6d949163527f809f88f091c436d6c5a2936b81fcca3e576466" Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.649141 4971 scope.go:117] "RemoveContainer" containerID="7671231c1e8ec06b1c6c49ef46afd831a820b0dbe14ebea41da5b58e0b2136a4" Nov 27 09:18:33 crc kubenswrapper[4971]: E1127 09:18:33.649773 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7671231c1e8ec06b1c6c49ef46afd831a820b0dbe14ebea41da5b58e0b2136a4\": container with ID starting with 7671231c1e8ec06b1c6c49ef46afd831a820b0dbe14ebea41da5b58e0b2136a4 not found: ID does not exist" containerID="7671231c1e8ec06b1c6c49ef46afd831a820b0dbe14ebea41da5b58e0b2136a4" Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.649826 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7671231c1e8ec06b1c6c49ef46afd831a820b0dbe14ebea41da5b58e0b2136a4"} err="failed to get container status \"7671231c1e8ec06b1c6c49ef46afd831a820b0dbe14ebea41da5b58e0b2136a4\": rpc error: code = NotFound desc = could not find container \"7671231c1e8ec06b1c6c49ef46afd831a820b0dbe14ebea41da5b58e0b2136a4\": container with ID starting with 7671231c1e8ec06b1c6c49ef46afd831a820b0dbe14ebea41da5b58e0b2136a4 not found: ID does not exist" Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.649857 4971 scope.go:117] "RemoveContainer" containerID="3e0ee92c075f7f7da5244828bef08356e2d7d4ef1f698f029fcff3ed5eb69abc" Nov 27 09:18:33 crc kubenswrapper[4971]: E1127 09:18:33.650265 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e0ee92c075f7f7da5244828bef08356e2d7d4ef1f698f029fcff3ed5eb69abc\": container with ID starting with 3e0ee92c075f7f7da5244828bef08356e2d7d4ef1f698f029fcff3ed5eb69abc not found: ID does not exist" containerID="3e0ee92c075f7f7da5244828bef08356e2d7d4ef1f698f029fcff3ed5eb69abc" Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.650295 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e0ee92c075f7f7da5244828bef08356e2d7d4ef1f698f029fcff3ed5eb69abc"} err="failed to get container status \"3e0ee92c075f7f7da5244828bef08356e2d7d4ef1f698f029fcff3ed5eb69abc\": rpc error: code = NotFound desc = could not find container \"3e0ee92c075f7f7da5244828bef08356e2d7d4ef1f698f029fcff3ed5eb69abc\": container with ID starting with 3e0ee92c075f7f7da5244828bef08356e2d7d4ef1f698f029fcff3ed5eb69abc not found: ID does not exist" Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.650309 4971 scope.go:117] "RemoveContainer" 
containerID="4e6b364949c4cd6d949163527f809f88f091c436d6c5a2936b81fcca3e576466" Nov 27 09:18:33 crc kubenswrapper[4971]: E1127 09:18:33.650668 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e6b364949c4cd6d949163527f809f88f091c436d6c5a2936b81fcca3e576466\": container with ID starting with 4e6b364949c4cd6d949163527f809f88f091c436d6c5a2936b81fcca3e576466 not found: ID does not exist" containerID="4e6b364949c4cd6d949163527f809f88f091c436d6c5a2936b81fcca3e576466" Nov 27 09:18:33 crc kubenswrapper[4971]: I1127 09:18:33.650714 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e6b364949c4cd6d949163527f809f88f091c436d6c5a2936b81fcca3e576466"} err="failed to get container status \"4e6b364949c4cd6d949163527f809f88f091c436d6c5a2936b81fcca3e576466\": rpc error: code = NotFound desc = could not find container \"4e6b364949c4cd6d949163527f809f88f091c436d6c5a2936b81fcca3e576466\": container with ID starting with 4e6b364949c4cd6d949163527f809f88f091c436d6c5a2936b81fcca3e576466 not found: ID does not exist" Nov 27 09:18:34 crc kubenswrapper[4971]: I1127 09:18:34.436064 4971 generic.go:334] "Generic (PLEG): container finished" podID="0621c077-a96d-4425-a32f-d4a879a67814" containerID="6949a7d201ee578f57f11413e8d7599e2b33c4a2fd34a8da7409fbe734114a6c" exitCode=0 Nov 27 09:18:34 crc kubenswrapper[4971]: I1127 09:18:34.436180 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cdwpg" event={"ID":"0621c077-a96d-4425-a32f-d4a879a67814","Type":"ContainerDied","Data":"6949a7d201ee578f57f11413e8d7599e2b33c4a2fd34a8da7409fbe734114a6c"} Nov 27 09:18:34 crc kubenswrapper[4971]: I1127 09:18:34.574628 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f91433f-83e0-4cb5-8556-02e268c1f9d2" path="/var/lib/kubelet/pods/4f91433f-83e0-4cb5-8556-02e268c1f9d2/volumes" Nov 27 09:18:35 crc kubenswrapper[4971]: I1127 09:18:35.556197 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c" Nov 27 09:18:36 crc kubenswrapper[4971]: I1127 09:18:36.467177 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"74942ef67bc6f0078a96e89348238bb86066d83521cb4e5856153cafd601817d"} Nov 27 09:18:41 crc kubenswrapper[4971]: I1127 09:18:41.563445 4971 generic.go:334] "Generic (PLEG): container finished" podID="48953229-16fe-403a-a919-a6aa7ed6a6bb" containerID="644d6eb848e0758edcbdfc276699ef796a3e7ce9f8eeb0bd6eef30b518c4c26f" exitCode=0 Nov 27 09:18:41 crc kubenswrapper[4971]: I1127 09:18:41.563555 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" event={"ID":"48953229-16fe-403a-a919-a6aa7ed6a6bb","Type":"ContainerDied","Data":"644d6eb848e0758edcbdfc276699ef796a3e7ce9f8eeb0bd6eef30b518c4c26f"} Nov 27 09:18:41 crc kubenswrapper[4971]: I1127 09:18:41.567602 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cdwpg" event={"ID":"0621c077-a96d-4425-a32f-d4a879a67814","Type":"ContainerStarted","Data":"fa09c4121b944be5ecc424054046cccf3c158484c40d783406eb973afc58f586"} Nov 27 09:18:42 crc kubenswrapper[4971]: I1127 09:18:42.579919 4971 generic.go:334] "Generic (PLEG): container finished" 
podID="0621c077-a96d-4425-a32f-d4a879a67814" containerID="fa09c4121b944be5ecc424054046cccf3c158484c40d783406eb973afc58f586" exitCode=0 Nov 27 09:18:42 crc kubenswrapper[4971]: I1127 09:18:42.580307 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cdwpg" event={"ID":"0621c077-a96d-4425-a32f-d4a879a67814","Type":"ContainerDied","Data":"fa09c4121b944be5ecc424054046cccf3c158484c40d783406eb973afc58f586"} Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.178020 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.259702 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48953229-16fe-403a-a919-a6aa7ed6a6bb-inventory\") pod \"48953229-16fe-403a-a919-a6aa7ed6a6bb\" (UID: \"48953229-16fe-403a-a919-a6aa7ed6a6bb\") " Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.259832 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/48953229-16fe-403a-a919-a6aa7ed6a6bb-ssh-key\") pod \"48953229-16fe-403a-a919-a6aa7ed6a6bb\" (UID: \"48953229-16fe-403a-a919-a6aa7ed6a6bb\") " Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.259950 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l2w2j\" (UniqueName: \"kubernetes.io/projected/48953229-16fe-403a-a919-a6aa7ed6a6bb-kube-api-access-l2w2j\") pod \"48953229-16fe-403a-a919-a6aa7ed6a6bb\" (UID: \"48953229-16fe-403a-a919-a6aa7ed6a6bb\") " Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.260048 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/48953229-16fe-403a-a919-a6aa7ed6a6bb-ceph\") pod \"48953229-16fe-403a-a919-a6aa7ed6a6bb\" (UID: \"48953229-16fe-403a-a919-a6aa7ed6a6bb\") " Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.268153 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48953229-16fe-403a-a919-a6aa7ed6a6bb-ceph" (OuterVolumeSpecName: "ceph") pod "48953229-16fe-403a-a919-a6aa7ed6a6bb" (UID: "48953229-16fe-403a-a919-a6aa7ed6a6bb"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.268212 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48953229-16fe-403a-a919-a6aa7ed6a6bb-kube-api-access-l2w2j" (OuterVolumeSpecName: "kube-api-access-l2w2j") pod "48953229-16fe-403a-a919-a6aa7ed6a6bb" (UID: "48953229-16fe-403a-a919-a6aa7ed6a6bb"). InnerVolumeSpecName "kube-api-access-l2w2j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.295071 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48953229-16fe-403a-a919-a6aa7ed6a6bb-inventory" (OuterVolumeSpecName: "inventory") pod "48953229-16fe-403a-a919-a6aa7ed6a6bb" (UID: "48953229-16fe-403a-a919-a6aa7ed6a6bb"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.303876 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48953229-16fe-403a-a919-a6aa7ed6a6bb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "48953229-16fe-403a-a919-a6aa7ed6a6bb" (UID: "48953229-16fe-403a-a919-a6aa7ed6a6bb"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.362362 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/48953229-16fe-403a-a919-a6aa7ed6a6bb-ceph\") on node \"crc\" DevicePath \"\"" Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.362558 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48953229-16fe-403a-a919-a6aa7ed6a6bb-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.362570 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/48953229-16fe-403a-a919-a6aa7ed6a6bb-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.362581 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l2w2j\" (UniqueName: \"kubernetes.io/projected/48953229-16fe-403a-a919-a6aa7ed6a6bb-kube-api-access-l2w2j\") on node \"crc\" DevicePath \"\"" Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.592649 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-n7bzc" event={"ID":"48953229-16fe-403a-a919-a6aa7ed6a6bb","Type":"ContainerDied","Data":"a6ad135e7f1c2445e6acbc0fccb22504a9ed43243ad1d7c1ffde28cb84b0864b"} Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.592707 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6ad135e7f1c2445e6acbc0fccb22504a9ed43243ad1d7c1ffde28cb84b0864b" Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.593035 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-n7bzc"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.598055 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cdwpg" event={"ID":"0621c077-a96d-4425-a32f-d4a879a67814","Type":"ContainerStarted","Data":"0907a72de7e64d12ea692451b6d6a54e274f164526052aa5c74bf8271921a8ec"}
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.634903 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cdwpg" podStartSLOduration=3.008964246 podStartE2EDuration="11.634872426s" podCreationTimestamp="2025-11-27 09:18:32 +0000 UTC" firstStartedPulling="2025-11-27 09:18:34.438426334 +0000 UTC m=+8752.630470252" lastFinishedPulling="2025-11-27 09:18:43.064334514 +0000 UTC m=+8761.256378432" observedRunningTime="2025-11-27 09:18:43.62092959 +0000 UTC m=+8761.812973508" watchObservedRunningTime="2025-11-27 09:18:43.634872426 +0000 UTC m=+8761.826916354"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.686244 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-ftnrt"]
Nov 27 09:18:43 crc kubenswrapper[4971]: E1127 09:18:43.689828 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f91433f-83e0-4cb5-8556-02e268c1f9d2" containerName="extract-utilities"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.689860 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f91433f-83e0-4cb5-8556-02e268c1f9d2" containerName="extract-utilities"
Nov 27 09:18:43 crc kubenswrapper[4971]: E1127 09:18:43.689873 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f91433f-83e0-4cb5-8556-02e268c1f9d2" containerName="extract-content"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.689881 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f91433f-83e0-4cb5-8556-02e268c1f9d2" containerName="extract-content"
Nov 27 09:18:43 crc kubenswrapper[4971]: E1127 09:18:43.689906 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48953229-16fe-403a-a919-a6aa7ed6a6bb" containerName="download-cache-openstack-openstack-cell1"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.689912 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="48953229-16fe-403a-a919-a6aa7ed6a6bb" containerName="download-cache-openstack-openstack-cell1"
Nov 27 09:18:43 crc kubenswrapper[4971]: E1127 09:18:43.689924 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f91433f-83e0-4cb5-8556-02e268c1f9d2" containerName="registry-server"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.689929 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f91433f-83e0-4cb5-8556-02e268c1f9d2" containerName="registry-server"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.690146 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f91433f-83e0-4cb5-8556-02e268c1f9d2" containerName="registry-server"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.690175 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="48953229-16fe-403a-a919-a6aa7ed6a6bb" containerName="download-cache-openstack-openstack-cell1"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.691132 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-ftnrt"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.693463 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.703003 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-ftnrt"]
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.734635 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-jnkbm"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.772507 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-inventory\") pod \"configure-network-openstack-openstack-cell1-ftnrt\" (UID: \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\") " pod="openstack/configure-network-openstack-openstack-cell1-ftnrt"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.772726 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-ceph\") pod \"configure-network-openstack-openstack-cell1-ftnrt\" (UID: \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\") " pod="openstack/configure-network-openstack-openstack-cell1-ftnrt"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.772994 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n9jj\" (UniqueName: \"kubernetes.io/projected/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-kube-api-access-8n9jj\") pod \"configure-network-openstack-openstack-cell1-ftnrt\" (UID: \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\") " pod="openstack/configure-network-openstack-openstack-cell1-ftnrt"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.773244 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-ssh-key\") pod \"configure-network-openstack-openstack-cell1-ftnrt\" (UID: \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\") " pod="openstack/configure-network-openstack-openstack-cell1-ftnrt"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.875195 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-ssh-key\") pod \"configure-network-openstack-openstack-cell1-ftnrt\" (UID: \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\") " pod="openstack/configure-network-openstack-openstack-cell1-ftnrt"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.875315 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-inventory\") pod \"configure-network-openstack-openstack-cell1-ftnrt\" (UID: \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\") " pod="openstack/configure-network-openstack-openstack-cell1-ftnrt"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.875360 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-ceph\") pod \"configure-network-openstack-openstack-cell1-ftnrt\" (UID: \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\") " pod="openstack/configure-network-openstack-openstack-cell1-ftnrt"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.875422 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n9jj\" (UniqueName: \"kubernetes.io/projected/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-kube-api-access-8n9jj\") pod \"configure-network-openstack-openstack-cell1-ftnrt\" (UID: \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\") " pod="openstack/configure-network-openstack-openstack-cell1-ftnrt"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.880460 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-inventory\") pod \"configure-network-openstack-openstack-cell1-ftnrt\" (UID: \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\") " pod="openstack/configure-network-openstack-openstack-cell1-ftnrt"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.880460 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-ceph\") pod \"configure-network-openstack-openstack-cell1-ftnrt\" (UID: \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\") " pod="openstack/configure-network-openstack-openstack-cell1-ftnrt"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.880832 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-ssh-key\") pod \"configure-network-openstack-openstack-cell1-ftnrt\" (UID: \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\") " pod="openstack/configure-network-openstack-openstack-cell1-ftnrt"
Nov 27 09:18:43 crc kubenswrapper[4971]: I1127 09:18:43.890082 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n9jj\" (UniqueName: \"kubernetes.io/projected/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-kube-api-access-8n9jj\") pod \"configure-network-openstack-openstack-cell1-ftnrt\" (UID: \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\") " pod="openstack/configure-network-openstack-openstack-cell1-ftnrt"
Nov 27 09:18:44 crc kubenswrapper[4971]: I1127 09:18:44.047101 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-ftnrt"
Nov 27 09:18:44 crc kubenswrapper[4971]: I1127 09:18:44.608364 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-ftnrt"]
Nov 27 09:18:45 crc kubenswrapper[4971]: I1127 09:18:45.619076 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-ftnrt" event={"ID":"8a65697f-7b70-42e9-bc6b-12f32a73cbb7","Type":"ContainerStarted","Data":"d424dac5602c6611880a60a77453747835831996737351a09adfba6e0d1a1a4e"}
Nov 27 09:18:45 crc kubenswrapper[4971]: I1127 09:18:45.619523 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-ftnrt" event={"ID":"8a65697f-7b70-42e9-bc6b-12f32a73cbb7","Type":"ContainerStarted","Data":"64b60aa5cfc64d3eb65e8ab1957e786e16bfcf52d6fee36e6599f6458bde5fca"}
Nov 27 09:18:45 crc kubenswrapper[4971]: I1127 09:18:45.645735 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-openstack-openstack-cell1-ftnrt" podStartSLOduration=2.03476022 podStartE2EDuration="2.645714391s" podCreationTimestamp="2025-11-27 09:18:43 +0000 UTC" firstStartedPulling="2025-11-27 09:18:44.61042849 +0000 UTC m=+8762.802472408" lastFinishedPulling="2025-11-27 09:18:45.221382661 +0000 UTC m=+8763.413426579" observedRunningTime="2025-11-27 09:18:45.633595927 +0000 UTC m=+8763.825639865" watchObservedRunningTime="2025-11-27 09:18:45.645714391 +0000 UTC m=+8763.837758309"
Nov 27 09:18:52 crc kubenswrapper[4971]: I1127 09:18:52.562148 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cdwpg"
Nov 27 09:18:52 crc kubenswrapper[4971]: I1127 09:18:52.562700 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cdwpg"
Nov 27 09:18:52 crc kubenswrapper[4971]: I1127 09:18:52.608877 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cdwpg"
Nov 27 09:18:52 crc kubenswrapper[4971]: I1127 09:18:52.762252 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cdwpg"
Nov 27 09:18:52 crc kubenswrapper[4971]: I1127 09:18:52.843345 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cdwpg"]
Nov 27 09:18:52 crc kubenswrapper[4971]: I1127 09:18:52.887777 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nwtx2"]
Nov 27 09:18:52 crc kubenswrapper[4971]: I1127 09:18:52.888032 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nwtx2" podUID="81f9813c-4df5-458c-a044-f8488e44e16f" containerName="registry-server" containerID="cri-o://1196480d555dc950bd1dfc5c6b297ce11aae65757903317837611a0f40020b70" gracePeriod=2
Nov 27 09:18:53 crc kubenswrapper[4971]: I1127 09:18:53.524884 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nwtx2"
Nov 27 09:18:53 crc kubenswrapper[4971]: I1127 09:18:53.584550 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f9813c-4df5-458c-a044-f8488e44e16f-catalog-content\") pod \"81f9813c-4df5-458c-a044-f8488e44e16f\" (UID: \"81f9813c-4df5-458c-a044-f8488e44e16f\") "
Nov 27 09:18:53 crc kubenswrapper[4971]: I1127 09:18:53.584607 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f9813c-4df5-458c-a044-f8488e44e16f-utilities\") pod \"81f9813c-4df5-458c-a044-f8488e44e16f\" (UID: \"81f9813c-4df5-458c-a044-f8488e44e16f\") "
Nov 27 09:18:53 crc kubenswrapper[4971]: I1127 09:18:53.584646 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgjns\" (UniqueName: \"kubernetes.io/projected/81f9813c-4df5-458c-a044-f8488e44e16f-kube-api-access-bgjns\") pod \"81f9813c-4df5-458c-a044-f8488e44e16f\" (UID: \"81f9813c-4df5-458c-a044-f8488e44e16f\") "
Nov 27 09:18:53 crc kubenswrapper[4971]: I1127 09:18:53.590851 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81f9813c-4df5-458c-a044-f8488e44e16f-utilities" (OuterVolumeSpecName: "utilities") pod "81f9813c-4df5-458c-a044-f8488e44e16f" (UID: "81f9813c-4df5-458c-a044-f8488e44e16f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:18:53 crc kubenswrapper[4971]: I1127 09:18:53.598741 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81f9813c-4df5-458c-a044-f8488e44e16f-kube-api-access-bgjns" (OuterVolumeSpecName: "kube-api-access-bgjns") pod "81f9813c-4df5-458c-a044-f8488e44e16f" (UID: "81f9813c-4df5-458c-a044-f8488e44e16f"). InnerVolumeSpecName "kube-api-access-bgjns". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:18:53 crc kubenswrapper[4971]: I1127 09:18:53.688712 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f9813c-4df5-458c-a044-f8488e44e16f-utilities\") on node \"crc\" DevicePath \"\""
Nov 27 09:18:53 crc kubenswrapper[4971]: I1127 09:18:53.688760 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgjns\" (UniqueName: \"kubernetes.io/projected/81f9813c-4df5-458c-a044-f8488e44e16f-kube-api-access-bgjns\") on node \"crc\" DevicePath \"\""
Nov 27 09:18:53 crc kubenswrapper[4971]: I1127 09:18:53.705158 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81f9813c-4df5-458c-a044-f8488e44e16f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "81f9813c-4df5-458c-a044-f8488e44e16f" (UID: "81f9813c-4df5-458c-a044-f8488e44e16f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:18:53 crc kubenswrapper[4971]: I1127 09:18:53.733020 4971 generic.go:334] "Generic (PLEG): container finished" podID="81f9813c-4df5-458c-a044-f8488e44e16f" containerID="1196480d555dc950bd1dfc5c6b297ce11aae65757903317837611a0f40020b70" exitCode=0
Nov 27 09:18:53 crc kubenswrapper[4971]: I1127 09:18:53.733093 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nwtx2"
Nov 27 09:18:53 crc kubenswrapper[4971]: I1127 09:18:53.733193 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwtx2" event={"ID":"81f9813c-4df5-458c-a044-f8488e44e16f","Type":"ContainerDied","Data":"1196480d555dc950bd1dfc5c6b297ce11aae65757903317837611a0f40020b70"}
Nov 27 09:18:53 crc kubenswrapper[4971]: I1127 09:18:53.733241 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwtx2" event={"ID":"81f9813c-4df5-458c-a044-f8488e44e16f","Type":"ContainerDied","Data":"796d5cebfd5acee53af75a42fb1828deb86074edef6f146c41852df18db2591a"}
Nov 27 09:18:53 crc kubenswrapper[4971]: I1127 09:18:53.733273 4971 scope.go:117] "RemoveContainer" containerID="1196480d555dc950bd1dfc5c6b297ce11aae65757903317837611a0f40020b70"
Nov 27 09:18:53 crc kubenswrapper[4971]: I1127 09:18:53.787861 4971 scope.go:117] "RemoveContainer" containerID="304e3420b6798a57b063de909ae9fce58feca84fd196a4eafe01d7590b61b083"
Nov 27 09:18:53 crc kubenswrapper[4971]: I1127 09:18:53.788132 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nwtx2"]
Nov 27 09:18:53 crc kubenswrapper[4971]: I1127 09:18:53.792004 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f9813c-4df5-458c-a044-f8488e44e16f-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 27 09:18:53 crc kubenswrapper[4971]: I1127 09:18:53.798206 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nwtx2"]
Nov 27 09:18:54 crc kubenswrapper[4971]: I1127 09:18:54.278301 4971 scope.go:117] "RemoveContainer" containerID="72b5ac953e778adb62d0a84e18f96e661077e3cbd9d5a1d6cb28cdeba7857e2b"
Nov 27 09:18:54 crc kubenswrapper[4971]: I1127 09:18:54.315939 4971 scope.go:117] "RemoveContainer" containerID="1196480d555dc950bd1dfc5c6b297ce11aae65757903317837611a0f40020b70"
Nov 27 09:18:54 crc kubenswrapper[4971]: E1127 09:18:54.316617 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1196480d555dc950bd1dfc5c6b297ce11aae65757903317837611a0f40020b70\": container with ID starting with 1196480d555dc950bd1dfc5c6b297ce11aae65757903317837611a0f40020b70 not found: ID does not exist" containerID="1196480d555dc950bd1dfc5c6b297ce11aae65757903317837611a0f40020b70"
Nov 27 09:18:54 crc kubenswrapper[4971]: I1127 09:18:54.316673 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1196480d555dc950bd1dfc5c6b297ce11aae65757903317837611a0f40020b70"} err="failed to get container status \"1196480d555dc950bd1dfc5c6b297ce11aae65757903317837611a0f40020b70\": rpc error: code = NotFound desc = could not find container \"1196480d555dc950bd1dfc5c6b297ce11aae65757903317837611a0f40020b70\": container with ID starting with 1196480d555dc950bd1dfc5c6b297ce11aae65757903317837611a0f40020b70 not found: ID does not exist"
Nov 27 09:18:54 crc kubenswrapper[4971]: I1127 09:18:54.316706 4971 scope.go:117] "RemoveContainer" containerID="304e3420b6798a57b063de909ae9fce58feca84fd196a4eafe01d7590b61b083"
Nov 27 09:18:54 crc kubenswrapper[4971]: E1127 09:18:54.317070 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"304e3420b6798a57b063de909ae9fce58feca84fd196a4eafe01d7590b61b083\": container with ID starting with 304e3420b6798a57b063de909ae9fce58feca84fd196a4eafe01d7590b61b083 not found: ID does not exist" containerID="304e3420b6798a57b063de909ae9fce58feca84fd196a4eafe01d7590b61b083"
Nov 27 09:18:54 crc kubenswrapper[4971]: I1127 09:18:54.317095 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"304e3420b6798a57b063de909ae9fce58feca84fd196a4eafe01d7590b61b083"} err="failed to get container status \"304e3420b6798a57b063de909ae9fce58feca84fd196a4eafe01d7590b61b083\": rpc error: code = NotFound desc = could not find container \"304e3420b6798a57b063de909ae9fce58feca84fd196a4eafe01d7590b61b083\": container with ID starting with 304e3420b6798a57b063de909ae9fce58feca84fd196a4eafe01d7590b61b083 not found: ID does not exist"
Nov 27 09:18:54 crc kubenswrapper[4971]: I1127 09:18:54.317113 4971 scope.go:117] "RemoveContainer" containerID="72b5ac953e778adb62d0a84e18f96e661077e3cbd9d5a1d6cb28cdeba7857e2b"
Nov 27 09:18:54 crc kubenswrapper[4971]: E1127 09:18:54.317636 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72b5ac953e778adb62d0a84e18f96e661077e3cbd9d5a1d6cb28cdeba7857e2b\": container with ID starting with 72b5ac953e778adb62d0a84e18f96e661077e3cbd9d5a1d6cb28cdeba7857e2b not found: ID does not exist" containerID="72b5ac953e778adb62d0a84e18f96e661077e3cbd9d5a1d6cb28cdeba7857e2b"
Nov 27 09:18:54 crc kubenswrapper[4971]: I1127 09:18:54.317658 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72b5ac953e778adb62d0a84e18f96e661077e3cbd9d5a1d6cb28cdeba7857e2b"} err="failed to get container status \"72b5ac953e778adb62d0a84e18f96e661077e3cbd9d5a1d6cb28cdeba7857e2b\": rpc error: code = NotFound desc = could not find container \"72b5ac953e778adb62d0a84e18f96e661077e3cbd9d5a1d6cb28cdeba7857e2b\": container with ID starting with 72b5ac953e778adb62d0a84e18f96e661077e3cbd9d5a1d6cb28cdeba7857e2b not found: ID does not exist"
Nov 27 09:18:54 crc kubenswrapper[4971]: I1127 09:18:54.567580 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81f9813c-4df5-458c-a044-f8488e44e16f" path="/var/lib/kubelet/pods/81f9813c-4df5-458c-a044-f8488e44e16f/volumes"
Nov 27 09:19:31 crc kubenswrapper[4971]: I1127 09:19:31.148821 4971 generic.go:334] "Generic (PLEG): container finished" podID="df91b509-08ef-4038-aaf4-b200f15daad2" containerID="5f20d5e35872c64a6f520a82152d4d803be28ad3379524de095a389e286bb5ae" exitCode=0
Nov 27 09:19:31 crc kubenswrapper[4971]: I1127 09:19:31.148925 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-networker-m6k84" event={"ID":"df91b509-08ef-4038-aaf4-b200f15daad2","Type":"ContainerDied","Data":"5f20d5e35872c64a6f520a82152d4d803be28ad3379524de095a389e286bb5ae"}
Nov 27 09:19:32 crc kubenswrapper[4971]: I1127 09:19:32.680031 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-networker-m6k84"
Nov 27 09:19:32 crc kubenswrapper[4971]: I1127 09:19:32.755790 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df91b509-08ef-4038-aaf4-b200f15daad2-inventory\") pod \"df91b509-08ef-4038-aaf4-b200f15daad2\" (UID: \"df91b509-08ef-4038-aaf4-b200f15daad2\") "
Nov 27 09:19:32 crc kubenswrapper[4971]: I1127 09:19:32.755965 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8j2sg\" (UniqueName: \"kubernetes.io/projected/df91b509-08ef-4038-aaf4-b200f15daad2-kube-api-access-8j2sg\") pod \"df91b509-08ef-4038-aaf4-b200f15daad2\" (UID: \"df91b509-08ef-4038-aaf4-b200f15daad2\") "
Nov 27 09:19:32 crc kubenswrapper[4971]: I1127 09:19:32.755995 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/df91b509-08ef-4038-aaf4-b200f15daad2-ssh-key\") pod \"df91b509-08ef-4038-aaf4-b200f15daad2\" (UID: \"df91b509-08ef-4038-aaf4-b200f15daad2\") "
Nov 27 09:19:32 crc kubenswrapper[4971]: I1127 09:19:32.775356 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df91b509-08ef-4038-aaf4-b200f15daad2-kube-api-access-8j2sg" (OuterVolumeSpecName: "kube-api-access-8j2sg") pod "df91b509-08ef-4038-aaf4-b200f15daad2" (UID: "df91b509-08ef-4038-aaf4-b200f15daad2"). InnerVolumeSpecName "kube-api-access-8j2sg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:19:32 crc kubenswrapper[4971]: I1127 09:19:32.787666 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df91b509-08ef-4038-aaf4-b200f15daad2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "df91b509-08ef-4038-aaf4-b200f15daad2" (UID: "df91b509-08ef-4038-aaf4-b200f15daad2"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:19:32 crc kubenswrapper[4971]: I1127 09:19:32.790159 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df91b509-08ef-4038-aaf4-b200f15daad2-inventory" (OuterVolumeSpecName: "inventory") pod "df91b509-08ef-4038-aaf4-b200f15daad2" (UID: "df91b509-08ef-4038-aaf4-b200f15daad2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:19:32 crc kubenswrapper[4971]: I1127 09:19:32.859796 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df91b509-08ef-4038-aaf4-b200f15daad2-inventory\") on node \"crc\" DevicePath \"\""
Nov 27 09:19:32 crc kubenswrapper[4971]: I1127 09:19:32.859850 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8j2sg\" (UniqueName: \"kubernetes.io/projected/df91b509-08ef-4038-aaf4-b200f15daad2-kube-api-access-8j2sg\") on node \"crc\" DevicePath \"\""
Nov 27 09:19:32 crc kubenswrapper[4971]: I1127 09:19:32.859866 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/df91b509-08ef-4038-aaf4-b200f15daad2-ssh-key\") on node \"crc\" DevicePath \"\""
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.169682 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-networker-m6k84" event={"ID":"df91b509-08ef-4038-aaf4-b200f15daad2","Type":"ContainerDied","Data":"faa049502ebc25705faf416418e8faa405339607a6e91e5beae6acc2bd28887d"}
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.169722 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="faa049502ebc25705faf416418e8faa405339607a6e91e5beae6acc2bd28887d"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.169776 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-networker-m6k84"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.281200 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-openstack-openstack-networker-kz2cj"]
Nov 27 09:19:33 crc kubenswrapper[4971]: E1127 09:19:33.281872 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df91b509-08ef-4038-aaf4-b200f15daad2" containerName="configure-network-openstack-openstack-networker"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.281899 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="df91b509-08ef-4038-aaf4-b200f15daad2" containerName="configure-network-openstack-openstack-networker"
Nov 27 09:19:33 crc kubenswrapper[4971]: E1127 09:19:33.281918 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81f9813c-4df5-458c-a044-f8488e44e16f" containerName="extract-utilities"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.281927 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="81f9813c-4df5-458c-a044-f8488e44e16f" containerName="extract-utilities"
Nov 27 09:19:33 crc kubenswrapper[4971]: E1127 09:19:33.281967 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81f9813c-4df5-458c-a044-f8488e44e16f" containerName="extract-content"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.281975 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="81f9813c-4df5-458c-a044-f8488e44e16f" containerName="extract-content"
Nov 27 09:19:33 crc kubenswrapper[4971]: E1127 09:19:33.281991 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81f9813c-4df5-458c-a044-f8488e44e16f" containerName="registry-server"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.281997 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="81f9813c-4df5-458c-a044-f8488e44e16f" containerName="registry-server"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.282249 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="df91b509-08ef-4038-aaf4-b200f15daad2" containerName="configure-network-openstack-openstack-networker"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.282278 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="81f9813c-4df5-458c-a044-f8488e44e16f" containerName="registry-server"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.283363 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-networker-kz2cj"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.287007 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.287749 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-hbhvx"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.296025 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-networker-kz2cj"]
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.372721 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9657dfc5-381a-4d1b-adb3-c5e6d0676d88-inventory\") pod \"validate-network-openstack-openstack-networker-kz2cj\" (UID: \"9657dfc5-381a-4d1b-adb3-c5e6d0676d88\") " pod="openstack/validate-network-openstack-openstack-networker-kz2cj"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.372827 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crvh7\" (UniqueName: \"kubernetes.io/projected/9657dfc5-381a-4d1b-adb3-c5e6d0676d88-kube-api-access-crvh7\") pod \"validate-network-openstack-openstack-networker-kz2cj\" (UID: \"9657dfc5-381a-4d1b-adb3-c5e6d0676d88\") " pod="openstack/validate-network-openstack-openstack-networker-kz2cj"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.372925 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9657dfc5-381a-4d1b-adb3-c5e6d0676d88-ssh-key\") pod \"validate-network-openstack-openstack-networker-kz2cj\" (UID: \"9657dfc5-381a-4d1b-adb3-c5e6d0676d88\") " pod="openstack/validate-network-openstack-openstack-networker-kz2cj"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.475096 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9657dfc5-381a-4d1b-adb3-c5e6d0676d88-ssh-key\") pod \"validate-network-openstack-openstack-networker-kz2cj\" (UID: \"9657dfc5-381a-4d1b-adb3-c5e6d0676d88\") " pod="openstack/validate-network-openstack-openstack-networker-kz2cj"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.475215 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9657dfc5-381a-4d1b-adb3-c5e6d0676d88-inventory\") pod \"validate-network-openstack-openstack-networker-kz2cj\" (UID: \"9657dfc5-381a-4d1b-adb3-c5e6d0676d88\") " pod="openstack/validate-network-openstack-openstack-networker-kz2cj"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.475278 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crvh7\" (UniqueName: \"kubernetes.io/projected/9657dfc5-381a-4d1b-adb3-c5e6d0676d88-kube-api-access-crvh7\") pod \"validate-network-openstack-openstack-networker-kz2cj\" (UID: \"9657dfc5-381a-4d1b-adb3-c5e6d0676d88\") " pod="openstack/validate-network-openstack-openstack-networker-kz2cj"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.479814 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9657dfc5-381a-4d1b-adb3-c5e6d0676d88-inventory\") pod \"validate-network-openstack-openstack-networker-kz2cj\" (UID: \"9657dfc5-381a-4d1b-adb3-c5e6d0676d88\") " pod="openstack/validate-network-openstack-openstack-networker-kz2cj"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.488316 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9657dfc5-381a-4d1b-adb3-c5e6d0676d88-ssh-key\") pod \"validate-network-openstack-openstack-networker-kz2cj\" (UID: \"9657dfc5-381a-4d1b-adb3-c5e6d0676d88\") " pod="openstack/validate-network-openstack-openstack-networker-kz2cj"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.503911 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crvh7\" (UniqueName: \"kubernetes.io/projected/9657dfc5-381a-4d1b-adb3-c5e6d0676d88-kube-api-access-crvh7\") pod \"validate-network-openstack-openstack-networker-kz2cj\" (UID: \"9657dfc5-381a-4d1b-adb3-c5e6d0676d88\") " pod="openstack/validate-network-openstack-openstack-networker-kz2cj"
Nov 27 09:19:33 crc kubenswrapper[4971]: I1127 09:19:33.605870 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-networker-kz2cj"
Nov 27 09:19:34 crc kubenswrapper[4971]: I1127 09:19:34.135055 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-networker-kz2cj"]
Nov 27 09:19:34 crc kubenswrapper[4971]: I1127 09:19:34.155674 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 27 09:19:34 crc kubenswrapper[4971]: I1127 09:19:34.179892 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-networker-kz2cj" event={"ID":"9657dfc5-381a-4d1b-adb3-c5e6d0676d88","Type":"ContainerStarted","Data":"1807f7706b91ea2bb7c21c0fbc25d4f18b2cfc5b253840c1967e4219567b1493"}
Nov 27 09:19:36 crc kubenswrapper[4971]: I1127 09:19:36.227897 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-networker-kz2cj" event={"ID":"9657dfc5-381a-4d1b-adb3-c5e6d0676d88","Type":"ContainerStarted","Data":"4725502266e77e4103276c668969880465da38771e328b06a37a7f84d397d8c6"}
Nov 27 09:19:36 crc kubenswrapper[4971]: I1127 09:19:36.253521 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-openstack-openstack-networker-kz2cj" podStartSLOduration=2.115726331 podStartE2EDuration="3.253489042s" podCreationTimestamp="2025-11-27 09:19:33 +0000 UTC" firstStartedPulling="2025-11-27 09:19:34.155239495 +0000 UTC m=+8812.347283413" lastFinishedPulling="2025-11-27 09:19:35.293002206 +0000 UTC m=+8813.485046124" observedRunningTime="2025-11-27 09:19:36.250313402 +0000 UTC m=+8814.442357340" watchObservedRunningTime="2025-11-27 09:19:36.253489042 +0000 UTC m=+8814.445532960"
Nov 27 09:19:42 crc kubenswrapper[4971]: I1127 09:19:42.288567 4971 generic.go:334] "Generic (PLEG): container finished" podID="9657dfc5-381a-4d1b-adb3-c5e6d0676d88" containerID="4725502266e77e4103276c668969880465da38771e328b06a37a7f84d397d8c6" exitCode=0
Nov 27 09:19:42 crc kubenswrapper[4971]: I1127 09:19:42.288658 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-networker-kz2cj" event={"ID":"9657dfc5-381a-4d1b-adb3-c5e6d0676d88","Type":"ContainerDied","Data":"4725502266e77e4103276c668969880465da38771e328b06a37a7f84d397d8c6"}
Nov 27 09:19:43 crc kubenswrapper[4971]: I1127 09:19:43.763464 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-networker-kz2cj"
Nov 27 09:19:43 crc kubenswrapper[4971]: I1127 09:19:43.856231 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9657dfc5-381a-4d1b-adb3-c5e6d0676d88-ssh-key\") pod \"9657dfc5-381a-4d1b-adb3-c5e6d0676d88\" (UID: \"9657dfc5-381a-4d1b-adb3-c5e6d0676d88\") "
Nov 27 09:19:43 crc kubenswrapper[4971]: I1127 09:19:43.856306 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crvh7\" (UniqueName: \"kubernetes.io/projected/9657dfc5-381a-4d1b-adb3-c5e6d0676d88-kube-api-access-crvh7\") pod \"9657dfc5-381a-4d1b-adb3-c5e6d0676d88\" (UID: \"9657dfc5-381a-4d1b-adb3-c5e6d0676d88\") "
Nov 27 09:19:43 crc kubenswrapper[4971]: I1127 09:19:43.856348 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9657dfc5-381a-4d1b-adb3-c5e6d0676d88-inventory\") pod \"9657dfc5-381a-4d1b-adb3-c5e6d0676d88\" (UID: \"9657dfc5-381a-4d1b-adb3-c5e6d0676d88\") "
Nov 27 09:19:43 crc kubenswrapper[4971]: I1127 09:19:43.873668 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9657dfc5-381a-4d1b-adb3-c5e6d0676d88-kube-api-access-crvh7" (OuterVolumeSpecName: "kube-api-access-crvh7") pod "9657dfc5-381a-4d1b-adb3-c5e6d0676d88" (UID: "9657dfc5-381a-4d1b-adb3-c5e6d0676d88"). InnerVolumeSpecName "kube-api-access-crvh7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:19:43 crc kubenswrapper[4971]: I1127 09:19:43.890563 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9657dfc5-381a-4d1b-adb3-c5e6d0676d88-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9657dfc5-381a-4d1b-adb3-c5e6d0676d88" (UID: "9657dfc5-381a-4d1b-adb3-c5e6d0676d88"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:19:43 crc kubenswrapper[4971]: I1127 09:19:43.891698 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9657dfc5-381a-4d1b-adb3-c5e6d0676d88-inventory" (OuterVolumeSpecName: "inventory") pod "9657dfc5-381a-4d1b-adb3-c5e6d0676d88" (UID: "9657dfc5-381a-4d1b-adb3-c5e6d0676d88"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:19:43 crc kubenswrapper[4971]: I1127 09:19:43.957830 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9657dfc5-381a-4d1b-adb3-c5e6d0676d88-ssh-key\") on node \"crc\" DevicePath \"\""
Nov 27 09:19:43 crc kubenswrapper[4971]: I1127 09:19:43.957863 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crvh7\" (UniqueName: \"kubernetes.io/projected/9657dfc5-381a-4d1b-adb3-c5e6d0676d88-kube-api-access-crvh7\") on node \"crc\" DevicePath \"\""
Nov 27 09:19:43 crc kubenswrapper[4971]: I1127 09:19:43.957874 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9657dfc5-381a-4d1b-adb3-c5e6d0676d88-inventory\") on node \"crc\" DevicePath \"\""
Nov 27 09:19:44 crc kubenswrapper[4971]: I1127 09:19:44.314917 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-networker-kz2cj" event={"ID":"9657dfc5-381a-4d1b-adb3-c5e6d0676d88","Type":"ContainerDied","Data":"1807f7706b91ea2bb7c21c0fbc25d4f18b2cfc5b253840c1967e4219567b1493"}
Nov 27 09:19:44 crc kubenswrapper[4971]: I1127 09:19:44.315266 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1807f7706b91ea2bb7c21c0fbc25d4f18b2cfc5b253840c1967e4219567b1493"
Nov 27 09:19:44 crc kubenswrapper[4971]: I1127 09:19:44.315046 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-networker-kz2cj"
Nov 27 09:19:44 crc kubenswrapper[4971]: I1127 09:19:44.386283 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-openstack-openstack-networker-2dgqc"]
Nov 27 09:19:44 crc kubenswrapper[4971]: E1127 09:19:44.386841 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9657dfc5-381a-4d1b-adb3-c5e6d0676d88" containerName="validate-network-openstack-openstack-networker"
Nov 27 09:19:44 crc kubenswrapper[4971]: I1127 09:19:44.386869 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="9657dfc5-381a-4d1b-adb3-c5e6d0676d88" containerName="validate-network-openstack-openstack-networker"
Nov 27 09:19:44 crc kubenswrapper[4971]: I1127 09:19:44.387193 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="9657dfc5-381a-4d1b-adb3-c5e6d0676d88" containerName="validate-network-openstack-openstack-networker"
Nov 27 09:19:44 crc kubenswrapper[4971]: I1127 09:19:44.388717 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-networker-2dgqc"
Nov 27 09:19:44 crc kubenswrapper[4971]: I1127 09:19:44.392162 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-hbhvx"
Nov 27 09:19:44 crc kubenswrapper[4971]: I1127 09:19:44.393198 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker"
Nov 27 09:19:44 crc kubenswrapper[4971]: I1127 09:19:44.399090 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-networker-2dgqc"]
Nov 27 09:19:44 crc kubenswrapper[4971]: I1127 09:19:44.584572 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r29j8\" (UniqueName: \"kubernetes.io/projected/326f81f2-90ba-46d9-a77e-fdefdce61bea-kube-api-access-r29j8\") pod \"install-os-openstack-openstack-networker-2dgqc\" (UID: \"326f81f2-90ba-46d9-a77e-fdefdce61bea\") " pod="openstack/install-os-openstack-openstack-networker-2dgqc"
Nov 27 09:19:44 crc kubenswrapper[4971]: I1127 09:19:44.586474 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/326f81f2-90ba-46d9-a77e-fdefdce61bea-ssh-key\") pod \"install-os-openstack-openstack-networker-2dgqc\" (UID: \"326f81f2-90ba-46d9-a77e-fdefdce61bea\") " pod="openstack/install-os-openstack-openstack-networker-2dgqc"
Nov 27 09:19:44 crc kubenswrapper[4971]: I1127 09:19:44.586553 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/326f81f2-90ba-46d9-a77e-fdefdce61bea-inventory\") pod \"install-os-openstack-openstack-networker-2dgqc\" (UID: \"326f81f2-90ba-46d9-a77e-fdefdce61bea\") " pod="openstack/install-os-openstack-openstack-networker-2dgqc"
Nov 27 09:19:44 crc kubenswrapper[4971]: I1127 09:19:44.689896 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/326f81f2-90ba-46d9-a77e-fdefdce61bea-ssh-key\") pod \"install-os-openstack-openstack-networker-2dgqc\" (UID: \"326f81f2-90ba-46d9-a77e-fdefdce61bea\") " pod="openstack/install-os-openstack-openstack-networker-2dgqc"
Nov 27 09:19:44 crc kubenswrapper[4971]: I1127 09:19:44.689967 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/326f81f2-90ba-46d9-a77e-fdefdce61bea-inventory\") pod \"install-os-openstack-openstack-networker-2dgqc\" (UID: \"326f81f2-90ba-46d9-a77e-fdefdce61bea\") " pod="openstack/install-os-openstack-openstack-networker-2dgqc"
Nov 27 09:19:44 crc kubenswrapper[4971]: I1127 09:19:44.690076 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r29j8\" (UniqueName: \"kubernetes.io/projected/326f81f2-90ba-46d9-a77e-fdefdce61bea-kube-api-access-r29j8\") pod \"install-os-openstack-openstack-networker-2dgqc\" (UID: \"326f81f2-90ba-46d9-a77e-fdefdce61bea\") " pod="openstack/install-os-openstack-openstack-networker-2dgqc"
Nov 27 09:19:44 crc kubenswrapper[4971]: I1127 09:19:44.695792 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/326f81f2-90ba-46d9-a77e-fdefdce61bea-ssh-key\") pod \"install-os-openstack-openstack-networker-2dgqc\" (UID: \"326f81f2-90ba-46d9-a77e-fdefdce61bea\") " pod="openstack/install-os-openstack-openstack-networker-2dgqc"
Nov 27 09:19:44 crc kubenswrapper[4971]: I1127 09:19:44.695832 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/326f81f2-90ba-46d9-a77e-fdefdce61bea-inventory\") pod \"install-os-openstack-openstack-networker-2dgqc\" (UID: \"326f81f2-90ba-46d9-a77e-fdefdce61bea\") " pod="openstack/install-os-openstack-openstack-networker-2dgqc"
Nov 27 09:19:44 crc kubenswrapper[4971]: I1127 09:19:44.725063 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r29j8\" (UniqueName: \"kubernetes.io/projected/326f81f2-90ba-46d9-a77e-fdefdce61bea-kube-api-access-r29j8\") pod \"install-os-openstack-openstack-networker-2dgqc\" (UID: \"326f81f2-90ba-46d9-a77e-fdefdce61bea\") " pod="openstack/install-os-openstack-openstack-networker-2dgqc"
Nov 27 09:19:45 crc kubenswrapper[4971]: I1127 09:19:45.020924 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-networker-2dgqc"
Nov 27 09:19:45 crc kubenswrapper[4971]: I1127 09:19:45.329194 4971 generic.go:334] "Generic (PLEG): container finished" podID="8a65697f-7b70-42e9-bc6b-12f32a73cbb7" containerID="d424dac5602c6611880a60a77453747835831996737351a09adfba6e0d1a1a4e" exitCode=0
Nov 27 09:19:45 crc kubenswrapper[4971]: I1127 09:19:45.329325 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-ftnrt" event={"ID":"8a65697f-7b70-42e9-bc6b-12f32a73cbb7","Type":"ContainerDied","Data":"d424dac5602c6611880a60a77453747835831996737351a09adfba6e0d1a1a4e"}
Nov 27 09:19:45 crc kubenswrapper[4971]: I1127 09:19:45.577198 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-networker-2dgqc"]
Nov 27 09:19:46 crc kubenswrapper[4971]: I1127 09:19:46.350102 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-networker-2dgqc" event={"ID":"326f81f2-90ba-46d9-a77e-fdefdce61bea","Type":"ContainerStarted","Data":"955c7b44e083adfe4dc2ec9ef471f3a478fc7d7452fc5c7d1c8afa3f02bf631f"}
Nov 27 09:19:46 crc kubenswrapper[4971]: I1127 09:19:46.869676 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-ftnrt"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.058978 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8n9jj\" (UniqueName: \"kubernetes.io/projected/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-kube-api-access-8n9jj\") pod \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\" (UID: \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\") "
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.059156 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-ceph\") pod \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\" (UID: \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\") "
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.059275 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-inventory\") pod \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\" (UID: \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\") "
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.059333 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-ssh-key\") pod \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\" (UID: \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\") "
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.066371 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-ceph" (OuterVolumeSpecName: "ceph") pod "8a65697f-7b70-42e9-bc6b-12f32a73cbb7" (UID: "8a65697f-7b70-42e9-bc6b-12f32a73cbb7"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.067489 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-kube-api-access-8n9jj" (OuterVolumeSpecName: "kube-api-access-8n9jj") pod "8a65697f-7b70-42e9-bc6b-12f32a73cbb7" (UID: "8a65697f-7b70-42e9-bc6b-12f32a73cbb7"). InnerVolumeSpecName "kube-api-access-8n9jj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:19:47 crc kubenswrapper[4971]: E1127 09:19:47.089235 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-inventory podName:8a65697f-7b70-42e9-bc6b-12f32a73cbb7 nodeName:}" failed. No retries permitted until 2025-11-27 09:19:47.589192426 +0000 UTC m=+8825.781236334 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "inventory" (UniqueName: "kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-inventory") pod "8a65697f-7b70-42e9-bc6b-12f32a73cbb7" (UID: "8a65697f-7b70-42e9-bc6b-12f32a73cbb7") : error deleting /var/lib/kubelet/pods/8a65697f-7b70-42e9-bc6b-12f32a73cbb7/volume-subpaths: remove /var/lib/kubelet/pods/8a65697f-7b70-42e9-bc6b-12f32a73cbb7/volume-subpaths: no such file or directory
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.093237 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8a65697f-7b70-42e9-bc6b-12f32a73cbb7" (UID: "8a65697f-7b70-42e9-bc6b-12f32a73cbb7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.162824 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8n9jj\" (UniqueName: \"kubernetes.io/projected/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-kube-api-access-8n9jj\") on node \"crc\" DevicePath \"\""
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.162863 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-ceph\") on node \"crc\" DevicePath \"\""
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.162872 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-ssh-key\") on node \"crc\" DevicePath \"\""
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.363313 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-ftnrt" event={"ID":"8a65697f-7b70-42e9-bc6b-12f32a73cbb7","Type":"ContainerDied","Data":"64b60aa5cfc64d3eb65e8ab1957e786e16bfcf52d6fee36e6599f6458bde5fca"}
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.363362 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="64b60aa5cfc64d3eb65e8ab1957e786e16bfcf52d6fee36e6599f6458bde5fca"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.364660 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-ftnrt"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.365148 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-networker-2dgqc" event={"ID":"326f81f2-90ba-46d9-a77e-fdefdce61bea","Type":"ContainerStarted","Data":"b53c2275d385c7e47a9f57abee4c03f611c2c250b9d59e61e1ff7f475c74804d"}
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.404971 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-openstack-openstack-networker-2dgqc" podStartSLOduration=2.771185944 podStartE2EDuration="3.404950362s" podCreationTimestamp="2025-11-27 09:19:44 +0000 UTC" firstStartedPulling="2025-11-27 09:19:45.581679015 +0000 UTC m=+8823.773722943" lastFinishedPulling="2025-11-27 09:19:46.215443403 +0000 UTC m=+8824.407487361" observedRunningTime="2025-11-27 09:19:47.386932111 +0000 UTC m=+8825.578976029" watchObservedRunningTime="2025-11-27 09:19:47.404950362 +0000 UTC m=+8825.596994280"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.440084 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-4hhx2"]
Nov 27 09:19:47 crc kubenswrapper[4971]: E1127 09:19:47.440704 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a65697f-7b70-42e9-bc6b-12f32a73cbb7" containerName="configure-network-openstack-openstack-cell1"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.440724 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a65697f-7b70-42e9-bc6b-12f32a73cbb7" containerName="configure-network-openstack-openstack-cell1"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.440970 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a65697f-7b70-42e9-bc6b-12f32a73cbb7" containerName="configure-network-openstack-openstack-cell1"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.441903 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-4hhx2"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.455277 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-4hhx2"]
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.573275 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ef7cbaeb-f90b-4ff4-855c-917a445abf20-ceph\") pod \"validate-network-openstack-openstack-cell1-4hhx2\" (UID: \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\") " pod="openstack/validate-network-openstack-openstack-cell1-4hhx2"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.573325 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ef7cbaeb-f90b-4ff4-855c-917a445abf20-inventory\") pod \"validate-network-openstack-openstack-cell1-4hhx2\" (UID: \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\") " pod="openstack/validate-network-openstack-openstack-cell1-4hhx2"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.573980 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ef7cbaeb-f90b-4ff4-855c-917a445abf20-ssh-key\") pod \"validate-network-openstack-openstack-cell1-4hhx2\" (UID: \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\") " pod="openstack/validate-network-openstack-openstack-cell1-4hhx2"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.574429 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64qrv\" (UniqueName: \"kubernetes.io/projected/ef7cbaeb-f90b-4ff4-855c-917a445abf20-kube-api-access-64qrv\") pod \"validate-network-openstack-openstack-cell1-4hhx2\" (UID: \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\") " pod="openstack/validate-network-openstack-openstack-cell1-4hhx2"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.686387 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-inventory\") pod \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\" (UID: \"8a65697f-7b70-42e9-bc6b-12f32a73cbb7\") "
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.687613 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ef7cbaeb-f90b-4ff4-855c-917a445abf20-ssh-key\") pod \"validate-network-openstack-openstack-cell1-4hhx2\" (UID: \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\") " pod="openstack/validate-network-openstack-openstack-cell1-4hhx2"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.687722 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64qrv\" (UniqueName: \"kubernetes.io/projected/ef7cbaeb-f90b-4ff4-855c-917a445abf20-kube-api-access-64qrv\") pod \"validate-network-openstack-openstack-cell1-4hhx2\" (UID: \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\") " pod="openstack/validate-network-openstack-openstack-cell1-4hhx2"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.687805 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ef7cbaeb-f90b-4ff4-855c-917a445abf20-ceph\") pod \"validate-network-openstack-openstack-cell1-4hhx2\" (UID: \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\") " pod="openstack/validate-network-openstack-openstack-cell1-4hhx2"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.687838 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ef7cbaeb-f90b-4ff4-855c-917a445abf20-inventory\") pod \"validate-network-openstack-openstack-cell1-4hhx2\" (UID: \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\") " pod="openstack/validate-network-openstack-openstack-cell1-4hhx2"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.692002 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ef7cbaeb-f90b-4ff4-855c-917a445abf20-inventory\") pod \"validate-network-openstack-openstack-cell1-4hhx2\" (UID: \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\") " pod="openstack/validate-network-openstack-openstack-cell1-4hhx2"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.721030 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ef7cbaeb-f90b-4ff4-855c-917a445abf20-ssh-key\") pod \"validate-network-openstack-openstack-cell1-4hhx2\" (UID: \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\") " pod="openstack/validate-network-openstack-openstack-cell1-4hhx2"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.721526 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64qrv\" (UniqueName: \"kubernetes.io/projected/ef7cbaeb-f90b-4ff4-855c-917a445abf20-kube-api-access-64qrv\") pod \"validate-network-openstack-openstack-cell1-4hhx2\" (UID: \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\") " pod="openstack/validate-network-openstack-openstack-cell1-4hhx2"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.722763 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-inventory" (OuterVolumeSpecName: "inventory") pod "8a65697f-7b70-42e9-bc6b-12f32a73cbb7" (UID: "8a65697f-7b70-42e9-bc6b-12f32a73cbb7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.726404 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ef7cbaeb-f90b-4ff4-855c-917a445abf20-ceph\") pod \"validate-network-openstack-openstack-cell1-4hhx2\" (UID: \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\") " pod="openstack/validate-network-openstack-openstack-cell1-4hhx2"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.762529 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-4hhx2"
Nov 27 09:19:47 crc kubenswrapper[4971]: I1127 09:19:47.789987 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a65697f-7b70-42e9-bc6b-12f32a73cbb7-inventory\") on node \"crc\" DevicePath \"\""
Nov 27 09:19:48 crc kubenswrapper[4971]: I1127 09:19:48.327869 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-4hhx2"]
Nov 27 09:19:48 crc kubenswrapper[4971]: W1127 09:19:48.333021 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef7cbaeb_f90b_4ff4_855c_917a445abf20.slice/crio-97041381a1ac98df310260a4338acb7a23a87439cd87e6ce4dcd7feac8075fb0 WatchSource:0}: Error finding container 97041381a1ac98df310260a4338acb7a23a87439cd87e6ce4dcd7feac8075fb0: Status 404 returned error can't find the container with id 97041381a1ac98df310260a4338acb7a23a87439cd87e6ce4dcd7feac8075fb0
Nov 27 09:19:48 crc kubenswrapper[4971]: I1127 09:19:48.381076 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-4hhx2" event={"ID":"ef7cbaeb-f90b-4ff4-855c-917a445abf20","Type":"ContainerStarted","Data":"97041381a1ac98df310260a4338acb7a23a87439cd87e6ce4dcd7feac8075fb0"}
Nov 27 09:19:49 crc kubenswrapper[4971]: I1127 09:19:49.396346 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-4hhx2" event={"ID":"ef7cbaeb-f90b-4ff4-855c-917a445abf20","Type":"ContainerStarted","Data":"8907b9e28008efb7af1da6433b184c745ecad615d37fe18a7e4c6091e8dfec47"}
Nov 27 09:19:49 crc kubenswrapper[4971]: I1127 09:19:49.423836 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-openstack-openstack-cell1-4hhx2" podStartSLOduration=1.930896407 podStartE2EDuration="2.423816395s" podCreationTimestamp="2025-11-27 09:19:47 +0000 UTC" firstStartedPulling="2025-11-27 09:19:48.342661282 +0000 UTC m=+8826.534705190" lastFinishedPulling="2025-11-27 09:19:48.83558126 +0000 UTC m=+8827.027625178" observedRunningTime="2025-11-27 09:19:49.417942038 +0000 UTC m=+8827.609985966" watchObservedRunningTime="2025-11-27 09:19:49.423816395 +0000 UTC m=+8827.615860313"
Nov 27 09:19:55 crc kubenswrapper[4971]: I1127 09:19:55.463167 4971 generic.go:334] "Generic (PLEG): container finished" podID="ef7cbaeb-f90b-4ff4-855c-917a445abf20" containerID="8907b9e28008efb7af1da6433b184c745ecad615d37fe18a7e4c6091e8dfec47" exitCode=0
Nov 27 09:19:55 crc kubenswrapper[4971]: I1127 09:19:55.463366 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-4hhx2" event={"ID":"ef7cbaeb-f90b-4ff4-855c-917a445abf20","Type":"ContainerDied","Data":"8907b9e28008efb7af1da6433b184c745ecad615d37fe18a7e4c6091e8dfec47"}
Nov 27 09:19:56 crc kubenswrapper[4971]: I1127 09:19:56.986155 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-4hhx2"
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.138597 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ef7cbaeb-f90b-4ff4-855c-917a445abf20-inventory\") pod \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\" (UID: \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\") "
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.138736 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ef7cbaeb-f90b-4ff4-855c-917a445abf20-ceph\") pod \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\" (UID: \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\") "
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.138802 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ef7cbaeb-f90b-4ff4-855c-917a445abf20-ssh-key\") pod \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\" (UID: \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\") "
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.138856 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64qrv\" (UniqueName: \"kubernetes.io/projected/ef7cbaeb-f90b-4ff4-855c-917a445abf20-kube-api-access-64qrv\") pod \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\" (UID: \"ef7cbaeb-f90b-4ff4-855c-917a445abf20\") "
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.145857 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef7cbaeb-f90b-4ff4-855c-917a445abf20-kube-api-access-64qrv" (OuterVolumeSpecName: "kube-api-access-64qrv") pod "ef7cbaeb-f90b-4ff4-855c-917a445abf20" (UID: "ef7cbaeb-f90b-4ff4-855c-917a445abf20"). InnerVolumeSpecName "kube-api-access-64qrv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.153526 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef7cbaeb-f90b-4ff4-855c-917a445abf20-ceph" (OuterVolumeSpecName: "ceph") pod "ef7cbaeb-f90b-4ff4-855c-917a445abf20" (UID: "ef7cbaeb-f90b-4ff4-855c-917a445abf20"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.172279 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef7cbaeb-f90b-4ff4-855c-917a445abf20-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ef7cbaeb-f90b-4ff4-855c-917a445abf20" (UID: "ef7cbaeb-f90b-4ff4-855c-917a445abf20"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.173833 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef7cbaeb-f90b-4ff4-855c-917a445abf20-inventory" (OuterVolumeSpecName: "inventory") pod "ef7cbaeb-f90b-4ff4-855c-917a445abf20" (UID: "ef7cbaeb-f90b-4ff4-855c-917a445abf20"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.241616 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ef7cbaeb-f90b-4ff4-855c-917a445abf20-ssh-key\") on node \"crc\" DevicePath \"\""
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.241663 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64qrv\" (UniqueName: \"kubernetes.io/projected/ef7cbaeb-f90b-4ff4-855c-917a445abf20-kube-api-access-64qrv\") on node \"crc\" DevicePath \"\""
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.241702 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ef7cbaeb-f90b-4ff4-855c-917a445abf20-inventory\") on node \"crc\" DevicePath \"\""
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.241713 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ef7cbaeb-f90b-4ff4-855c-917a445abf20-ceph\") on node \"crc\" DevicePath \"\""
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.487029 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-4hhx2" event={"ID":"ef7cbaeb-f90b-4ff4-855c-917a445abf20","Type":"ContainerDied","Data":"97041381a1ac98df310260a4338acb7a23a87439cd87e6ce4dcd7feac8075fb0"}
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.487101 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="97041381a1ac98df310260a4338acb7a23a87439cd87e6ce4dcd7feac8075fb0"
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.487163 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-4hhx2"
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.588569 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-openstack-openstack-cell1-d9wmt"]
Nov 27 09:19:57 crc kubenswrapper[4971]: E1127 09:19:57.589840 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef7cbaeb-f90b-4ff4-855c-917a445abf20" containerName="validate-network-openstack-openstack-cell1"
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.589987 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef7cbaeb-f90b-4ff4-855c-917a445abf20" containerName="validate-network-openstack-openstack-cell1"
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.590457 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef7cbaeb-f90b-4ff4-855c-917a445abf20" containerName="validate-network-openstack-openstack-cell1"
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.592210 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-d9wmt"
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.595296 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.595361 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-jnkbm"
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.598747 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-d9wmt"]
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.761232 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2973f319-d645-472b-b9b5-b3708dd37782-ssh-key\") pod \"install-os-openstack-openstack-cell1-d9wmt\" (UID: \"2973f319-d645-472b-b9b5-b3708dd37782\") " pod="openstack/install-os-openstack-openstack-cell1-d9wmt"
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.761840 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2973f319-d645-472b-b9b5-b3708dd37782-ceph\") pod \"install-os-openstack-openstack-cell1-d9wmt\" (UID: \"2973f319-d645-472b-b9b5-b3708dd37782\") " pod="openstack/install-os-openstack-openstack-cell1-d9wmt"
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.762186 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-592zq\" (UniqueName: \"kubernetes.io/projected/2973f319-d645-472b-b9b5-b3708dd37782-kube-api-access-592zq\") pod \"install-os-openstack-openstack-cell1-d9wmt\" (UID: \"2973f319-d645-472b-b9b5-b3708dd37782\") " pod="openstack/install-os-openstack-openstack-cell1-d9wmt"
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.762386 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2973f319-d645-472b-b9b5-b3708dd37782-inventory\") pod \"install-os-openstack-openstack-cell1-d9wmt\" (UID: \"2973f319-d645-472b-b9b5-b3708dd37782\") " pod="openstack/install-os-openstack-openstack-cell1-d9wmt"
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.864507 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2973f319-d645-472b-b9b5-b3708dd37782-ceph\") pod \"install-os-openstack-openstack-cell1-d9wmt\" (UID: \"2973f319-d645-472b-b9b5-b3708dd37782\") " pod="openstack/install-os-openstack-openstack-cell1-d9wmt"
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.864660 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-592zq\" (UniqueName: \"kubernetes.io/projected/2973f319-d645-472b-b9b5-b3708dd37782-kube-api-access-592zq\") pod \"install-os-openstack-openstack-cell1-d9wmt\" (UID: \"2973f319-d645-472b-b9b5-b3708dd37782\") " pod="openstack/install-os-openstack-openstack-cell1-d9wmt"
Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.864715 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2973f319-d645-472b-b9b5-b3708dd37782-inventory\") pod \"install-os-openstack-openstack-cell1-d9wmt\" (UID: \"2973f319-d645-472b-b9b5-b3708dd37782\") " pod="openstack/install-os-openstack-openstack-cell1-d9wmt"
Nov
27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.865221 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2973f319-d645-472b-b9b5-b3708dd37782-ssh-key\") pod \"install-os-openstack-openstack-cell1-d9wmt\" (UID: \"2973f319-d645-472b-b9b5-b3708dd37782\") " pod="openstack/install-os-openstack-openstack-cell1-d9wmt" Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.868445 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2973f319-d645-472b-b9b5-b3708dd37782-inventory\") pod \"install-os-openstack-openstack-cell1-d9wmt\" (UID: \"2973f319-d645-472b-b9b5-b3708dd37782\") " pod="openstack/install-os-openstack-openstack-cell1-d9wmt" Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.869561 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2973f319-d645-472b-b9b5-b3708dd37782-ceph\") pod \"install-os-openstack-openstack-cell1-d9wmt\" (UID: \"2973f319-d645-472b-b9b5-b3708dd37782\") " pod="openstack/install-os-openstack-openstack-cell1-d9wmt" Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.870320 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2973f319-d645-472b-b9b5-b3708dd37782-ssh-key\") pod \"install-os-openstack-openstack-cell1-d9wmt\" (UID: \"2973f319-d645-472b-b9b5-b3708dd37782\") " pod="openstack/install-os-openstack-openstack-cell1-d9wmt" Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.882731 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-592zq\" (UniqueName: \"kubernetes.io/projected/2973f319-d645-472b-b9b5-b3708dd37782-kube-api-access-592zq\") pod \"install-os-openstack-openstack-cell1-d9wmt\" (UID: \"2973f319-d645-472b-b9b5-b3708dd37782\") " pod="openstack/install-os-openstack-openstack-cell1-d9wmt" Nov 27 09:19:57 crc kubenswrapper[4971]: I1127 09:19:57.922659 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-d9wmt" Nov 27 09:19:58 crc kubenswrapper[4971]: I1127 09:19:58.616090 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-d9wmt"] Nov 27 09:19:59 crc kubenswrapper[4971]: I1127 09:19:59.506045 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-d9wmt" event={"ID":"2973f319-d645-472b-b9b5-b3708dd37782","Type":"ContainerStarted","Data":"0bdc2029c9810a51ae60821f91731a43685fa1b6e2273a180c3bf180cf5c1fce"} Nov 27 09:19:59 crc kubenswrapper[4971]: I1127 09:19:59.506681 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-d9wmt" event={"ID":"2973f319-d645-472b-b9b5-b3708dd37782","Type":"ContainerStarted","Data":"e970309e7ea289628d68b6822de99b2510b7f98542923cdc796c207500628678"} Nov 27 09:19:59 crc kubenswrapper[4971]: I1127 09:19:59.528902 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-openstack-openstack-cell1-d9wmt" podStartSLOduration=2.044440053 podStartE2EDuration="2.52888197s" podCreationTimestamp="2025-11-27 09:19:57 +0000 UTC" firstStartedPulling="2025-11-27 09:19:58.617257042 +0000 UTC m=+8836.809300960" lastFinishedPulling="2025-11-27 09:19:59.101698959 +0000 UTC m=+8837.293742877" observedRunningTime="2025-11-27 09:19:59.5228719 +0000 UTC m=+8837.714915808" watchObservedRunningTime="2025-11-27 09:19:59.52888197 +0000 UTC m=+8837.720925888" Nov 27 09:20:38 crc kubenswrapper[4971]: I1127 09:20:38.894195 4971 generic.go:334] "Generic (PLEG): container finished" podID="326f81f2-90ba-46d9-a77e-fdefdce61bea" containerID="b53c2275d385c7e47a9f57abee4c03f611c2c250b9d59e61e1ff7f475c74804d" exitCode=0 Nov 27 09:20:38 crc kubenswrapper[4971]: I1127 09:20:38.894304 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-networker-2dgqc" event={"ID":"326f81f2-90ba-46d9-a77e-fdefdce61bea","Type":"ContainerDied","Data":"b53c2275d385c7e47a9f57abee4c03f611c2c250b9d59e61e1ff7f475c74804d"} Nov 27 09:20:40 crc kubenswrapper[4971]: I1127 09:20:40.474620 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-networker-2dgqc" Nov 27 09:20:40 crc kubenswrapper[4971]: I1127 09:20:40.529852 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r29j8\" (UniqueName: \"kubernetes.io/projected/326f81f2-90ba-46d9-a77e-fdefdce61bea-kube-api-access-r29j8\") pod \"326f81f2-90ba-46d9-a77e-fdefdce61bea\" (UID: \"326f81f2-90ba-46d9-a77e-fdefdce61bea\") " Nov 27 09:20:40 crc kubenswrapper[4971]: I1127 09:20:40.530097 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/326f81f2-90ba-46d9-a77e-fdefdce61bea-ssh-key\") pod \"326f81f2-90ba-46d9-a77e-fdefdce61bea\" (UID: \"326f81f2-90ba-46d9-a77e-fdefdce61bea\") " Nov 27 09:20:40 crc kubenswrapper[4971]: I1127 09:20:40.530188 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/326f81f2-90ba-46d9-a77e-fdefdce61bea-inventory\") pod \"326f81f2-90ba-46d9-a77e-fdefdce61bea\" (UID: \"326f81f2-90ba-46d9-a77e-fdefdce61bea\") " Nov 27 09:20:40 crc kubenswrapper[4971]: I1127 09:20:40.552845 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/326f81f2-90ba-46d9-a77e-fdefdce61bea-kube-api-access-r29j8" (OuterVolumeSpecName: "kube-api-access-r29j8") pod "326f81f2-90ba-46d9-a77e-fdefdce61bea" (UID: "326f81f2-90ba-46d9-a77e-fdefdce61bea"). InnerVolumeSpecName "kube-api-access-r29j8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:20:40 crc kubenswrapper[4971]: I1127 09:20:40.704457 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/326f81f2-90ba-46d9-a77e-fdefdce61bea-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "326f81f2-90ba-46d9-a77e-fdefdce61bea" (UID: "326f81f2-90ba-46d9-a77e-fdefdce61bea"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:20:40 crc kubenswrapper[4971]: I1127 09:20:40.766002 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/326f81f2-90ba-46d9-a77e-fdefdce61bea-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:20:40 crc kubenswrapper[4971]: I1127 09:20:40.766046 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r29j8\" (UniqueName: \"kubernetes.io/projected/326f81f2-90ba-46d9-a77e-fdefdce61bea-kube-api-access-r29j8\") on node \"crc\" DevicePath \"\"" Nov 27 09:20:40 crc kubenswrapper[4971]: I1127 09:20:40.770486 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/326f81f2-90ba-46d9-a77e-fdefdce61bea-inventory" (OuterVolumeSpecName: "inventory") pod "326f81f2-90ba-46d9-a77e-fdefdce61bea" (UID: "326f81f2-90ba-46d9-a77e-fdefdce61bea"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:20:40 crc kubenswrapper[4971]: I1127 09:20:40.867844 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/326f81f2-90ba-46d9-a77e-fdefdce61bea-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:20:40 crc kubenswrapper[4971]: I1127 09:20:40.936840 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-networker-2dgqc" event={"ID":"326f81f2-90ba-46d9-a77e-fdefdce61bea","Type":"ContainerDied","Data":"955c7b44e083adfe4dc2ec9ef471f3a478fc7d7452fc5c7d1c8afa3f02bf631f"} Nov 27 09:20:40 crc kubenswrapper[4971]: I1127 09:20:40.936900 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="955c7b44e083adfe4dc2ec9ef471f3a478fc7d7452fc5c7d1c8afa3f02bf631f" Nov 27 09:20:40 crc kubenswrapper[4971]: I1127 09:20:40.936996 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-networker-2dgqc" Nov 27 09:20:41 crc kubenswrapper[4971]: I1127 09:20:41.040567 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-openstack-openstack-networker-jzpb4"] Nov 27 09:20:41 crc kubenswrapper[4971]: E1127 09:20:41.041190 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="326f81f2-90ba-46d9-a77e-fdefdce61bea" containerName="install-os-openstack-openstack-networker" Nov 27 09:20:41 crc kubenswrapper[4971]: I1127 09:20:41.041213 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="326f81f2-90ba-46d9-a77e-fdefdce61bea" containerName="install-os-openstack-openstack-networker" Nov 27 09:20:41 crc kubenswrapper[4971]: I1127 09:20:41.041472 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="326f81f2-90ba-46d9-a77e-fdefdce61bea" containerName="install-os-openstack-openstack-networker" Nov 27 09:20:41 crc kubenswrapper[4971]: I1127 09:20:41.042482 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-networker-jzpb4" Nov 27 09:20:41 crc kubenswrapper[4971]: I1127 09:20:41.045468 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-hbhvx" Nov 27 09:20:41 crc kubenswrapper[4971]: I1127 09:20:41.045750 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker" Nov 27 09:20:41 crc kubenswrapper[4971]: I1127 09:20:41.057491 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-networker-jzpb4"] Nov 27 09:20:41 crc kubenswrapper[4971]: I1127 09:20:41.179011 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f83c73e-5e55-47a9-9df5-32c19df3372f-inventory\") pod \"configure-os-openstack-openstack-networker-jzpb4\" (UID: \"0f83c73e-5e55-47a9-9df5-32c19df3372f\") " pod="openstack/configure-os-openstack-openstack-networker-jzpb4" Nov 27 09:20:41 crc kubenswrapper[4971]: I1127 09:20:41.179119 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzjnd\" (UniqueName: \"kubernetes.io/projected/0f83c73e-5e55-47a9-9df5-32c19df3372f-kube-api-access-pzjnd\") pod \"configure-os-openstack-openstack-networker-jzpb4\" (UID: \"0f83c73e-5e55-47a9-9df5-32c19df3372f\") " pod="openstack/configure-os-openstack-openstack-networker-jzpb4" Nov 27 09:20:41 crc kubenswrapper[4971]: I1127 09:20:41.179468 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f83c73e-5e55-47a9-9df5-32c19df3372f-ssh-key\") pod \"configure-os-openstack-openstack-networker-jzpb4\" (UID: \"0f83c73e-5e55-47a9-9df5-32c19df3372f\") " pod="openstack/configure-os-openstack-openstack-networker-jzpb4" Nov 27 09:20:41 crc kubenswrapper[4971]: I1127 09:20:41.282364 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f83c73e-5e55-47a9-9df5-32c19df3372f-inventory\") pod \"configure-os-openstack-openstack-networker-jzpb4\" (UID: \"0f83c73e-5e55-47a9-9df5-32c19df3372f\") " pod="openstack/configure-os-openstack-openstack-networker-jzpb4" Nov 27 09:20:41 crc kubenswrapper[4971]: I1127 09:20:41.282473 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzjnd\" (UniqueName: \"kubernetes.io/projected/0f83c73e-5e55-47a9-9df5-32c19df3372f-kube-api-access-pzjnd\") pod \"configure-os-openstack-openstack-networker-jzpb4\" (UID: \"0f83c73e-5e55-47a9-9df5-32c19df3372f\") " pod="openstack/configure-os-openstack-openstack-networker-jzpb4" Nov 27 09:20:41 crc kubenswrapper[4971]: I1127 09:20:41.282637 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f83c73e-5e55-47a9-9df5-32c19df3372f-ssh-key\") pod \"configure-os-openstack-openstack-networker-jzpb4\" (UID: \"0f83c73e-5e55-47a9-9df5-32c19df3372f\") " pod="openstack/configure-os-openstack-openstack-networker-jzpb4" Nov 27 09:20:41 crc kubenswrapper[4971]: I1127 09:20:41.287493 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f83c73e-5e55-47a9-9df5-32c19df3372f-ssh-key\") pod \"configure-os-openstack-openstack-networker-jzpb4\" (UID: 
\"0f83c73e-5e55-47a9-9df5-32c19df3372f\") " pod="openstack/configure-os-openstack-openstack-networker-jzpb4" Nov 27 09:20:41 crc kubenswrapper[4971]: I1127 09:20:41.288134 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f83c73e-5e55-47a9-9df5-32c19df3372f-inventory\") pod \"configure-os-openstack-openstack-networker-jzpb4\" (UID: \"0f83c73e-5e55-47a9-9df5-32c19df3372f\") " pod="openstack/configure-os-openstack-openstack-networker-jzpb4" Nov 27 09:20:41 crc kubenswrapper[4971]: I1127 09:20:41.302894 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzjnd\" (UniqueName: \"kubernetes.io/projected/0f83c73e-5e55-47a9-9df5-32c19df3372f-kube-api-access-pzjnd\") pod \"configure-os-openstack-openstack-networker-jzpb4\" (UID: \"0f83c73e-5e55-47a9-9df5-32c19df3372f\") " pod="openstack/configure-os-openstack-openstack-networker-jzpb4" Nov 27 09:20:41 crc kubenswrapper[4971]: I1127 09:20:41.378716 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-networker-jzpb4" Nov 27 09:20:42 crc kubenswrapper[4971]: I1127 09:20:42.094001 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-networker-jzpb4"] Nov 27 09:20:42 crc kubenswrapper[4971]: I1127 09:20:42.966456 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-networker-jzpb4" event={"ID":"0f83c73e-5e55-47a9-9df5-32c19df3372f","Type":"ContainerStarted","Data":"330e47ee9d72f978e7113f9b97173a8b84cd1f1e3ef969141009eb5e95f4a0e3"} Nov 27 09:20:43 crc kubenswrapper[4971]: I1127 09:20:43.979045 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-networker-jzpb4" event={"ID":"0f83c73e-5e55-47a9-9df5-32c19df3372f","Type":"ContainerStarted","Data":"daf279daf7363bd49c276209e6cecf062f5a5eb15a02a0c8dbab1660b8decd8d"} Nov 27 09:20:44 crc kubenswrapper[4971]: I1127 09:20:44.005324 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-openstack-openstack-networker-jzpb4" podStartSLOduration=1.873307252 podStartE2EDuration="3.005299449s" podCreationTimestamp="2025-11-27 09:20:41 +0000 UTC" firstStartedPulling="2025-11-27 09:20:42.108273657 +0000 UTC m=+8880.300317595" lastFinishedPulling="2025-11-27 09:20:43.240265874 +0000 UTC m=+8881.432309792" observedRunningTime="2025-11-27 09:20:43.996900111 +0000 UTC m=+8882.188944039" watchObservedRunningTime="2025-11-27 09:20:44.005299449 +0000 UTC m=+8882.197343367" Nov 27 09:20:52 crc kubenswrapper[4971]: I1127 09:20:52.068685 4971 generic.go:334] "Generic (PLEG): container finished" podID="2973f319-d645-472b-b9b5-b3708dd37782" containerID="0bdc2029c9810a51ae60821f91731a43685fa1b6e2273a180c3bf180cf5c1fce" exitCode=0 Nov 27 09:20:52 crc kubenswrapper[4971]: I1127 09:20:52.068782 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-d9wmt" event={"ID":"2973f319-d645-472b-b9b5-b3708dd37782","Type":"ContainerDied","Data":"0bdc2029c9810a51ae60821f91731a43685fa1b6e2273a180c3bf180cf5c1fce"} Nov 27 09:20:53 crc kubenswrapper[4971]: I1127 09:20:53.539905 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-d9wmt" Nov 27 09:20:53 crc kubenswrapper[4971]: I1127 09:20:53.565322 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2973f319-d645-472b-b9b5-b3708dd37782-ssh-key\") pod \"2973f319-d645-472b-b9b5-b3708dd37782\" (UID: \"2973f319-d645-472b-b9b5-b3708dd37782\") " Nov 27 09:20:53 crc kubenswrapper[4971]: I1127 09:20:53.565388 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-592zq\" (UniqueName: \"kubernetes.io/projected/2973f319-d645-472b-b9b5-b3708dd37782-kube-api-access-592zq\") pod \"2973f319-d645-472b-b9b5-b3708dd37782\" (UID: \"2973f319-d645-472b-b9b5-b3708dd37782\") " Nov 27 09:20:53 crc kubenswrapper[4971]: I1127 09:20:53.565518 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2973f319-d645-472b-b9b5-b3708dd37782-ceph\") pod \"2973f319-d645-472b-b9b5-b3708dd37782\" (UID: \"2973f319-d645-472b-b9b5-b3708dd37782\") " Nov 27 09:20:53 crc kubenswrapper[4971]: I1127 09:20:53.565661 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2973f319-d645-472b-b9b5-b3708dd37782-inventory\") pod \"2973f319-d645-472b-b9b5-b3708dd37782\" (UID: \"2973f319-d645-472b-b9b5-b3708dd37782\") " Nov 27 09:20:53 crc kubenswrapper[4971]: I1127 09:20:53.585344 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2973f319-d645-472b-b9b5-b3708dd37782-ceph" (OuterVolumeSpecName: "ceph") pod "2973f319-d645-472b-b9b5-b3708dd37782" (UID: "2973f319-d645-472b-b9b5-b3708dd37782"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:20:53 crc kubenswrapper[4971]: I1127 09:20:53.589816 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2973f319-d645-472b-b9b5-b3708dd37782-kube-api-access-592zq" (OuterVolumeSpecName: "kube-api-access-592zq") pod "2973f319-d645-472b-b9b5-b3708dd37782" (UID: "2973f319-d645-472b-b9b5-b3708dd37782"). InnerVolumeSpecName "kube-api-access-592zq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:20:53 crc kubenswrapper[4971]: I1127 09:20:53.622097 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2973f319-d645-472b-b9b5-b3708dd37782-inventory" (OuterVolumeSpecName: "inventory") pod "2973f319-d645-472b-b9b5-b3708dd37782" (UID: "2973f319-d645-472b-b9b5-b3708dd37782"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:20:53 crc kubenswrapper[4971]: I1127 09:20:53.638515 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2973f319-d645-472b-b9b5-b3708dd37782-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2973f319-d645-472b-b9b5-b3708dd37782" (UID: "2973f319-d645-472b-b9b5-b3708dd37782"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:20:53 crc kubenswrapper[4971]: I1127 09:20:53.669629 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2973f319-d645-472b-b9b5-b3708dd37782-ceph\") on node \"crc\" DevicePath \"\"" Nov 27 09:20:53 crc kubenswrapper[4971]: I1127 09:20:53.669914 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2973f319-d645-472b-b9b5-b3708dd37782-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:20:53 crc kubenswrapper[4971]: I1127 09:20:53.669995 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2973f319-d645-472b-b9b5-b3708dd37782-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:20:53 crc kubenswrapper[4971]: I1127 09:20:53.670078 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-592zq\" (UniqueName: \"kubernetes.io/projected/2973f319-d645-472b-b9b5-b3708dd37782-kube-api-access-592zq\") on node \"crc\" DevicePath \"\"" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.106527 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-d9wmt" event={"ID":"2973f319-d645-472b-b9b5-b3708dd37782","Type":"ContainerDied","Data":"e970309e7ea289628d68b6822de99b2510b7f98542923cdc796c207500628678"} Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.106935 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e970309e7ea289628d68b6822de99b2510b7f98542923cdc796c207500628678" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.107063 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-d9wmt" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.190142 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-58vhc"] Nov 27 09:20:54 crc kubenswrapper[4971]: E1127 09:20:54.190851 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2973f319-d645-472b-b9b5-b3708dd37782" containerName="install-os-openstack-openstack-cell1" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.190890 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="2973f319-d645-472b-b9b5-b3708dd37782" containerName="install-os-openstack-openstack-cell1" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.191144 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="2973f319-d645-472b-b9b5-b3708dd37782" containerName="install-os-openstack-openstack-cell1" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.192365 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-58vhc" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.196277 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.196695 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-jnkbm" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.211191 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-58vhc"] Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.284809 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/811b1337-2a00-4b0a-a398-163af3d76d79-ssh-key\") pod \"configure-os-openstack-openstack-cell1-58vhc\" (UID: \"811b1337-2a00-4b0a-a398-163af3d76d79\") " pod="openstack/configure-os-openstack-openstack-cell1-58vhc" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.285063 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/811b1337-2a00-4b0a-a398-163af3d76d79-ceph\") pod \"configure-os-openstack-openstack-cell1-58vhc\" (UID: \"811b1337-2a00-4b0a-a398-163af3d76d79\") " pod="openstack/configure-os-openstack-openstack-cell1-58vhc" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.285145 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/811b1337-2a00-4b0a-a398-163af3d76d79-inventory\") pod \"configure-os-openstack-openstack-cell1-58vhc\" (UID: \"811b1337-2a00-4b0a-a398-163af3d76d79\") " pod="openstack/configure-os-openstack-openstack-cell1-58vhc" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.285187 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glgc4\" (UniqueName: \"kubernetes.io/projected/811b1337-2a00-4b0a-a398-163af3d76d79-kube-api-access-glgc4\") pod \"configure-os-openstack-openstack-cell1-58vhc\" (UID: \"811b1337-2a00-4b0a-a398-163af3d76d79\") " pod="openstack/configure-os-openstack-openstack-cell1-58vhc" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.387738 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/811b1337-2a00-4b0a-a398-163af3d76d79-ssh-key\") pod \"configure-os-openstack-openstack-cell1-58vhc\" (UID: \"811b1337-2a00-4b0a-a398-163af3d76d79\") " pod="openstack/configure-os-openstack-openstack-cell1-58vhc" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.387824 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/811b1337-2a00-4b0a-a398-163af3d76d79-ceph\") pod \"configure-os-openstack-openstack-cell1-58vhc\" (UID: \"811b1337-2a00-4b0a-a398-163af3d76d79\") " pod="openstack/configure-os-openstack-openstack-cell1-58vhc" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.387886 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/811b1337-2a00-4b0a-a398-163af3d76d79-inventory\") pod \"configure-os-openstack-openstack-cell1-58vhc\" (UID: \"811b1337-2a00-4b0a-a398-163af3d76d79\") " pod="openstack/configure-os-openstack-openstack-cell1-58vhc" Nov 
27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.387920 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glgc4\" (UniqueName: \"kubernetes.io/projected/811b1337-2a00-4b0a-a398-163af3d76d79-kube-api-access-glgc4\") pod \"configure-os-openstack-openstack-cell1-58vhc\" (UID: \"811b1337-2a00-4b0a-a398-163af3d76d79\") " pod="openstack/configure-os-openstack-openstack-cell1-58vhc" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.394900 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/811b1337-2a00-4b0a-a398-163af3d76d79-inventory\") pod \"configure-os-openstack-openstack-cell1-58vhc\" (UID: \"811b1337-2a00-4b0a-a398-163af3d76d79\") " pod="openstack/configure-os-openstack-openstack-cell1-58vhc" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.395140 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/811b1337-2a00-4b0a-a398-163af3d76d79-ssh-key\") pod \"configure-os-openstack-openstack-cell1-58vhc\" (UID: \"811b1337-2a00-4b0a-a398-163af3d76d79\") " pod="openstack/configure-os-openstack-openstack-cell1-58vhc" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.395447 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/811b1337-2a00-4b0a-a398-163af3d76d79-ceph\") pod \"configure-os-openstack-openstack-cell1-58vhc\" (UID: \"811b1337-2a00-4b0a-a398-163af3d76d79\") " pod="openstack/configure-os-openstack-openstack-cell1-58vhc" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.408602 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glgc4\" (UniqueName: \"kubernetes.io/projected/811b1337-2a00-4b0a-a398-163af3d76d79-kube-api-access-glgc4\") pod \"configure-os-openstack-openstack-cell1-58vhc\" (UID: \"811b1337-2a00-4b0a-a398-163af3d76d79\") " pod="openstack/configure-os-openstack-openstack-cell1-58vhc" Nov 27 09:20:54 crc kubenswrapper[4971]: I1127 09:20:54.527990 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-58vhc" Nov 27 09:20:55 crc kubenswrapper[4971]: I1127 09:20:55.132210 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-58vhc"] Nov 27 09:20:56 crc kubenswrapper[4971]: I1127 09:20:56.130339 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-58vhc" event={"ID":"811b1337-2a00-4b0a-a398-163af3d76d79","Type":"ContainerStarted","Data":"fcf18b1a01047d5e1fd387bf06370bfe5b1fc141e80fc0065c14b94ce8f2b671"} Nov 27 09:20:56 crc kubenswrapper[4971]: I1127 09:20:56.131464 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-58vhc" event={"ID":"811b1337-2a00-4b0a-a398-163af3d76d79","Type":"ContainerStarted","Data":"b955224ed47bb376c05070891675cd69d0b065aa9c70eab0405f5e6b5d757976"} Nov 27 09:20:56 crc kubenswrapper[4971]: I1127 09:20:56.156659 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-openstack-openstack-cell1-58vhc" podStartSLOduration=1.6555365960000001 podStartE2EDuration="2.156632845s" podCreationTimestamp="2025-11-27 09:20:54 +0000 UTC" firstStartedPulling="2025-11-27 09:20:55.132714369 +0000 UTC m=+8893.324758287" lastFinishedPulling="2025-11-27 09:20:55.633810608 +0000 UTC m=+8893.825854536" observedRunningTime="2025-11-27 09:20:56.147174797 +0000 UTC m=+8894.339218735" watchObservedRunningTime="2025-11-27 09:20:56.156632845 +0000 UTC m=+8894.348676763" Nov 27 09:20:56 crc kubenswrapper[4971]: I1127 09:20:56.413286 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 09:20:56 crc kubenswrapper[4971]: I1127 09:20:56.413353 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 09:21:23 crc kubenswrapper[4971]: I1127 09:21:23.171859 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qfclb"] Nov 27 09:21:23 crc kubenswrapper[4971]: I1127 09:21:23.179921 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qfclb" Nov 27 09:21:23 crc kubenswrapper[4971]: I1127 09:21:23.190315 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qfclb"] Nov 27 09:21:23 crc kubenswrapper[4971]: I1127 09:21:23.329670 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7a22e8a-307c-4e58-b358-10b242121eb6-catalog-content\") pod \"redhat-operators-qfclb\" (UID: \"c7a22e8a-307c-4e58-b358-10b242121eb6\") " pod="openshift-marketplace/redhat-operators-qfclb" Nov 27 09:21:23 crc kubenswrapper[4971]: I1127 09:21:23.330313 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvxxn\" (UniqueName: \"kubernetes.io/projected/c7a22e8a-307c-4e58-b358-10b242121eb6-kube-api-access-rvxxn\") pod \"redhat-operators-qfclb\" (UID: \"c7a22e8a-307c-4e58-b358-10b242121eb6\") " pod="openshift-marketplace/redhat-operators-qfclb" Nov 27 09:21:23 crc kubenswrapper[4971]: I1127 09:21:23.330482 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7a22e8a-307c-4e58-b358-10b242121eb6-utilities\") pod \"redhat-operators-qfclb\" (UID: \"c7a22e8a-307c-4e58-b358-10b242121eb6\") " pod="openshift-marketplace/redhat-operators-qfclb" Nov 27 09:21:23 crc kubenswrapper[4971]: I1127 09:21:23.432279 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7a22e8a-307c-4e58-b358-10b242121eb6-utilities\") pod \"redhat-operators-qfclb\" (UID: \"c7a22e8a-307c-4e58-b358-10b242121eb6\") " pod="openshift-marketplace/redhat-operators-qfclb" Nov 27 09:21:23 crc kubenswrapper[4971]: I1127 09:21:23.432377 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7a22e8a-307c-4e58-b358-10b242121eb6-catalog-content\") pod \"redhat-operators-qfclb\" (UID: \"c7a22e8a-307c-4e58-b358-10b242121eb6\") " pod="openshift-marketplace/redhat-operators-qfclb" Nov 27 09:21:23 crc kubenswrapper[4971]: I1127 09:21:23.432467 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvxxn\" (UniqueName: \"kubernetes.io/projected/c7a22e8a-307c-4e58-b358-10b242121eb6-kube-api-access-rvxxn\") pod \"redhat-operators-qfclb\" (UID: \"c7a22e8a-307c-4e58-b358-10b242121eb6\") " pod="openshift-marketplace/redhat-operators-qfclb" Nov 27 09:21:23 crc kubenswrapper[4971]: I1127 09:21:23.432925 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7a22e8a-307c-4e58-b358-10b242121eb6-utilities\") pod \"redhat-operators-qfclb\" (UID: \"c7a22e8a-307c-4e58-b358-10b242121eb6\") " pod="openshift-marketplace/redhat-operators-qfclb" Nov 27 09:21:23 crc kubenswrapper[4971]: I1127 09:21:23.433068 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7a22e8a-307c-4e58-b358-10b242121eb6-catalog-content\") pod \"redhat-operators-qfclb\" (UID: \"c7a22e8a-307c-4e58-b358-10b242121eb6\") " pod="openshift-marketplace/redhat-operators-qfclb" Nov 27 09:21:23 crc kubenswrapper[4971]: I1127 09:21:23.467896 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-rvxxn\" (UniqueName: \"kubernetes.io/projected/c7a22e8a-307c-4e58-b358-10b242121eb6-kube-api-access-rvxxn\") pod \"redhat-operators-qfclb\" (UID: \"c7a22e8a-307c-4e58-b358-10b242121eb6\") " pod="openshift-marketplace/redhat-operators-qfclb" Nov 27 09:21:23 crc kubenswrapper[4971]: I1127 09:21:23.528513 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qfclb" Nov 27 09:21:24 crc kubenswrapper[4971]: I1127 09:21:24.308524 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qfclb"] Nov 27 09:21:24 crc kubenswrapper[4971]: I1127 09:21:24.400321 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qfclb" event={"ID":"c7a22e8a-307c-4e58-b358-10b242121eb6","Type":"ContainerStarted","Data":"5ad7e97f7416d88593a6837c4e4576f35bd1f8400587e29ddf7b4c1f7421f4c8"} Nov 27 09:21:25 crc kubenswrapper[4971]: I1127 09:21:25.420686 4971 generic.go:334] "Generic (PLEG): container finished" podID="c7a22e8a-307c-4e58-b358-10b242121eb6" containerID="3e7f9f77f68d259854857266a3749ad9c10f498a273013c6abd5917cc99812c9" exitCode=0 Nov 27 09:21:25 crc kubenswrapper[4971]: I1127 09:21:25.420777 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qfclb" event={"ID":"c7a22e8a-307c-4e58-b358-10b242121eb6","Type":"ContainerDied","Data":"3e7f9f77f68d259854857266a3749ad9c10f498a273013c6abd5917cc99812c9"} Nov 27 09:21:26 crc kubenswrapper[4971]: I1127 09:21:26.413117 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 09:21:26 crc kubenswrapper[4971]: I1127 09:21:26.413854 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 09:21:26 crc kubenswrapper[4971]: I1127 09:21:26.465410 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qfclb" event={"ID":"c7a22e8a-307c-4e58-b358-10b242121eb6","Type":"ContainerStarted","Data":"6fd32e91acb8149c4b085314e4d52fb9203372c77a0bba6f6e57066247eea5ca"} Nov 27 09:21:29 crc kubenswrapper[4971]: I1127 09:21:29.496507 4971 generic.go:334] "Generic (PLEG): container finished" podID="c7a22e8a-307c-4e58-b358-10b242121eb6" containerID="6fd32e91acb8149c4b085314e4d52fb9203372c77a0bba6f6e57066247eea5ca" exitCode=0 Nov 27 09:21:29 crc kubenswrapper[4971]: I1127 09:21:29.496597 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qfclb" event={"ID":"c7a22e8a-307c-4e58-b358-10b242121eb6","Type":"ContainerDied","Data":"6fd32e91acb8149c4b085314e4d52fb9203372c77a0bba6f6e57066247eea5ca"} Nov 27 09:21:32 crc kubenswrapper[4971]: I1127 09:21:32.569067 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qfclb" event={"ID":"c7a22e8a-307c-4e58-b358-10b242121eb6","Type":"ContainerStarted","Data":"d457790f73ca72367527e97592fdbb9e429ed7908733f495d436296d6a865f1c"} Nov 27 09:21:32 crc kubenswrapper[4971]: I1127 09:21:32.594263 4971 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qfclb" podStartSLOduration=3.732379141 podStartE2EDuration="9.594226197s" podCreationTimestamp="2025-11-27 09:21:23 +0000 UTC" firstStartedPulling="2025-11-27 09:21:25.424769397 +0000 UTC m=+8923.616813315" lastFinishedPulling="2025-11-27 09:21:31.286616453 +0000 UTC m=+8929.478660371" observedRunningTime="2025-11-27 09:21:32.591286594 +0000 UTC m=+8930.783330522" watchObservedRunningTime="2025-11-27 09:21:32.594226197 +0000 UTC m=+8930.786270125" Nov 27 09:21:33 crc kubenswrapper[4971]: I1127 09:21:33.529417 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qfclb" Nov 27 09:21:33 crc kubenswrapper[4971]: I1127 09:21:33.529726 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qfclb" Nov 27 09:21:34 crc kubenswrapper[4971]: I1127 09:21:34.603482 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-qfclb" podUID="c7a22e8a-307c-4e58-b358-10b242121eb6" containerName="registry-server" probeResult="failure" output=< Nov 27 09:21:34 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 09:21:34 crc kubenswrapper[4971]: > Nov 27 09:21:41 crc kubenswrapper[4971]: I1127 09:21:41.660947 4971 generic.go:334] "Generic (PLEG): container finished" podID="0f83c73e-5e55-47a9-9df5-32c19df3372f" containerID="daf279daf7363bd49c276209e6cecf062f5a5eb15a02a0c8dbab1660b8decd8d" exitCode=0 Nov 27 09:21:41 crc kubenswrapper[4971]: I1127 09:21:41.661165 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-networker-jzpb4" event={"ID":"0f83c73e-5e55-47a9-9df5-32c19df3372f","Type":"ContainerDied","Data":"daf279daf7363bd49c276209e6cecf062f5a5eb15a02a0c8dbab1660b8decd8d"} Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.211747 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-networker-jzpb4" Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.336495 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f83c73e-5e55-47a9-9df5-32c19df3372f-inventory\") pod \"0f83c73e-5e55-47a9-9df5-32c19df3372f\" (UID: \"0f83c73e-5e55-47a9-9df5-32c19df3372f\") " Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.337140 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzjnd\" (UniqueName: \"kubernetes.io/projected/0f83c73e-5e55-47a9-9df5-32c19df3372f-kube-api-access-pzjnd\") pod \"0f83c73e-5e55-47a9-9df5-32c19df3372f\" (UID: \"0f83c73e-5e55-47a9-9df5-32c19df3372f\") " Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.337191 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f83c73e-5e55-47a9-9df5-32c19df3372f-ssh-key\") pod \"0f83c73e-5e55-47a9-9df5-32c19df3372f\" (UID: \"0f83c73e-5e55-47a9-9df5-32c19df3372f\") " Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.346066 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f83c73e-5e55-47a9-9df5-32c19df3372f-kube-api-access-pzjnd" (OuterVolumeSpecName: "kube-api-access-pzjnd") pod "0f83c73e-5e55-47a9-9df5-32c19df3372f" (UID: "0f83c73e-5e55-47a9-9df5-32c19df3372f"). InnerVolumeSpecName "kube-api-access-pzjnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.368410 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f83c73e-5e55-47a9-9df5-32c19df3372f-inventory" (OuterVolumeSpecName: "inventory") pod "0f83c73e-5e55-47a9-9df5-32c19df3372f" (UID: "0f83c73e-5e55-47a9-9df5-32c19df3372f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.383815 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f83c73e-5e55-47a9-9df5-32c19df3372f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0f83c73e-5e55-47a9-9df5-32c19df3372f" (UID: "0f83c73e-5e55-47a9-9df5-32c19df3372f"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.440677 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0f83c73e-5e55-47a9-9df5-32c19df3372f-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.440985 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzjnd\" (UniqueName: \"kubernetes.io/projected/0f83c73e-5e55-47a9-9df5-32c19df3372f-kube-api-access-pzjnd\") on node \"crc\" DevicePath \"\"" Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.441059 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0f83c73e-5e55-47a9-9df5-32c19df3372f-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.588845 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qfclb" Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.643290 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qfclb" Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.703408 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-networker-jzpb4" event={"ID":"0f83c73e-5e55-47a9-9df5-32c19df3372f","Type":"ContainerDied","Data":"330e47ee9d72f978e7113f9b97173a8b84cd1f1e3ef969141009eb5e95f4a0e3"} Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.703442 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-networker-jzpb4" Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.703460 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="330e47ee9d72f978e7113f9b97173a8b84cd1f1e3ef969141009eb5e95f4a0e3" Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.796911 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-openstack-openstack-networker-jdddp"] Nov 27 09:21:43 crc kubenswrapper[4971]: E1127 09:21:43.797454 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f83c73e-5e55-47a9-9df5-32c19df3372f" containerName="configure-os-openstack-openstack-networker" Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.797475 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f83c73e-5e55-47a9-9df5-32c19df3372f" containerName="configure-os-openstack-openstack-networker" Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.797787 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f83c73e-5e55-47a9-9df5-32c19df3372f" containerName="configure-os-openstack-openstack-networker" Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.798638 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-networker-jdddp"
Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.801268 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-hbhvx"
Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.806414 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker"
Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.819303 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-networker-jdddp"]
Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.914792 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qfclb"]
Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.961313 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bw7gv\" (UniqueName: \"kubernetes.io/projected/98fa796b-6f0e-4968-aea3-bbea8edc617c-kube-api-access-bw7gv\") pod \"run-os-openstack-openstack-networker-jdddp\" (UID: \"98fa796b-6f0e-4968-aea3-bbea8edc617c\") " pod="openstack/run-os-openstack-openstack-networker-jdddp"
Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.961590 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/98fa796b-6f0e-4968-aea3-bbea8edc617c-ssh-key\") pod \"run-os-openstack-openstack-networker-jdddp\" (UID: \"98fa796b-6f0e-4968-aea3-bbea8edc617c\") " pod="openstack/run-os-openstack-openstack-networker-jdddp"
Nov 27 09:21:43 crc kubenswrapper[4971]: I1127 09:21:43.961656 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/98fa796b-6f0e-4968-aea3-bbea8edc617c-inventory\") pod \"run-os-openstack-openstack-networker-jdddp\" (UID: \"98fa796b-6f0e-4968-aea3-bbea8edc617c\") " pod="openstack/run-os-openstack-openstack-networker-jdddp"
Nov 27 09:21:44 crc kubenswrapper[4971]: I1127 09:21:44.063808 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/98fa796b-6f0e-4968-aea3-bbea8edc617c-ssh-key\") pod \"run-os-openstack-openstack-networker-jdddp\" (UID: \"98fa796b-6f0e-4968-aea3-bbea8edc617c\") " pod="openstack/run-os-openstack-openstack-networker-jdddp"
Nov 27 09:21:44 crc kubenswrapper[4971]: I1127 09:21:44.063864 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/98fa796b-6f0e-4968-aea3-bbea8edc617c-inventory\") pod \"run-os-openstack-openstack-networker-jdddp\" (UID: \"98fa796b-6f0e-4968-aea3-bbea8edc617c\") " pod="openstack/run-os-openstack-openstack-networker-jdddp"
Nov 27 09:21:44 crc kubenswrapper[4971]: I1127 09:21:44.063940 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bw7gv\" (UniqueName: \"kubernetes.io/projected/98fa796b-6f0e-4968-aea3-bbea8edc617c-kube-api-access-bw7gv\") pod \"run-os-openstack-openstack-networker-jdddp\" (UID: \"98fa796b-6f0e-4968-aea3-bbea8edc617c\") " pod="openstack/run-os-openstack-openstack-networker-jdddp"
Nov 27 09:21:44 crc kubenswrapper[4971]: I1127 09:21:44.647995 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/98fa796b-6f0e-4968-aea3-bbea8edc617c-inventory\") pod \"run-os-openstack-openstack-networker-jdddp\" (UID: \"98fa796b-6f0e-4968-aea3-bbea8edc617c\") " pod="openstack/run-os-openstack-openstack-networker-jdddp"
Nov 27 09:21:44 crc kubenswrapper[4971]: I1127 09:21:44.648193 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/98fa796b-6f0e-4968-aea3-bbea8edc617c-ssh-key\") pod \"run-os-openstack-openstack-networker-jdddp\" (UID: \"98fa796b-6f0e-4968-aea3-bbea8edc617c\") " pod="openstack/run-os-openstack-openstack-networker-jdddp"
Nov 27 09:21:44 crc kubenswrapper[4971]: I1127 09:21:44.650621 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bw7gv\" (UniqueName: \"kubernetes.io/projected/98fa796b-6f0e-4968-aea3-bbea8edc617c-kube-api-access-bw7gv\") pod \"run-os-openstack-openstack-networker-jdddp\" (UID: \"98fa796b-6f0e-4968-aea3-bbea8edc617c\") " pod="openstack/run-os-openstack-openstack-networker-jdddp"
Nov 27 09:21:44 crc kubenswrapper[4971]: I1127 09:21:44.721238 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qfclb" podUID="c7a22e8a-307c-4e58-b358-10b242121eb6" containerName="registry-server" containerID="cri-o://d457790f73ca72367527e97592fdbb9e429ed7908733f495d436296d6a865f1c" gracePeriod=2
Nov 27 09:21:44 crc kubenswrapper[4971]: I1127 09:21:44.773228 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-networker-jdddp"
Nov 27 09:21:45 crc kubenswrapper[4971]: I1127 09:21:45.506120 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-networker-jdddp"]
Nov 27 09:21:45 crc kubenswrapper[4971]: I1127 09:21:45.741226 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-networker-jdddp" event={"ID":"98fa796b-6f0e-4968-aea3-bbea8edc617c","Type":"ContainerStarted","Data":"7328994b90de2eef8acd0b0fdc07fd0d462b4059967bb128df1128496ca26570"}
Nov 27 09:21:45 crc kubenswrapper[4971]: I1127 09:21:45.754684 4971 generic.go:334] "Generic (PLEG): container finished" podID="c7a22e8a-307c-4e58-b358-10b242121eb6" containerID="d457790f73ca72367527e97592fdbb9e429ed7908733f495d436296d6a865f1c" exitCode=0
Nov 27 09:21:45 crc kubenswrapper[4971]: I1127 09:21:45.754746 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qfclb" event={"ID":"c7a22e8a-307c-4e58-b358-10b242121eb6","Type":"ContainerDied","Data":"d457790f73ca72367527e97592fdbb9e429ed7908733f495d436296d6a865f1c"}
Nov 27 09:21:45 crc kubenswrapper[4971]: I1127 09:21:45.944842 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qfclb"
Nov 27 09:21:46 crc kubenswrapper[4971]: I1127 09:21:46.019170 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7a22e8a-307c-4e58-b358-10b242121eb6-catalog-content\") pod \"c7a22e8a-307c-4e58-b358-10b242121eb6\" (UID: \"c7a22e8a-307c-4e58-b358-10b242121eb6\") "
Nov 27 09:21:46 crc kubenswrapper[4971]: I1127 09:21:46.019369 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvxxn\" (UniqueName: \"kubernetes.io/projected/c7a22e8a-307c-4e58-b358-10b242121eb6-kube-api-access-rvxxn\") pod \"c7a22e8a-307c-4e58-b358-10b242121eb6\" (UID: \"c7a22e8a-307c-4e58-b358-10b242121eb6\") "
Nov 27 09:21:46 crc kubenswrapper[4971]: I1127 09:21:46.019429 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7a22e8a-307c-4e58-b358-10b242121eb6-utilities\") pod \"c7a22e8a-307c-4e58-b358-10b242121eb6\" (UID: \"c7a22e8a-307c-4e58-b358-10b242121eb6\") "
Nov 27 09:21:46 crc kubenswrapper[4971]: I1127 09:21:46.021638 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7a22e8a-307c-4e58-b358-10b242121eb6-utilities" (OuterVolumeSpecName: "utilities") pod "c7a22e8a-307c-4e58-b358-10b242121eb6" (UID: "c7a22e8a-307c-4e58-b358-10b242121eb6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:21:46 crc kubenswrapper[4971]: I1127 09:21:46.024748 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7a22e8a-307c-4e58-b358-10b242121eb6-kube-api-access-rvxxn" (OuterVolumeSpecName: "kube-api-access-rvxxn") pod "c7a22e8a-307c-4e58-b358-10b242121eb6" (UID: "c7a22e8a-307c-4e58-b358-10b242121eb6"). InnerVolumeSpecName "kube-api-access-rvxxn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:21:46 crc kubenswrapper[4971]: I1127 09:21:46.122573 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvxxn\" (UniqueName: \"kubernetes.io/projected/c7a22e8a-307c-4e58-b358-10b242121eb6-kube-api-access-rvxxn\") on node \"crc\" DevicePath \"\""
Nov 27 09:21:46 crc kubenswrapper[4971]: I1127 09:21:46.122618 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7a22e8a-307c-4e58-b358-10b242121eb6-utilities\") on node \"crc\" DevicePath \"\""
Nov 27 09:21:46 crc kubenswrapper[4971]: I1127 09:21:46.165927 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7a22e8a-307c-4e58-b358-10b242121eb6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c7a22e8a-307c-4e58-b358-10b242121eb6" (UID: "c7a22e8a-307c-4e58-b358-10b242121eb6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:21:46 crc kubenswrapper[4971]: I1127 09:21:46.224744 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7a22e8a-307c-4e58-b358-10b242121eb6-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 27 09:21:46 crc kubenswrapper[4971]: I1127 09:21:46.767330 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-networker-jdddp" event={"ID":"98fa796b-6f0e-4968-aea3-bbea8edc617c","Type":"ContainerStarted","Data":"e5fb9113e0471a86411b8e5cf02f68fe54cffcf94f0f53ed3337ab057e6dd33b"}
Nov 27 09:21:46 crc kubenswrapper[4971]: I1127 09:21:46.771556 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qfclb" event={"ID":"c7a22e8a-307c-4e58-b358-10b242121eb6","Type":"ContainerDied","Data":"5ad7e97f7416d88593a6837c4e4576f35bd1f8400587e29ddf7b4c1f7421f4c8"}
Nov 27 09:21:46 crc kubenswrapper[4971]: I1127 09:21:46.771610 4971 scope.go:117] "RemoveContainer" containerID="d457790f73ca72367527e97592fdbb9e429ed7908733f495d436296d6a865f1c"
Nov 27 09:21:46 crc kubenswrapper[4971]: I1127 09:21:46.771806 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qfclb"
Nov 27 09:21:46 crc kubenswrapper[4971]: I1127 09:21:46.795843 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-openstack-openstack-networker-jdddp" podStartSLOduration=3.320620422 podStartE2EDuration="3.795816077s" podCreationTimestamp="2025-11-27 09:21:43 +0000 UTC" firstStartedPulling="2025-11-27 09:21:45.524681738 +0000 UTC m=+8943.716725656" lastFinishedPulling="2025-11-27 09:21:45.999877393 +0000 UTC m=+8944.191921311" observedRunningTime="2025-11-27 09:21:46.794915211 +0000 UTC m=+8944.986959119" watchObservedRunningTime="2025-11-27 09:21:46.795816077 +0000 UTC m=+8944.987859995"
Nov 27 09:21:46 crc kubenswrapper[4971]: I1127 09:21:46.797129 4971 scope.go:117] "RemoveContainer" containerID="6fd32e91acb8149c4b085314e4d52fb9203372c77a0bba6f6e57066247eea5ca"
Nov 27 09:21:46 crc kubenswrapper[4971]: I1127 09:21:46.840363 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qfclb"]
Nov 27 09:21:46 crc kubenswrapper[4971]: I1127 09:21:46.854903 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qfclb"]
Nov 27 09:21:47 crc kubenswrapper[4971]: I1127 09:21:47.163928 4971 scope.go:117] "RemoveContainer" containerID="3e7f9f77f68d259854857266a3749ad9c10f498a273013c6abd5917cc99812c9"
Nov 27 09:21:48 crc kubenswrapper[4971]: I1127 09:21:48.570575 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7a22e8a-307c-4e58-b358-10b242121eb6" path="/var/lib/kubelet/pods/c7a22e8a-307c-4e58-b358-10b242121eb6/volumes"
Nov 27 09:21:49 crc kubenswrapper[4971]: I1127 09:21:49.811439 4971 generic.go:334] "Generic (PLEG): container finished" podID="811b1337-2a00-4b0a-a398-163af3d76d79" containerID="fcf18b1a01047d5e1fd387bf06370bfe5b1fc141e80fc0065c14b94ce8f2b671" exitCode=0
Nov 27 09:21:49 crc kubenswrapper[4971]: I1127 09:21:49.811522 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-58vhc" event={"ID":"811b1337-2a00-4b0a-a398-163af3d76d79","Type":"ContainerDied","Data":"fcf18b1a01047d5e1fd387bf06370bfe5b1fc141e80fc0065c14b94ce8f2b671"}
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.600056 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-58vhc"
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.666235 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/811b1337-2a00-4b0a-a398-163af3d76d79-ceph\") pod \"811b1337-2a00-4b0a-a398-163af3d76d79\" (UID: \"811b1337-2a00-4b0a-a398-163af3d76d79\") "
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.666662 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/811b1337-2a00-4b0a-a398-163af3d76d79-ssh-key\") pod \"811b1337-2a00-4b0a-a398-163af3d76d79\" (UID: \"811b1337-2a00-4b0a-a398-163af3d76d79\") "
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.666798 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/811b1337-2a00-4b0a-a398-163af3d76d79-inventory\") pod \"811b1337-2a00-4b0a-a398-163af3d76d79\" (UID: \"811b1337-2a00-4b0a-a398-163af3d76d79\") "
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.666994 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-glgc4\" (UniqueName: \"kubernetes.io/projected/811b1337-2a00-4b0a-a398-163af3d76d79-kube-api-access-glgc4\") pod \"811b1337-2a00-4b0a-a398-163af3d76d79\" (UID: \"811b1337-2a00-4b0a-a398-163af3d76d79\") "
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.675480 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/811b1337-2a00-4b0a-a398-163af3d76d79-kube-api-access-glgc4" (OuterVolumeSpecName: "kube-api-access-glgc4") pod "811b1337-2a00-4b0a-a398-163af3d76d79" (UID: "811b1337-2a00-4b0a-a398-163af3d76d79"). InnerVolumeSpecName "kube-api-access-glgc4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.677836 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811b1337-2a00-4b0a-a398-163af3d76d79-ceph" (OuterVolumeSpecName: "ceph") pod "811b1337-2a00-4b0a-a398-163af3d76d79" (UID: "811b1337-2a00-4b0a-a398-163af3d76d79"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.704011 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811b1337-2a00-4b0a-a398-163af3d76d79-inventory" (OuterVolumeSpecName: "inventory") pod "811b1337-2a00-4b0a-a398-163af3d76d79" (UID: "811b1337-2a00-4b0a-a398-163af3d76d79"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.707185 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811b1337-2a00-4b0a-a398-163af3d76d79-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "811b1337-2a00-4b0a-a398-163af3d76d79" (UID: "811b1337-2a00-4b0a-a398-163af3d76d79"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.770702 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/811b1337-2a00-4b0a-a398-163af3d76d79-inventory\") on node \"crc\" DevicePath \"\""
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.770745 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-glgc4\" (UniqueName: \"kubernetes.io/projected/811b1337-2a00-4b0a-a398-163af3d76d79-kube-api-access-glgc4\") on node \"crc\" DevicePath \"\""
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.770756 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/811b1337-2a00-4b0a-a398-163af3d76d79-ceph\") on node \"crc\" DevicePath \"\""
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.770764 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/811b1337-2a00-4b0a-a398-163af3d76d79-ssh-key\") on node \"crc\" DevicePath \"\""
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.832558 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-58vhc" event={"ID":"811b1337-2a00-4b0a-a398-163af3d76d79","Type":"ContainerDied","Data":"b955224ed47bb376c05070891675cd69d0b065aa9c70eab0405f5e6b5d757976"}
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.832613 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b955224ed47bb376c05070891675cd69d0b065aa9c70eab0405f5e6b5d757976"
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.832672 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-58vhc"
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.996921 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-openstack-ncmnj"]
Nov 27 09:21:51 crc kubenswrapper[4971]: E1127 09:21:51.997583 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7a22e8a-307c-4e58-b358-10b242121eb6" containerName="extract-utilities"
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.997610 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7a22e8a-307c-4e58-b358-10b242121eb6" containerName="extract-utilities"
Nov 27 09:21:51 crc kubenswrapper[4971]: E1127 09:21:51.997636 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7a22e8a-307c-4e58-b358-10b242121eb6" containerName="extract-content"
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.997657 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7a22e8a-307c-4e58-b358-10b242121eb6" containerName="extract-content"
Nov 27 09:21:51 crc kubenswrapper[4971]: E1127 09:21:51.997682 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7a22e8a-307c-4e58-b358-10b242121eb6" containerName="registry-server"
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.997692 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7a22e8a-307c-4e58-b358-10b242121eb6" containerName="registry-server"
Nov 27 09:21:51 crc kubenswrapper[4971]: E1127 09:21:51.997751 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="811b1337-2a00-4b0a-a398-163af3d76d79" containerName="configure-os-openstack-openstack-cell1"
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.997761 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="811b1337-2a00-4b0a-a398-163af3d76d79" containerName="configure-os-openstack-openstack-cell1"
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.998062 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7a22e8a-307c-4e58-b358-10b242121eb6" containerName="registry-server"
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.998127 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="811b1337-2a00-4b0a-a398-163af3d76d79" containerName="configure-os-openstack-openstack-cell1"
Nov 27 09:21:51 crc kubenswrapper[4971]: I1127 09:21:51.999352 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.001474 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.003124 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-jnkbm"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.016093 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-ncmnj"]
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.081820 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpjcf\" (UniqueName: \"kubernetes.io/projected/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-kube-api-access-fpjcf\") pod \"ssh-known-hosts-openstack-ncmnj\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") " pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.081910 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-inventory-0\") pod \"ssh-known-hosts-openstack-ncmnj\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") " pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.082026 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-ncmnj\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") " pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.082265 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-ceph\") pod \"ssh-known-hosts-openstack-ncmnj\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") " pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.082699 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-networker\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-ssh-key-openstack-networker\") pod \"ssh-known-hosts-openstack-ncmnj\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") " pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.083048 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-1\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-inventory-1\") pod \"ssh-known-hosts-openstack-ncmnj\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") " pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.184885 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-inventory-0\") pod \"ssh-known-hosts-openstack-ncmnj\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") " pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.184998 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-ncmnj\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") " pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.185039 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-ceph\") pod \"ssh-known-hosts-openstack-ncmnj\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") " pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.185093 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-networker\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-ssh-key-openstack-networker\") pod \"ssh-known-hosts-openstack-ncmnj\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") " pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.185162 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-1\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-inventory-1\") pod \"ssh-known-hosts-openstack-ncmnj\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") " pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.185196 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpjcf\" (UniqueName: \"kubernetes.io/projected/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-kube-api-access-fpjcf\") pod \"ssh-known-hosts-openstack-ncmnj\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") " pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.189009 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-networker\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-ssh-key-openstack-networker\") pod \"ssh-known-hosts-openstack-ncmnj\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") " pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.189479 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-ncmnj\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") " pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.190644 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-1\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-inventory-1\") pod \"ssh-known-hosts-openstack-ncmnj\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") " pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.193794 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-ceph\") pod \"ssh-known-hosts-openstack-ncmnj\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") " pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.203312 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-inventory-0\") pod \"ssh-known-hosts-openstack-ncmnj\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") " pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.205281 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpjcf\" (UniqueName: \"kubernetes.io/projected/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-kube-api-access-fpjcf\") pod \"ssh-known-hosts-openstack-ncmnj\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") " pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.330430 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:21:52 crc kubenswrapper[4971]: I1127 09:21:52.956328 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-ncmnj"]
Nov 27 09:21:53 crc kubenswrapper[4971]: I1127 09:21:53.869833 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-ncmnj" event={"ID":"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14","Type":"ContainerStarted","Data":"1176d98d05c944bd79952bac8f9a01299cdb8b3918411fe0cac92af3607de5e2"}
Nov 27 09:21:54 crc kubenswrapper[4971]: I1127 09:21:54.885367 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-ncmnj" event={"ID":"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14","Type":"ContainerStarted","Data":"956b2a8334c18fc109860e03e78761c37bb5d6fce07dd7732b50cb78a5e455d0"}
Nov 27 09:21:54 crc kubenswrapper[4971]: I1127 09:21:54.916206 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-openstack-ncmnj" podStartSLOduration=3.085947873 podStartE2EDuration="3.91617126s" podCreationTimestamp="2025-11-27 09:21:51 +0000 UTC" firstStartedPulling="2025-11-27 09:21:52.972808162 +0000 UTC m=+8951.164852080" lastFinishedPulling="2025-11-27 09:21:53.803031549 +0000 UTC m=+8951.995075467" observedRunningTime="2025-11-27 09:21:54.905078485 +0000 UTC m=+8953.097122483" watchObservedRunningTime="2025-11-27 09:21:54.91617126 +0000 UTC m=+8953.108215188"
Nov 27 09:21:56 crc kubenswrapper[4971]: I1127 09:21:56.412872 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 09:21:56 crc kubenswrapper[4971]: I1127 09:21:56.412955 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 09:21:56 crc kubenswrapper[4971]: I1127 09:21:56.413057 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h"
Nov 27 09:21:56 crc kubenswrapper[4971]: I1127 09:21:56.414361 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"74942ef67bc6f0078a96e89348238bb86066d83521cb4e5856153cafd601817d"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 27 09:21:56 crc kubenswrapper[4971]: I1127 09:21:56.414472 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://74942ef67bc6f0078a96e89348238bb86066d83521cb4e5856153cafd601817d" gracePeriod=600
Nov 27 09:21:56 crc kubenswrapper[4971]: I1127 09:21:56.921895 4971 generic.go:334] "Generic (PLEG): container finished" podID="98fa796b-6f0e-4968-aea3-bbea8edc617c" containerID="e5fb9113e0471a86411b8e5cf02f68fe54cffcf94f0f53ed3337ab057e6dd33b" exitCode=0
Nov 27 09:21:56 crc kubenswrapper[4971]: I1127 09:21:56.922307 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-networker-jdddp" event={"ID":"98fa796b-6f0e-4968-aea3-bbea8edc617c","Type":"ContainerDied","Data":"e5fb9113e0471a86411b8e5cf02f68fe54cffcf94f0f53ed3337ab057e6dd33b"}
Nov 27 09:21:56 crc kubenswrapper[4971]: I1127 09:21:56.927468 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="74942ef67bc6f0078a96e89348238bb86066d83521cb4e5856153cafd601817d" exitCode=0
Nov 27 09:21:56 crc kubenswrapper[4971]: I1127 09:21:56.927514 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"74942ef67bc6f0078a96e89348238bb86066d83521cb4e5856153cafd601817d"}
Nov 27 09:21:56 crc kubenswrapper[4971]: I1127 09:21:56.927577 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63"}
Nov 27 09:21:56 crc kubenswrapper[4971]: I1127 09:21:56.927649 4971 scope.go:117] "RemoveContainer" containerID="6575cb158f547bb22eed9602ec2cad91f3d040d7cbd989460687163c84223d1c"
Nov 27 09:21:58 crc kubenswrapper[4971]: I1127 09:21:58.471723 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-networker-jdddp"
Nov 27 09:21:58 crc kubenswrapper[4971]: I1127 09:21:58.636424 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/98fa796b-6f0e-4968-aea3-bbea8edc617c-ssh-key\") pod \"98fa796b-6f0e-4968-aea3-bbea8edc617c\" (UID: \"98fa796b-6f0e-4968-aea3-bbea8edc617c\") "
Nov 27 09:21:58 crc kubenswrapper[4971]: I1127 09:21:58.636778 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bw7gv\" (UniqueName: \"kubernetes.io/projected/98fa796b-6f0e-4968-aea3-bbea8edc617c-kube-api-access-bw7gv\") pod \"98fa796b-6f0e-4968-aea3-bbea8edc617c\" (UID: \"98fa796b-6f0e-4968-aea3-bbea8edc617c\") "
Nov 27 09:21:58 crc kubenswrapper[4971]: I1127 09:21:58.636888 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/98fa796b-6f0e-4968-aea3-bbea8edc617c-inventory\") pod \"98fa796b-6f0e-4968-aea3-bbea8edc617c\" (UID: \"98fa796b-6f0e-4968-aea3-bbea8edc617c\") "
Nov 27 09:21:58 crc kubenswrapper[4971]: I1127 09:21:58.643304 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98fa796b-6f0e-4968-aea3-bbea8edc617c-kube-api-access-bw7gv" (OuterVolumeSpecName: "kube-api-access-bw7gv") pod "98fa796b-6f0e-4968-aea3-bbea8edc617c" (UID: "98fa796b-6f0e-4968-aea3-bbea8edc617c"). InnerVolumeSpecName "kube-api-access-bw7gv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:21:58 crc kubenswrapper[4971]: I1127 09:21:58.666480 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98fa796b-6f0e-4968-aea3-bbea8edc617c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "98fa796b-6f0e-4968-aea3-bbea8edc617c" (UID: "98fa796b-6f0e-4968-aea3-bbea8edc617c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:21:58 crc kubenswrapper[4971]: I1127 09:21:58.680557 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98fa796b-6f0e-4968-aea3-bbea8edc617c-inventory" (OuterVolumeSpecName: "inventory") pod "98fa796b-6f0e-4968-aea3-bbea8edc617c" (UID: "98fa796b-6f0e-4968-aea3-bbea8edc617c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:21:58 crc kubenswrapper[4971]: I1127 09:21:58.740918 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/98fa796b-6f0e-4968-aea3-bbea8edc617c-ssh-key\") on node \"crc\" DevicePath \"\""
Nov 27 09:21:58 crc kubenswrapper[4971]: I1127 09:21:58.741006 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bw7gv\" (UniqueName: \"kubernetes.io/projected/98fa796b-6f0e-4968-aea3-bbea8edc617c-kube-api-access-bw7gv\") on node \"crc\" DevicePath \"\""
Nov 27 09:21:58 crc kubenswrapper[4971]: I1127 09:21:58.741023 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/98fa796b-6f0e-4968-aea3-bbea8edc617c-inventory\") on node \"crc\" DevicePath \"\""
Nov 27 09:21:58 crc kubenswrapper[4971]: I1127 09:21:58.954223 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-networker-jdddp" event={"ID":"98fa796b-6f0e-4968-aea3-bbea8edc617c","Type":"ContainerDied","Data":"7328994b90de2eef8acd0b0fdc07fd0d462b4059967bb128df1128496ca26570"}
Nov 27 09:21:58 crc kubenswrapper[4971]: I1127 09:21:58.954564 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7328994b90de2eef8acd0b0fdc07fd0d462b4059967bb128df1128496ca26570"
Nov 27 09:21:58 crc kubenswrapper[4971]: I1127 09:21:58.954318 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-networker-jdddp"
Nov 27 09:21:59 crc kubenswrapper[4971]: I1127 09:21:59.023439 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-openstack-openstack-networker-hnrlz"]
Nov 27 09:21:59 crc kubenswrapper[4971]: E1127 09:21:59.023955 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98fa796b-6f0e-4968-aea3-bbea8edc617c" containerName="run-os-openstack-openstack-networker"
Nov 27 09:21:59 crc kubenswrapper[4971]: I1127 09:21:59.023976 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="98fa796b-6f0e-4968-aea3-bbea8edc617c" containerName="run-os-openstack-openstack-networker"
Nov 27 09:21:59 crc kubenswrapper[4971]: I1127 09:21:59.024267 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="98fa796b-6f0e-4968-aea3-bbea8edc617c" containerName="run-os-openstack-openstack-networker"
Nov 27 09:21:59 crc kubenswrapper[4971]: I1127 09:21:59.027383 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-networker-hnrlz"
Nov 27 09:21:59 crc kubenswrapper[4971]: I1127 09:21:59.030187 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-hbhvx"
Nov 27 09:21:59 crc kubenswrapper[4971]: I1127 09:21:59.036902 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-networker-hnrlz"]
Nov 27 09:21:59 crc kubenswrapper[4971]: I1127 09:21:59.044360 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3dc9cc91-a665-4d22-a5dc-e8014b1d1925-ssh-key\") pod \"reboot-os-openstack-openstack-networker-hnrlz\" (UID: \"3dc9cc91-a665-4d22-a5dc-e8014b1d1925\") " pod="openstack/reboot-os-openstack-openstack-networker-hnrlz"
Nov 27 09:21:59 crc kubenswrapper[4971]: I1127 09:21:59.044409 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3dc9cc91-a665-4d22-a5dc-e8014b1d1925-inventory\") pod \"reboot-os-openstack-openstack-networker-hnrlz\" (UID: \"3dc9cc91-a665-4d22-a5dc-e8014b1d1925\") " pod="openstack/reboot-os-openstack-openstack-networker-hnrlz"
Nov 27 09:21:59 crc kubenswrapper[4971]: I1127 09:21:59.044552 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6n7m8\" (UniqueName: \"kubernetes.io/projected/3dc9cc91-a665-4d22-a5dc-e8014b1d1925-kube-api-access-6n7m8\") pod \"reboot-os-openstack-openstack-networker-hnrlz\" (UID: \"3dc9cc91-a665-4d22-a5dc-e8014b1d1925\") " pod="openstack/reboot-os-openstack-openstack-networker-hnrlz"
Nov 27 09:21:59 crc kubenswrapper[4971]: I1127 09:21:59.147004 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6n7m8\" (UniqueName: \"kubernetes.io/projected/3dc9cc91-a665-4d22-a5dc-e8014b1d1925-kube-api-access-6n7m8\") pod \"reboot-os-openstack-openstack-networker-hnrlz\" (UID: \"3dc9cc91-a665-4d22-a5dc-e8014b1d1925\") " pod="openstack/reboot-os-openstack-openstack-networker-hnrlz"
Nov 27 09:21:59 crc kubenswrapper[4971]: I1127 09:21:59.147251 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3dc9cc91-a665-4d22-a5dc-e8014b1d1925-ssh-key\") pod \"reboot-os-openstack-openstack-networker-hnrlz\" (UID: \"3dc9cc91-a665-4d22-a5dc-e8014b1d1925\") " pod="openstack/reboot-os-openstack-openstack-networker-hnrlz"
Nov 27 09:21:59 crc kubenswrapper[4971]: I1127 09:21:59.147292 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3dc9cc91-a665-4d22-a5dc-e8014b1d1925-inventory\") pod \"reboot-os-openstack-openstack-networker-hnrlz\" (UID: \"3dc9cc91-a665-4d22-a5dc-e8014b1d1925\") " pod="openstack/reboot-os-openstack-openstack-networker-hnrlz"
Nov 27 09:21:59 crc kubenswrapper[4971]: I1127 09:21:59.152254 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3dc9cc91-a665-4d22-a5dc-e8014b1d1925-ssh-key\") pod \"reboot-os-openstack-openstack-networker-hnrlz\" (UID: \"3dc9cc91-a665-4d22-a5dc-e8014b1d1925\") " pod="openstack/reboot-os-openstack-openstack-networker-hnrlz"
Nov 27 09:21:59 crc kubenswrapper[4971]: I1127 09:21:59.164703 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3dc9cc91-a665-4d22-a5dc-e8014b1d1925-inventory\") pod \"reboot-os-openstack-openstack-networker-hnrlz\" (UID: \"3dc9cc91-a665-4d22-a5dc-e8014b1d1925\") " pod="openstack/reboot-os-openstack-openstack-networker-hnrlz"
Nov 27 09:21:59 crc kubenswrapper[4971]: I1127 09:21:59.175760 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6n7m8\" (UniqueName: \"kubernetes.io/projected/3dc9cc91-a665-4d22-a5dc-e8014b1d1925-kube-api-access-6n7m8\") pod \"reboot-os-openstack-openstack-networker-hnrlz\" (UID: \"3dc9cc91-a665-4d22-a5dc-e8014b1d1925\") " pod="openstack/reboot-os-openstack-openstack-networker-hnrlz"
Nov 27 09:21:59 crc kubenswrapper[4971]: I1127 09:21:59.353546 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-networker-hnrlz"
Nov 27 09:21:59 crc kubenswrapper[4971]: I1127 09:21:59.960151 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-networker-hnrlz"]
Nov 27 09:21:59 crc kubenswrapper[4971]: W1127 09:21:59.975513 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3dc9cc91_a665_4d22_a5dc_e8014b1d1925.slice/crio-7ed728faca3f207db77577cba04c66a355f2d9dc5c437c0c6dad2684591d8308 WatchSource:0}: Error finding container 7ed728faca3f207db77577cba04c66a355f2d9dc5c437c0c6dad2684591d8308: Status 404 returned error can't find the container with id 7ed728faca3f207db77577cba04c66a355f2d9dc5c437c0c6dad2684591d8308
Nov 27 09:22:00 crc kubenswrapper[4971]: I1127 09:22:00.989882 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-networker-hnrlz" event={"ID":"3dc9cc91-a665-4d22-a5dc-e8014b1d1925","Type":"ContainerStarted","Data":"785f0481aacba6490c0714d0faed6875a88a5b7cc4fba70c0f48ff427be89c86"}
Nov 27 09:22:00 crc kubenswrapper[4971]: I1127 09:22:00.990558 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-networker-hnrlz" event={"ID":"3dc9cc91-a665-4d22-a5dc-e8014b1d1925","Type":"ContainerStarted","Data":"7ed728faca3f207db77577cba04c66a355f2d9dc5c437c0c6dad2684591d8308"}
Nov 27 09:22:01 crc kubenswrapper[4971]: I1127 09:22:01.016155 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-openstack-openstack-networker-hnrlz" podStartSLOduration=2.267236902 podStartE2EDuration="3.016136029s" podCreationTimestamp="2025-11-27 09:21:58 +0000 UTC" firstStartedPulling="2025-11-27 09:21:59.984372709 +0000 UTC m=+8958.176416627" lastFinishedPulling="2025-11-27 09:22:00.733271846 +0000 UTC m=+8958.925315754" observedRunningTime="2025-11-27 09:22:01.009520041 +0000 UTC m=+8959.201563959" watchObservedRunningTime="2025-11-27 09:22:01.016136029 +0000 UTC m=+8959.208179947"
Nov 27 09:22:10 crc kubenswrapper[4971]: I1127 09:22:10.102058 4971 generic.go:334] "Generic (PLEG): container finished" podID="0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14" containerID="956b2a8334c18fc109860e03e78761c37bb5d6fce07dd7732b50cb78a5e455d0" exitCode=0
Nov 27 09:22:10 crc kubenswrapper[4971]: I1127 09:22:10.102178 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-ncmnj" event={"ID":"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14","Type":"ContainerDied","Data":"956b2a8334c18fc109860e03e78761c37bb5d6fce07dd7732b50cb78a5e455d0"}
Nov 27 09:22:11 crc kubenswrapper[4971]: I1127 09:22:11.642999 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:22:11 crc kubenswrapper[4971]: I1127 09:22:11.750926 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-networker\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-ssh-key-openstack-networker\") pod \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") "
Nov 27 09:22:11 crc kubenswrapper[4971]: I1127 09:22:11.750984 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-ceph\") pod \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") "
Nov 27 09:22:11 crc kubenswrapper[4971]: I1127 09:22:11.751039 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpjcf\" (UniqueName: \"kubernetes.io/projected/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-kube-api-access-fpjcf\") pod \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") "
Nov 27 09:22:11 crc kubenswrapper[4971]: I1127 09:22:11.751147 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-ssh-key-openstack-cell1\") pod \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") "
Nov 27 09:22:11 crc kubenswrapper[4971]: I1127 09:22:11.751199 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-1\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-inventory-1\") pod \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") "
Nov 27 09:22:11 crc kubenswrapper[4971]: I1127 09:22:11.751401 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-inventory-0\") pod \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\" (UID: \"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14\") "
Nov 27 09:22:11 crc kubenswrapper[4971]: I1127 09:22:11.758201 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-ceph" (OuterVolumeSpecName: "ceph") pod "0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14" (UID: "0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:22:11 crc kubenswrapper[4971]: I1127 09:22:11.758369 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-kube-api-access-fpjcf" (OuterVolumeSpecName: "kube-api-access-fpjcf") pod "0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14" (UID: "0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14"). InnerVolumeSpecName "kube-api-access-fpjcf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:22:11 crc kubenswrapper[4971]: I1127 09:22:11.781959 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-ssh-key-openstack-networker" (OuterVolumeSpecName: "ssh-key-openstack-networker") pod "0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14" (UID: "0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14"). InnerVolumeSpecName "ssh-key-openstack-networker". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:22:11 crc kubenswrapper[4971]: I1127 09:22:11.786650 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-inventory-1" (OuterVolumeSpecName: "inventory-1") pod "0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14" (UID: "0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14"). InnerVolumeSpecName "inventory-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:22:11 crc kubenswrapper[4971]: I1127 09:22:11.792658 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14" (UID: "0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:22:11 crc kubenswrapper[4971]: I1127 09:22:11.804297 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14" (UID: "0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:22:11 crc kubenswrapper[4971]: I1127 09:22:11.854290 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-networker\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-ssh-key-openstack-networker\") on node \"crc\" DevicePath \"\""
Nov 27 09:22:11 crc kubenswrapper[4971]: I1127 09:22:11.854329 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-ceph\") on node \"crc\" DevicePath \"\""
Nov 27 09:22:11 crc kubenswrapper[4971]: I1127 09:22:11.854341 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpjcf\" (UniqueName: \"kubernetes.io/projected/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-kube-api-access-fpjcf\") on node \"crc\" DevicePath \"\""
Nov 27 09:22:11 crc kubenswrapper[4971]: I1127 09:22:11.854349 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\""
Nov 27 09:22:11 crc kubenswrapper[4971]: I1127 09:22:11.854360 4971 reconciler_common.go:293] "Volume detached for volume \"inventory-1\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-inventory-1\") on node \"crc\" DevicePath \"\""
Nov 27 09:22:11 crc kubenswrapper[4971]: I1127 09:22:11.854374 4971 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14-inventory-0\") on node \"crc\" DevicePath \"\""
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.124403 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-ncmnj" event={"ID":"0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14","Type":"ContainerDied","Data":"1176d98d05c944bd79952bac8f9a01299cdb8b3918411fe0cac92af3607de5e2"}
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.124445 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1176d98d05c944bd79952bac8f9a01299cdb8b3918411fe0cac92af3607de5e2"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.124510 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-ncmnj"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.215336 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-openstack-openstack-cell1-mn94j"]
Nov 27 09:22:12 crc kubenswrapper[4971]: E1127 09:22:12.215976 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14" containerName="ssh-known-hosts-openstack"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.216001 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14" containerName="ssh-known-hosts-openstack"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.216292 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14" containerName="ssh-known-hosts-openstack"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.217302 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-mn94j"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.220239 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-jnkbm"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.220663 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.232983 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-mn94j"]
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.264821 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/46fe2d18-3744-489f-93f7-d771ebb2d6a8-ceph\") pod \"run-os-openstack-openstack-cell1-mn94j\" (UID: \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\") " pod="openstack/run-os-openstack-openstack-cell1-mn94j"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.265017 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/46fe2d18-3744-489f-93f7-d771ebb2d6a8-ssh-key\") pod \"run-os-openstack-openstack-cell1-mn94j\" (UID: \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\") " pod="openstack/run-os-openstack-openstack-cell1-mn94j"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.265068 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/46fe2d18-3744-489f-93f7-d771ebb2d6a8-inventory\") pod \"run-os-openstack-openstack-cell1-mn94j\" (UID: \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\") " pod="openstack/run-os-openstack-openstack-cell1-mn94j"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.265108 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ft7fq\" (UniqueName: \"kubernetes.io/projected/46fe2d18-3744-489f-93f7-d771ebb2d6a8-kube-api-access-ft7fq\") pod \"run-os-openstack-openstack-cell1-mn94j\" (UID: \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\") " pod="openstack/run-os-openstack-openstack-cell1-mn94j"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.367048 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/46fe2d18-3744-489f-93f7-d771ebb2d6a8-ceph\") pod \"run-os-openstack-openstack-cell1-mn94j\" (UID: \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\") " pod="openstack/run-os-openstack-openstack-cell1-mn94j"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.367312 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/46fe2d18-3744-489f-93f7-d771ebb2d6a8-ssh-key\") pod \"run-os-openstack-openstack-cell1-mn94j\" (UID: \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\") " pod="openstack/run-os-openstack-openstack-cell1-mn94j"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.367370 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/46fe2d18-3744-489f-93f7-d771ebb2d6a8-inventory\") pod \"run-os-openstack-openstack-cell1-mn94j\" (UID: \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\") " pod="openstack/run-os-openstack-openstack-cell1-mn94j"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.367417 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ft7fq\" (UniqueName: \"kubernetes.io/projected/46fe2d18-3744-489f-93f7-d771ebb2d6a8-kube-api-access-ft7fq\") pod \"run-os-openstack-openstack-cell1-mn94j\" (UID: \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\") " pod="openstack/run-os-openstack-openstack-cell1-mn94j"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.371260 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/46fe2d18-3744-489f-93f7-d771ebb2d6a8-ceph\") pod \"run-os-openstack-openstack-cell1-mn94j\" (UID: \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\") " pod="openstack/run-os-openstack-openstack-cell1-mn94j"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.372213 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/46fe2d18-3744-489f-93f7-d771ebb2d6a8-inventory\") pod \"run-os-openstack-openstack-cell1-mn94j\" (UID: \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\") " pod="openstack/run-os-openstack-openstack-cell1-mn94j"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.375314 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/46fe2d18-3744-489f-93f7-d771ebb2d6a8-ssh-key\") pod \"run-os-openstack-openstack-cell1-mn94j\" (UID: \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\") " pod="openstack/run-os-openstack-openstack-cell1-mn94j"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.384910 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ft7fq\" (UniqueName: \"kubernetes.io/projected/46fe2d18-3744-489f-93f7-d771ebb2d6a8-kube-api-access-ft7fq\") pod \"run-os-openstack-openstack-cell1-mn94j\" (UID: \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\") " pod="openstack/run-os-openstack-openstack-cell1-mn94j"
Nov 27 09:22:12 crc kubenswrapper[4971]: I1127 09:22:12.546082 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-mn94j"
Nov 27 09:22:13 crc kubenswrapper[4971]: W1127 09:22:13.146171 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46fe2d18_3744_489f_93f7_d771ebb2d6a8.slice/crio-d620fa7d94edcdb6513fd2f0ae80b5a1084bed5e185b4453d9d3d2ecdeae8679 WatchSource:0}: Error finding container d620fa7d94edcdb6513fd2f0ae80b5a1084bed5e185b4453d9d3d2ecdeae8679: Status 404 returned error can't find the container with id d620fa7d94edcdb6513fd2f0ae80b5a1084bed5e185b4453d9d3d2ecdeae8679
Nov 27 09:22:13 crc kubenswrapper[4971]: I1127 09:22:13.147363 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-mn94j"]
Nov 27 09:22:14 crc kubenswrapper[4971]: I1127 09:22:14.150470 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-mn94j" event={"ID":"46fe2d18-3744-489f-93f7-d771ebb2d6a8","Type":"ContainerStarted","Data":"839fe4a862f162f43779f3a93d234bd8cacb7b882f9767862017dc23bc5321e5"}
Nov 27 09:22:14 crc kubenswrapper[4971]: I1127 09:22:14.151160 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-mn94j" event={"ID":"46fe2d18-3744-489f-93f7-d771ebb2d6a8","Type":"ContainerStarted","Data":"d620fa7d94edcdb6513fd2f0ae80b5a1084bed5e185b4453d9d3d2ecdeae8679"}
Nov 27 09:22:14 crc kubenswrapper[4971]: I1127 09:22:14.185269 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-openstack-openstack-cell1-mn94j" podStartSLOduration=1.62119548 podStartE2EDuration="2.185245878s" podCreationTimestamp="2025-11-27 09:22:12 +0000 UTC" firstStartedPulling="2025-11-27 09:22:13.149086854 +0000 UTC m=+8971.341130772" lastFinishedPulling="2025-11-27 09:22:13.713137252 +0000 UTC m=+8971.905181170" observedRunningTime="2025-11-27 09:22:14.170972693 +0000 UTC m=+8972.363016621" watchObservedRunningTime="2025-11-27 09:22:14.185245878 +0000 UTC m=+8972.377289796"
Nov 27 09:22:16 crc kubenswrapper[4971]: I1127 09:22:16.172770 4971 generic.go:334] "Generic (PLEG): container finished" podID="3dc9cc91-a665-4d22-a5dc-e8014b1d1925" containerID="785f0481aacba6490c0714d0faed6875a88a5b7cc4fba70c0f48ff427be89c86" exitCode=0
Nov 27 09:22:16 crc kubenswrapper[4971]: I1127 09:22:16.172894 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-networker-hnrlz" event={"ID":"3dc9cc91-a665-4d22-a5dc-e8014b1d1925","Type":"ContainerDied","Data":"785f0481aacba6490c0714d0faed6875a88a5b7cc4fba70c0f48ff427be89c86"}
Nov 27 09:22:17 crc kubenswrapper[4971]: I1127 09:22:17.869702 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-networker-hnrlz"
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.004510 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3dc9cc91-a665-4d22-a5dc-e8014b1d1925-inventory\") pod \"3dc9cc91-a665-4d22-a5dc-e8014b1d1925\" (UID: \"3dc9cc91-a665-4d22-a5dc-e8014b1d1925\") "
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.004614 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6n7m8\" (UniqueName: \"kubernetes.io/projected/3dc9cc91-a665-4d22-a5dc-e8014b1d1925-kube-api-access-6n7m8\") pod \"3dc9cc91-a665-4d22-a5dc-e8014b1d1925\" (UID: \"3dc9cc91-a665-4d22-a5dc-e8014b1d1925\") "
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.004822 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3dc9cc91-a665-4d22-a5dc-e8014b1d1925-ssh-key\") pod \"3dc9cc91-a665-4d22-a5dc-e8014b1d1925\" (UID: \"3dc9cc91-a665-4d22-a5dc-e8014b1d1925\") "
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.012696 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dc9cc91-a665-4d22-a5dc-e8014b1d1925-kube-api-access-6n7m8" (OuterVolumeSpecName: "kube-api-access-6n7m8") pod "3dc9cc91-a665-4d22-a5dc-e8014b1d1925" (UID: "3dc9cc91-a665-4d22-a5dc-e8014b1d1925"). InnerVolumeSpecName "kube-api-access-6n7m8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.045129 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dc9cc91-a665-4d22-a5dc-e8014b1d1925-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3dc9cc91-a665-4d22-a5dc-e8014b1d1925" (UID: "3dc9cc91-a665-4d22-a5dc-e8014b1d1925"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.050772 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dc9cc91-a665-4d22-a5dc-e8014b1d1925-inventory" (OuterVolumeSpecName: "inventory") pod "3dc9cc91-a665-4d22-a5dc-e8014b1d1925" (UID: "3dc9cc91-a665-4d22-a5dc-e8014b1d1925"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.107866 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3dc9cc91-a665-4d22-a5dc-e8014b1d1925-ssh-key\") on node \"crc\" DevicePath \"\""
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.107897 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3dc9cc91-a665-4d22-a5dc-e8014b1d1925-inventory\") on node \"crc\" DevicePath \"\""
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.107909 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6n7m8\" (UniqueName: \"kubernetes.io/projected/3dc9cc91-a665-4d22-a5dc-e8014b1d1925-kube-api-access-6n7m8\") on node \"crc\" DevicePath \"\""
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.213315 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-networker-hnrlz" event={"ID":"3dc9cc91-a665-4d22-a5dc-e8014b1d1925","Type":"ContainerDied","Data":"7ed728faca3f207db77577cba04c66a355f2d9dc5c437c0c6dad2684591d8308"}
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.213360 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ed728faca3f207db77577cba04c66a355f2d9dc5c437c0c6dad2684591d8308"
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.213428 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-networker-hnrlz"
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.300476 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-openstack-openstack-networker-zs7h5"]
Nov 27 09:22:18 crc kubenswrapper[4971]: E1127 09:22:18.301158 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dc9cc91-a665-4d22-a5dc-e8014b1d1925" containerName="reboot-os-openstack-openstack-networker"
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.301180 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dc9cc91-a665-4d22-a5dc-e8014b1d1925" containerName="reboot-os-openstack-openstack-networker"
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.301498 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dc9cc91-a665-4d22-a5dc-e8014b1d1925" containerName="reboot-os-openstack-openstack-networker"
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.302449 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-networker-zs7h5"
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.305136 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker"
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.305410 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-hbhvx"
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.313220 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-networker-zs7h5"]
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.415161 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-networker-zs7h5\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") " pod="openstack/install-certs-openstack-openstack-networker-zs7h5"
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.415270 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-networker-zs7h5\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") " pod="openstack/install-certs-openstack-openstack-networker-zs7h5"
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.415293 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-868p4\" (UniqueName: \"kubernetes.io/projected/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-kube-api-access-868p4\") pod \"install-certs-openstack-openstack-networker-zs7h5\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") " pod="openstack/install-certs-openstack-openstack-networker-zs7h5"
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.415479 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-inventory\") pod \"install-certs-openstack-openstack-networker-zs7h5\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") " pod="openstack/install-certs-openstack-openstack-networker-zs7h5"
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.415951 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-networker-zs7h5\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") " pod="openstack/install-certs-openstack-openstack-networker-zs7h5"
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.416295 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-ssh-key\") pod \"install-certs-openstack-openstack-networker-zs7h5\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") " pod="openstack/install-certs-openstack-openstack-networker-zs7h5"
Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.517371 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume
\"inventory\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-inventory\") pod \"install-certs-openstack-openstack-networker-zs7h5\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") " pod="openstack/install-certs-openstack-openstack-networker-zs7h5" Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.517482 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-networker-zs7h5\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") " pod="openstack/install-certs-openstack-openstack-networker-zs7h5" Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.517589 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-ssh-key\") pod \"install-certs-openstack-openstack-networker-zs7h5\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") " pod="openstack/install-certs-openstack-openstack-networker-zs7h5" Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.517659 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-networker-zs7h5\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") " pod="openstack/install-certs-openstack-openstack-networker-zs7h5" Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.517684 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-networker-zs7h5\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") " pod="openstack/install-certs-openstack-openstack-networker-zs7h5" Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.517707 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-868p4\" (UniqueName: \"kubernetes.io/projected/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-kube-api-access-868p4\") pod \"install-certs-openstack-openstack-networker-zs7h5\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") " pod="openstack/install-certs-openstack-openstack-networker-zs7h5" Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.523911 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-networker-zs7h5\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") " pod="openstack/install-certs-openstack-openstack-networker-zs7h5" Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.524229 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-inventory\") pod \"install-certs-openstack-openstack-networker-zs7h5\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") " pod="openstack/install-certs-openstack-openstack-networker-zs7h5" Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.524691 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-networker-zs7h5\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") " pod="openstack/install-certs-openstack-openstack-networker-zs7h5" Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.525747 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-ssh-key\") pod \"install-certs-openstack-openstack-networker-zs7h5\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") " pod="openstack/install-certs-openstack-openstack-networker-zs7h5" Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.535058 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-networker-zs7h5\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") " pod="openstack/install-certs-openstack-openstack-networker-zs7h5" Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.537602 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-868p4\" (UniqueName: \"kubernetes.io/projected/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-kube-api-access-868p4\") pod \"install-certs-openstack-openstack-networker-zs7h5\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") " pod="openstack/install-certs-openstack-openstack-networker-zs7h5" Nov 27 09:22:18 crc kubenswrapper[4971]: I1127 09:22:18.642480 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-networker-zs7h5" Nov 27 09:22:19 crc kubenswrapper[4971]: I1127 09:22:19.231502 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-networker-zs7h5"] Nov 27 09:22:19 crc kubenswrapper[4971]: W1127 09:22:19.236838 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podca45dc1d_d9c7_435a_bbf7_a5bb15f244cd.slice/crio-3c4d7d430c65dc4c58c28d5e7ad2eb500fbd2153189e34be09ccd9446299e4e9 WatchSource:0}: Error finding container 3c4d7d430c65dc4c58c28d5e7ad2eb500fbd2153189e34be09ccd9446299e4e9: Status 404 returned error can't find the container with id 3c4d7d430c65dc4c58c28d5e7ad2eb500fbd2153189e34be09ccd9446299e4e9 Nov 27 09:22:20 crc kubenswrapper[4971]: I1127 09:22:20.242593 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-networker-zs7h5" event={"ID":"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd","Type":"ContainerStarted","Data":"3c4d7d430c65dc4c58c28d5e7ad2eb500fbd2153189e34be09ccd9446299e4e9"} Nov 27 09:22:21 crc kubenswrapper[4971]: I1127 09:22:21.254070 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-networker-zs7h5" event={"ID":"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd","Type":"ContainerStarted","Data":"b408059969649843b8eb59052b71bddd79ad9ff1404d8a0cccc2b7eee6868d7d"} Nov 27 09:22:21 crc kubenswrapper[4971]: I1127 09:22:21.280929 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-openstack-openstack-networker-zs7h5" podStartSLOduration=2.7690404859999997 podStartE2EDuration="3.280903382s" podCreationTimestamp="2025-11-27 09:22:18 +0000 UTC" firstStartedPulling="2025-11-27 09:22:19.240118508 +0000 UTC 
m=+8977.432162436" lastFinishedPulling="2025-11-27 09:22:19.751981414 +0000 UTC m=+8977.944025332" observedRunningTime="2025-11-27 09:22:21.272652207 +0000 UTC m=+8979.464696155" watchObservedRunningTime="2025-11-27 09:22:21.280903382 +0000 UTC m=+8979.472947290" Nov 27 09:22:24 crc kubenswrapper[4971]: I1127 09:22:24.313641 4971 generic.go:334] "Generic (PLEG): container finished" podID="46fe2d18-3744-489f-93f7-d771ebb2d6a8" containerID="839fe4a862f162f43779f3a93d234bd8cacb7b882f9767862017dc23bc5321e5" exitCode=0 Nov 27 09:22:24 crc kubenswrapper[4971]: I1127 09:22:24.313716 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-mn94j" event={"ID":"46fe2d18-3744-489f-93f7-d771ebb2d6a8","Type":"ContainerDied","Data":"839fe4a862f162f43779f3a93d234bd8cacb7b882f9767862017dc23bc5321e5"} Nov 27 09:22:25 crc kubenswrapper[4971]: I1127 09:22:25.778664 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-mn94j" Nov 27 09:22:25 crc kubenswrapper[4971]: I1127 09:22:25.889476 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/46fe2d18-3744-489f-93f7-d771ebb2d6a8-ssh-key\") pod \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\" (UID: \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\") " Nov 27 09:22:25 crc kubenswrapper[4971]: I1127 09:22:25.889626 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ft7fq\" (UniqueName: \"kubernetes.io/projected/46fe2d18-3744-489f-93f7-d771ebb2d6a8-kube-api-access-ft7fq\") pod \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\" (UID: \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\") " Nov 27 09:22:25 crc kubenswrapper[4971]: I1127 09:22:25.889774 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/46fe2d18-3744-489f-93f7-d771ebb2d6a8-inventory\") pod \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\" (UID: \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\") " Nov 27 09:22:25 crc kubenswrapper[4971]: I1127 09:22:25.889839 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/46fe2d18-3744-489f-93f7-d771ebb2d6a8-ceph\") pod \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\" (UID: \"46fe2d18-3744-489f-93f7-d771ebb2d6a8\") " Nov 27 09:22:25 crc kubenswrapper[4971]: I1127 09:22:25.896074 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46fe2d18-3744-489f-93f7-d771ebb2d6a8-kube-api-access-ft7fq" (OuterVolumeSpecName: "kube-api-access-ft7fq") pod "46fe2d18-3744-489f-93f7-d771ebb2d6a8" (UID: "46fe2d18-3744-489f-93f7-d771ebb2d6a8"). InnerVolumeSpecName "kube-api-access-ft7fq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:22:25 crc kubenswrapper[4971]: I1127 09:22:25.898439 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46fe2d18-3744-489f-93f7-d771ebb2d6a8-ceph" (OuterVolumeSpecName: "ceph") pod "46fe2d18-3744-489f-93f7-d771ebb2d6a8" (UID: "46fe2d18-3744-489f-93f7-d771ebb2d6a8"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:22:25 crc kubenswrapper[4971]: I1127 09:22:25.924725 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46fe2d18-3744-489f-93f7-d771ebb2d6a8-inventory" (OuterVolumeSpecName: "inventory") pod "46fe2d18-3744-489f-93f7-d771ebb2d6a8" (UID: "46fe2d18-3744-489f-93f7-d771ebb2d6a8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:22:25 crc kubenswrapper[4971]: I1127 09:22:25.943909 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46fe2d18-3744-489f-93f7-d771ebb2d6a8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "46fe2d18-3744-489f-93f7-d771ebb2d6a8" (UID: "46fe2d18-3744-489f-93f7-d771ebb2d6a8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:22:25 crc kubenswrapper[4971]: I1127 09:22:25.993153 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/46fe2d18-3744-489f-93f7-d771ebb2d6a8-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:22:25 crc kubenswrapper[4971]: I1127 09:22:25.993191 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/46fe2d18-3744-489f-93f7-d771ebb2d6a8-ceph\") on node \"crc\" DevicePath \"\"" Nov 27 09:22:25 crc kubenswrapper[4971]: I1127 09:22:25.993201 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/46fe2d18-3744-489f-93f7-d771ebb2d6a8-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:22:25 crc kubenswrapper[4971]: I1127 09:22:25.993214 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ft7fq\" (UniqueName: \"kubernetes.io/projected/46fe2d18-3744-489f-93f7-d771ebb2d6a8-kube-api-access-ft7fq\") on node \"crc\" DevicePath \"\"" Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.335797 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-mn94j" event={"ID":"46fe2d18-3744-489f-93f7-d771ebb2d6a8","Type":"ContainerDied","Data":"d620fa7d94edcdb6513fd2f0ae80b5a1084bed5e185b4453d9d3d2ecdeae8679"} Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.336164 4971 util.go:48] "No ready sandbox for pod can be found. 
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.336192 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d620fa7d94edcdb6513fd2f0ae80b5a1084bed5e185b4453d9d3d2ecdeae8679"
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.500101 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-2d4qd"]
Nov 27 09:22:26 crc kubenswrapper[4971]: E1127 09:22:26.500847 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46fe2d18-3744-489f-93f7-d771ebb2d6a8" containerName="run-os-openstack-openstack-cell1"
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.500869 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="46fe2d18-3744-489f-93f7-d771ebb2d6a8" containerName="run-os-openstack-openstack-cell1"
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.501244 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="46fe2d18-3744-489f-93f7-d771ebb2d6a8" containerName="run-os-openstack-openstack-cell1"
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.502282 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd"
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.505070 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.505384 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-jnkbm"
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.515676 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-2d4qd"]
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.631833 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-2d4qd\" (UID: \"363453ac-740f-4603-a65c-71a4bb442fe6\") " pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd"
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.632059 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-ceph\") pod \"reboot-os-openstack-openstack-cell1-2d4qd\" (UID: \"363453ac-740f-4603-a65c-71a4bb442fe6\") " pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd"
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.632241 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcprl\" (UniqueName: \"kubernetes.io/projected/363453ac-740f-4603-a65c-71a4bb442fe6-kube-api-access-zcprl\") pod \"reboot-os-openstack-openstack-cell1-2d4qd\" (UID: \"363453ac-740f-4603-a65c-71a4bb442fe6\") " pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd"
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.632674 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-inventory\") pod \"reboot-os-openstack-openstack-cell1-2d4qd\" (UID: \"363453ac-740f-4603-a65c-71a4bb442fe6\") " pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd"
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.751242 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-2d4qd\" (UID: \"363453ac-740f-4603-a65c-71a4bb442fe6\") " pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd"
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.751332 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-ceph\") pod \"reboot-os-openstack-openstack-cell1-2d4qd\" (UID: \"363453ac-740f-4603-a65c-71a4bb442fe6\") " pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd"
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.751398 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcprl\" (UniqueName: \"kubernetes.io/projected/363453ac-740f-4603-a65c-71a4bb442fe6-kube-api-access-zcprl\") pod \"reboot-os-openstack-openstack-cell1-2d4qd\" (UID: \"363453ac-740f-4603-a65c-71a4bb442fe6\") " pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd"
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.751518 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-inventory\") pod \"reboot-os-openstack-openstack-cell1-2d4qd\" (UID: \"363453ac-740f-4603-a65c-71a4bb442fe6\") " pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd"
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.761084 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-2d4qd\" (UID: \"363453ac-740f-4603-a65c-71a4bb442fe6\") " pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd"
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.773261 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-inventory\") pod \"reboot-os-openstack-openstack-cell1-2d4qd\" (UID: \"363453ac-740f-4603-a65c-71a4bb442fe6\") " pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd"
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.791268 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcprl\" (UniqueName: \"kubernetes.io/projected/363453ac-740f-4603-a65c-71a4bb442fe6-kube-api-access-zcprl\") pod \"reboot-os-openstack-openstack-cell1-2d4qd\" (UID: \"363453ac-740f-4603-a65c-71a4bb442fe6\") " pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd"
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.793233 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-ceph\") pod \"reboot-os-openstack-openstack-cell1-2d4qd\" (UID: \"363453ac-740f-4603-a65c-71a4bb442fe6\") " pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd"
Nov 27 09:22:26 crc kubenswrapper[4971]: I1127 09:22:26.855771 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd"
Nov 27 09:22:28 crc kubenswrapper[4971]: I1127 09:22:28.251418 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-2d4qd"]
Nov 27 09:22:28 crc kubenswrapper[4971]: I1127 09:22:28.358940 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd" event={"ID":"363453ac-740f-4603-a65c-71a4bb442fe6","Type":"ContainerStarted","Data":"abc2be78e0b19b53cfe2a4f9b9079e4d3bb2e0f2666ffe698ba4922550c71db2"}
Nov 27 09:22:29 crc kubenswrapper[4971]: I1127 09:22:29.371968 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd" event={"ID":"363453ac-740f-4603-a65c-71a4bb442fe6","Type":"ContainerStarted","Data":"6384fd052cacbeeae10f1e5985f37014b72800c41b2f813099649635729b4900"}
Nov 27 09:22:29 crc kubenswrapper[4971]: I1127 09:22:29.410708 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd" podStartSLOduration=2.968284251 podStartE2EDuration="3.410665823s" podCreationTimestamp="2025-11-27 09:22:26 +0000 UTC" firstStartedPulling="2025-11-27 09:22:28.256645111 +0000 UTC m=+8986.448689029" lastFinishedPulling="2025-11-27 09:22:28.699026683 +0000 UTC m=+8986.891070601" observedRunningTime="2025-11-27 09:22:29.393377232 +0000 UTC m=+8987.585421160" watchObservedRunningTime="2025-11-27 09:22:29.410665823 +0000 UTC m=+8987.602709751"
Nov 27 09:22:32 crc kubenswrapper[4971]: I1127 09:22:32.418817 4971 generic.go:334] "Generic (PLEG): container finished" podID="ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd" containerID="b408059969649843b8eb59052b71bddd79ad9ff1404d8a0cccc2b7eee6868d7d" exitCode=0
Nov 27 09:22:32 crc kubenswrapper[4971]: I1127 09:22:32.418905 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-networker-zs7h5" event={"ID":"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd","Type":"ContainerDied","Data":"b408059969649843b8eb59052b71bddd79ad9ff1404d8a0cccc2b7eee6868d7d"}
Nov 27 09:22:33 crc kubenswrapper[4971]: I1127 09:22:33.907798 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-networker-zs7h5"
Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.028554 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-inventory\") pod \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") "
Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.028624 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-ssh-key\") pod \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") "
Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.028877 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-bootstrap-combined-ca-bundle\") pod \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") "
Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.029983 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-neutron-metadata-combined-ca-bundle\") pod \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") "
Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.030076 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-868p4\" (UniqueName: \"kubernetes.io/projected/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-kube-api-access-868p4\") pod \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") "
Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.030131 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-ovn-combined-ca-bundle\") pod \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\" (UID: \"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd\") "
Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.035888 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd" (UID: "ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.036618 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-kube-api-access-868p4" (OuterVolumeSpecName: "kube-api-access-868p4") pod "ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd" (UID: "ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd"). InnerVolumeSpecName "kube-api-access-868p4". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.036737 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd" (UID: "ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.037662 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd" (UID: "ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.061257 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd" (UID: "ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.063419 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-inventory" (OuterVolumeSpecName: "inventory") pod "ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd" (UID: "ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.133022 4971 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.133078 4971 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.133091 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-868p4\" (UniqueName: \"kubernetes.io/projected/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-kube-api-access-868p4\") on node \"crc\" DevicePath \"\"" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.133102 4971 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.133116 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.133125 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.449868 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-networker-zs7h5" event={"ID":"ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd","Type":"ContainerDied","Data":"3c4d7d430c65dc4c58c28d5e7ad2eb500fbd2153189e34be09ccd9446299e4e9"} Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.449911 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3c4d7d430c65dc4c58c28d5e7ad2eb500fbd2153189e34be09ccd9446299e4e9" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.450333 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-networker-zs7h5" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.576060 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-openstack-openstack-networker-6lml5"] Nov 27 09:22:34 crc kubenswrapper[4971]: E1127 09:22:34.576796 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd" containerName="install-certs-openstack-openstack-networker" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.576819 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd" containerName="install-certs-openstack-openstack-networker" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.577160 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd" containerName="install-certs-openstack-openstack-networker" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.578298 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-networker-6lml5" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.581002 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-hbhvx" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.581146 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.581306 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.593213 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-networker-6lml5"] Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.743812 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-ovncontroller-config-0\") pod \"ovn-openstack-openstack-networker-6lml5\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " pod="openstack/ovn-openstack-openstack-networker-6lml5" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.744629 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-inventory\") pod \"ovn-openstack-openstack-networker-6lml5\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " pod="openstack/ovn-openstack-openstack-networker-6lml5" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.744802 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9297w\" (UniqueName: \"kubernetes.io/projected/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-kube-api-access-9297w\") pod \"ovn-openstack-openstack-networker-6lml5\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " pod="openstack/ovn-openstack-openstack-networker-6lml5" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.745956 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-networker-6lml5\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " pod="openstack/ovn-openstack-openstack-networker-6lml5" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.746061 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-ssh-key\") pod \"ovn-openstack-openstack-networker-6lml5\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " pod="openstack/ovn-openstack-openstack-networker-6lml5" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.848145 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-ssh-key\") pod \"ovn-openstack-openstack-networker-6lml5\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " pod="openstack/ovn-openstack-openstack-networker-6lml5" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.848223 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: 
\"kubernetes.io/configmap/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-ovncontroller-config-0\") pod \"ovn-openstack-openstack-networker-6lml5\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " pod="openstack/ovn-openstack-openstack-networker-6lml5" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.848276 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9297w\" (UniqueName: \"kubernetes.io/projected/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-kube-api-access-9297w\") pod \"ovn-openstack-openstack-networker-6lml5\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " pod="openstack/ovn-openstack-openstack-networker-6lml5" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.848325 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-inventory\") pod \"ovn-openstack-openstack-networker-6lml5\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " pod="openstack/ovn-openstack-openstack-networker-6lml5" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.848523 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-networker-6lml5\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " pod="openstack/ovn-openstack-openstack-networker-6lml5" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.849339 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-ovncontroller-config-0\") pod \"ovn-openstack-openstack-networker-6lml5\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " pod="openstack/ovn-openstack-openstack-networker-6lml5" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.854523 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-networker-6lml5\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " pod="openstack/ovn-openstack-openstack-networker-6lml5" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.854581 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-ssh-key\") pod \"ovn-openstack-openstack-networker-6lml5\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " pod="openstack/ovn-openstack-openstack-networker-6lml5" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.854640 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-inventory\") pod \"ovn-openstack-openstack-networker-6lml5\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " pod="openstack/ovn-openstack-openstack-networker-6lml5" Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.870780 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9297w\" (UniqueName: \"kubernetes.io/projected/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-kube-api-access-9297w\") pod \"ovn-openstack-openstack-networker-6lml5\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " pod="openstack/ovn-openstack-openstack-networker-6lml5" Nov 27 09:22:34 crc 
Nov 27 09:22:34 crc kubenswrapper[4971]: I1127 09:22:34.911007 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-networker-6lml5"
Nov 27 09:22:35 crc kubenswrapper[4971]: I1127 09:22:35.461686 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-networker-6lml5"]
Nov 27 09:22:36 crc kubenswrapper[4971]: I1127 09:22:36.471741 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-networker-6lml5" event={"ID":"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6","Type":"ContainerStarted","Data":"7ca5e7cf4e5330b890eb1102e286e6293777e35331ff513d7ef3a9f3b5497ae7"}
Nov 27 09:22:36 crc kubenswrapper[4971]: I1127 09:22:36.471794 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-networker-6lml5" event={"ID":"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6","Type":"ContainerStarted","Data":"b5e2e3080ef4a6e12c846770aae8664118b73efa75a65250ad1fa991832a1eb5"}
Nov 27 09:22:36 crc kubenswrapper[4971]: I1127 09:22:36.491148 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-openstack-openstack-networker-6lml5" podStartSLOduration=2.031091301 podStartE2EDuration="2.491130295s" podCreationTimestamp="2025-11-27 09:22:34 +0000 UTC" firstStartedPulling="2025-11-27 09:22:35.461666181 +0000 UTC m=+8993.653710099" lastFinishedPulling="2025-11-27 09:22:35.921705175 +0000 UTC m=+8994.113749093" observedRunningTime="2025-11-27 09:22:36.488906132 +0000 UTC m=+8994.680950050" watchObservedRunningTime="2025-11-27 09:22:36.491130295 +0000 UTC m=+8994.683174213"
Nov 27 09:22:45 crc kubenswrapper[4971]: I1127 09:22:45.578174 4971 generic.go:334] "Generic (PLEG): container finished" podID="363453ac-740f-4603-a65c-71a4bb442fe6" containerID="6384fd052cacbeeae10f1e5985f37014b72800c41b2f813099649635729b4900" exitCode=0
Nov 27 09:22:45 crc kubenswrapper[4971]: I1127 09:22:45.578255 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd" event={"ID":"363453ac-740f-4603-a65c-71a4bb442fe6","Type":"ContainerDied","Data":"6384fd052cacbeeae10f1e5985f37014b72800c41b2f813099649635729b4900"}
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.158217 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd"
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.354229 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-ceph\") pod \"363453ac-740f-4603-a65c-71a4bb442fe6\" (UID: \"363453ac-740f-4603-a65c-71a4bb442fe6\") "
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.354442 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-ssh-key\") pod \"363453ac-740f-4603-a65c-71a4bb442fe6\" (UID: \"363453ac-740f-4603-a65c-71a4bb442fe6\") "
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.354501 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-inventory\") pod \"363453ac-740f-4603-a65c-71a4bb442fe6\" (UID: \"363453ac-740f-4603-a65c-71a4bb442fe6\") "
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.354725 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcprl\" (UniqueName: \"kubernetes.io/projected/363453ac-740f-4603-a65c-71a4bb442fe6-kube-api-access-zcprl\") pod \"363453ac-740f-4603-a65c-71a4bb442fe6\" (UID: \"363453ac-740f-4603-a65c-71a4bb442fe6\") "
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.361694 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/363453ac-740f-4603-a65c-71a4bb442fe6-kube-api-access-zcprl" (OuterVolumeSpecName: "kube-api-access-zcprl") pod "363453ac-740f-4603-a65c-71a4bb442fe6" (UID: "363453ac-740f-4603-a65c-71a4bb442fe6"). InnerVolumeSpecName "kube-api-access-zcprl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.362150 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-ceph" (OuterVolumeSpecName: "ceph") pod "363453ac-740f-4603-a65c-71a4bb442fe6" (UID: "363453ac-740f-4603-a65c-71a4bb442fe6"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:22:47 crc kubenswrapper[4971]: E1127 09:22:47.384132 4971 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-ssh-key podName:363453ac-740f-4603-a65c-71a4bb442fe6 nodeName:}" failed. No retries permitted until 2025-11-27 09:22:47.884078435 +0000 UTC m=+9006.076122383 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ssh-key" (UniqueName: "kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-ssh-key") pod "363453ac-740f-4603-a65c-71a4bb442fe6" (UID: "363453ac-740f-4603-a65c-71a4bb442fe6") : error deleting /var/lib/kubelet/pods/363453ac-740f-4603-a65c-71a4bb442fe6/volume-subpaths: remove /var/lib/kubelet/pods/363453ac-740f-4603-a65c-71a4bb442fe6/volume-subpaths: no such file or directory
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.388385 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-inventory" (OuterVolumeSpecName: "inventory") pod "363453ac-740f-4603-a65c-71a4bb442fe6" (UID: "363453ac-740f-4603-a65c-71a4bb442fe6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.458362 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcprl\" (UniqueName: \"kubernetes.io/projected/363453ac-740f-4603-a65c-71a4bb442fe6-kube-api-access-zcprl\") on node \"crc\" DevicePath \"\""
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.458401 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-ceph\") on node \"crc\" DevicePath \"\""
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.458413 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-inventory\") on node \"crc\" DevicePath \"\""
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.623759 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd" event={"ID":"363453ac-740f-4603-a65c-71a4bb442fe6","Type":"ContainerDied","Data":"abc2be78e0b19b53cfe2a4f9b9079e4d3bb2e0f2666ffe698ba4922550c71db2"}
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.624253 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="abc2be78e0b19b53cfe2a4f9b9079e4d3bb2e0f2666ffe698ba4922550c71db2"
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.623921 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-2d4qd"
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.697190 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-m6f7c"]
Nov 27 09:22:47 crc kubenswrapper[4971]: E1127 09:22:47.697718 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="363453ac-740f-4603-a65c-71a4bb442fe6" containerName="reboot-os-openstack-openstack-cell1"
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.697746 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="363453ac-740f-4603-a65c-71a4bb442fe6" containerName="reboot-os-openstack-openstack-cell1"
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.698234 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="363453ac-740f-4603-a65c-71a4bb442fe6" containerName="reboot-os-openstack-openstack-cell1"
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.699356 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-m6f7c"
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.716048 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-m6f7c"]
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.867462 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-ssh-key\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c"
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.867694 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c"
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.867746 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c"
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.867771 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c"
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.867799 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-inventory\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c"
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.867928 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c"
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.868000 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-ceph\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c"
Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.868203 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c"
"operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.868313 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcrq6\" (UniqueName: \"kubernetes.io/projected/65820bcd-c13b-4c2d-a475-e98ec977d33d-kube-api-access-kcrq6\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.868409 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.868447 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.868471 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.970464 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-ssh-key\") pod \"363453ac-740f-4603-a65c-71a4bb442fe6\" (UID: \"363453ac-740f-4603-a65c-71a4bb442fe6\") " Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.970990 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.971024 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.971057 4971 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.971100 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-ssh-key\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.971178 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.971220 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.971239 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.971260 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-inventory\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.971294 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.971327 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-ceph\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.971350 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.971376 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcrq6\" (UniqueName: \"kubernetes.io/projected/65820bcd-c13b-4c2d-a475-e98ec977d33d-kube-api-access-kcrq6\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.975660 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "363453ac-740f-4603-a65c-71a4bb442fe6" (UID: "363453ac-740f-4603-a65c-71a4bb442fe6"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.976236 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-inventory\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.976404 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.976518 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.977650 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-ceph\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.977932 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.978165 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-telemetry-combined-ca-bundle\") pod 
\"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.979045 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.981121 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.986293 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.987396 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.987565 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-ssh-key\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:47 crc kubenswrapper[4971]: I1127 09:22:47.989782 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcrq6\" (UniqueName: \"kubernetes.io/projected/65820bcd-c13b-4c2d-a475-e98ec977d33d-kube-api-access-kcrq6\") pod \"install-certs-openstack-openstack-cell1-m6f7c\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:48 crc kubenswrapper[4971]: I1127 09:22:48.021024 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:22:48 crc kubenswrapper[4971]: I1127 09:22:48.074075 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/363453ac-740f-4603-a65c-71a4bb442fe6-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:22:48 crc kubenswrapper[4971]: I1127 09:22:48.629517 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-m6f7c"] Nov 27 09:22:49 crc kubenswrapper[4971]: I1127 09:22:49.646205 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" event={"ID":"65820bcd-c13b-4c2d-a475-e98ec977d33d","Type":"ContainerStarted","Data":"7bf9dd973fa8a27227ac15d4f9f50f5d70c86084012cafd39a6a0d42c15bf27a"} Nov 27 09:22:50 crc kubenswrapper[4971]: I1127 09:22:50.656594 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" event={"ID":"65820bcd-c13b-4c2d-a475-e98ec977d33d","Type":"ContainerStarted","Data":"2e24dc1ea523b799fc99325fbd3bf56390e38c2907ac9663459fc36984bf497c"} Nov 27 09:22:50 crc kubenswrapper[4971]: I1127 09:22:50.682325 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" podStartSLOduration=2.9169723039999997 podStartE2EDuration="3.682301489s" podCreationTimestamp="2025-11-27 09:22:47 +0000 UTC" firstStartedPulling="2025-11-27 09:22:48.641891325 +0000 UTC m=+9006.833935243" lastFinishedPulling="2025-11-27 09:22:49.4072205 +0000 UTC m=+9007.599264428" observedRunningTime="2025-11-27 09:22:50.676925766 +0000 UTC m=+9008.868969684" watchObservedRunningTime="2025-11-27 09:22:50.682301489 +0000 UTC m=+9008.874345407" Nov 27 09:23:10 crc kubenswrapper[4971]: I1127 09:23:10.933986 4971 generic.go:334] "Generic (PLEG): container finished" podID="65820bcd-c13b-4c2d-a475-e98ec977d33d" containerID="2e24dc1ea523b799fc99325fbd3bf56390e38c2907ac9663459fc36984bf497c" exitCode=0 Nov 27 09:23:10 crc kubenswrapper[4971]: I1127 09:23:10.934097 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" event={"ID":"65820bcd-c13b-4c2d-a475-e98ec977d33d","Type":"ContainerDied","Data":"2e24dc1ea523b799fc99325fbd3bf56390e38c2907ac9663459fc36984bf497c"} Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.396100 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.443314 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-nova-combined-ca-bundle\") pod \"65820bcd-c13b-4c2d-a475-e98ec977d33d\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.443396 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-ceph\") pod \"65820bcd-c13b-4c2d-a475-e98ec977d33d\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.443506 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-libvirt-combined-ca-bundle\") pod \"65820bcd-c13b-4c2d-a475-e98ec977d33d\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.443628 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-telemetry-combined-ca-bundle\") pod \"65820bcd-c13b-4c2d-a475-e98ec977d33d\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.443660 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcrq6\" (UniqueName: \"kubernetes.io/projected/65820bcd-c13b-4c2d-a475-e98ec977d33d-kube-api-access-kcrq6\") pod \"65820bcd-c13b-4c2d-a475-e98ec977d33d\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.443733 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-neutron-sriov-combined-ca-bundle\") pod \"65820bcd-c13b-4c2d-a475-e98ec977d33d\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.443761 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-ssh-key\") pod \"65820bcd-c13b-4c2d-a475-e98ec977d33d\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.443799 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-ovn-combined-ca-bundle\") pod \"65820bcd-c13b-4c2d-a475-e98ec977d33d\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.443840 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-bootstrap-combined-ca-bundle\") pod \"65820bcd-c13b-4c2d-a475-e98ec977d33d\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.443912 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-neutron-dhcp-combined-ca-bundle\") pod \"65820bcd-c13b-4c2d-a475-e98ec977d33d\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.443981 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-neutron-metadata-combined-ca-bundle\") pod \"65820bcd-c13b-4c2d-a475-e98ec977d33d\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.444032 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-inventory\") pod \"65820bcd-c13b-4c2d-a475-e98ec977d33d\" (UID: \"65820bcd-c13b-4c2d-a475-e98ec977d33d\") " Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.456791 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "65820bcd-c13b-4c2d-a475-e98ec977d33d" (UID: "65820bcd-c13b-4c2d-a475-e98ec977d33d"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.457410 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "65820bcd-c13b-4c2d-a475-e98ec977d33d" (UID: "65820bcd-c13b-4c2d-a475-e98ec977d33d"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.458746 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "65820bcd-c13b-4c2d-a475-e98ec977d33d" (UID: "65820bcd-c13b-4c2d-a475-e98ec977d33d"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.458943 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "65820bcd-c13b-4c2d-a475-e98ec977d33d" (UID: "65820bcd-c13b-4c2d-a475-e98ec977d33d"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.459109 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-ceph" (OuterVolumeSpecName: "ceph") pod "65820bcd-c13b-4c2d-a475-e98ec977d33d" (UID: "65820bcd-c13b-4c2d-a475-e98ec977d33d"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.460687 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "65820bcd-c13b-4c2d-a475-e98ec977d33d" (UID: "65820bcd-c13b-4c2d-a475-e98ec977d33d"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.461214 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "65820bcd-c13b-4c2d-a475-e98ec977d33d" (UID: "65820bcd-c13b-4c2d-a475-e98ec977d33d"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.464952 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "65820bcd-c13b-4c2d-a475-e98ec977d33d" (UID: "65820bcd-c13b-4c2d-a475-e98ec977d33d"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.481723 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "65820bcd-c13b-4c2d-a475-e98ec977d33d" (UID: "65820bcd-c13b-4c2d-a475-e98ec977d33d"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.482433 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65820bcd-c13b-4c2d-a475-e98ec977d33d-kube-api-access-kcrq6" (OuterVolumeSpecName: "kube-api-access-kcrq6") pod "65820bcd-c13b-4c2d-a475-e98ec977d33d" (UID: "65820bcd-c13b-4c2d-a475-e98ec977d33d"). InnerVolumeSpecName "kube-api-access-kcrq6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.485403 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "65820bcd-c13b-4c2d-a475-e98ec977d33d" (UID: "65820bcd-c13b-4c2d-a475-e98ec977d33d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.495412 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-inventory" (OuterVolumeSpecName: "inventory") pod "65820bcd-c13b-4c2d-a475-e98ec977d33d" (UID: "65820bcd-c13b-4c2d-a475-e98ec977d33d"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.547149 4971 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.547186 4971 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.547196 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.547206 4971 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.547215 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-ceph\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.547223 4971 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.547232 4971 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.547241 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcrq6\" (UniqueName: \"kubernetes.io/projected/65820bcd-c13b-4c2d-a475-e98ec977d33d-kube-api-access-kcrq6\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.547251 4971 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.547261 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.547271 4971 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.547279 4971 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65820bcd-c13b-4c2d-a475-e98ec977d33d-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.957331 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" event={"ID":"65820bcd-c13b-4c2d-a475-e98ec977d33d","Type":"ContainerDied","Data":"7bf9dd973fa8a27227ac15d4f9f50f5d70c86084012cafd39a6a0d42c15bf27a"} Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.957889 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7bf9dd973fa8a27227ac15d4f9f50f5d70c86084012cafd39a6a0d42c15bf27a" Nov 27 09:23:12 crc kubenswrapper[4971]: I1127 09:23:12.957423 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-m6f7c" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.060661 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-7snx6"] Nov 27 09:23:13 crc kubenswrapper[4971]: E1127 09:23:13.061504 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65820bcd-c13b-4c2d-a475-e98ec977d33d" containerName="install-certs-openstack-openstack-cell1" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.061562 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="65820bcd-c13b-4c2d-a475-e98ec977d33d" containerName="install-certs-openstack-openstack-cell1" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.061811 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="65820bcd-c13b-4c2d-a475-e98ec977d33d" containerName="install-certs-openstack-openstack-cell1" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.063010 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.066686 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.066695 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-jnkbm" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.073556 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-7snx6"] Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.165413 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-ceph\") pod \"ceph-client-openstack-openstack-cell1-7snx6\" (UID: \"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\") " pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.165483 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-7snx6\" (UID: \"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\") " pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.165503 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-inventory\") pod \"ceph-client-openstack-openstack-cell1-7snx6\" (UID: \"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\") " pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.165561 4971 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ths7g\" (UniqueName: \"kubernetes.io/projected/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-kube-api-access-ths7g\") pod \"ceph-client-openstack-openstack-cell1-7snx6\" (UID: \"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\") " pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.267745 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ths7g\" (UniqueName: \"kubernetes.io/projected/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-kube-api-access-ths7g\") pod \"ceph-client-openstack-openstack-cell1-7snx6\" (UID: \"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\") " pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.267927 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-ceph\") pod \"ceph-client-openstack-openstack-cell1-7snx6\" (UID: \"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\") " pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.267951 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-7snx6\" (UID: \"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\") " pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.267969 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-inventory\") pod \"ceph-client-openstack-openstack-cell1-7snx6\" (UID: \"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\") " pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.275174 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-inventory\") pod \"ceph-client-openstack-openstack-cell1-7snx6\" (UID: \"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\") " pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.275312 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-ceph\") pod \"ceph-client-openstack-openstack-cell1-7snx6\" (UID: \"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\") " pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.275552 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-7snx6\" (UID: \"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\") " pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.294755 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ths7g\" (UniqueName: \"kubernetes.io/projected/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-kube-api-access-ths7g\") pod \"ceph-client-openstack-openstack-cell1-7snx6\" (UID: 
\"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\") " pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.382462 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" Nov 27 09:23:13 crc kubenswrapper[4971]: I1127 09:23:13.964573 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-7snx6"] Nov 27 09:23:14 crc kubenswrapper[4971]: I1127 09:23:14.980181 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" event={"ID":"7101fdfd-d9ee-4e8b-8971-82acd2b5b391","Type":"ContainerStarted","Data":"55e4ff319f352130ab805e8374e4dd0ef501a01268ca28ccba33b21e72d1671e"} Nov 27 09:23:14 crc kubenswrapper[4971]: I1127 09:23:14.980469 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" event={"ID":"7101fdfd-d9ee-4e8b-8971-82acd2b5b391","Type":"ContainerStarted","Data":"680cce1da3d1525efd4b34aa9617ae02f033632241cc4b2ce5da3a9cbeba99d7"} Nov 27 09:23:15 crc kubenswrapper[4971]: I1127 09:23:15.009377 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" podStartSLOduration=1.469678329 podStartE2EDuration="2.009354955s" podCreationTimestamp="2025-11-27 09:23:13 +0000 UTC" firstStartedPulling="2025-11-27 09:23:14.056353472 +0000 UTC m=+9032.248397390" lastFinishedPulling="2025-11-27 09:23:14.596030098 +0000 UTC m=+9032.788074016" observedRunningTime="2025-11-27 09:23:14.995565273 +0000 UTC m=+9033.187609201" watchObservedRunningTime="2025-11-27 09:23:15.009354955 +0000 UTC m=+9033.201398873" Nov 27 09:23:21 crc kubenswrapper[4971]: I1127 09:23:21.040870 4971 generic.go:334] "Generic (PLEG): container finished" podID="7101fdfd-d9ee-4e8b-8971-82acd2b5b391" containerID="55e4ff319f352130ab805e8374e4dd0ef501a01268ca28ccba33b21e72d1671e" exitCode=0 Nov 27 09:23:21 crc kubenswrapper[4971]: I1127 09:23:21.040963 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" event={"ID":"7101fdfd-d9ee-4e8b-8971-82acd2b5b391","Type":"ContainerDied","Data":"55e4ff319f352130ab805e8374e4dd0ef501a01268ca28ccba33b21e72d1671e"} Nov 27 09:23:22 crc kubenswrapper[4971]: I1127 09:23:22.481077 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" Nov 27 09:23:22 crc kubenswrapper[4971]: I1127 09:23:22.581396 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ths7g\" (UniqueName: \"kubernetes.io/projected/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-kube-api-access-ths7g\") pod \"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\" (UID: \"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\") " Nov 27 09:23:22 crc kubenswrapper[4971]: I1127 09:23:22.581868 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-ceph\") pod \"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\" (UID: \"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\") " Nov 27 09:23:22 crc kubenswrapper[4971]: I1127 09:23:22.581943 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-inventory\") pod \"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\" (UID: \"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\") " Nov 27 09:23:22 crc kubenswrapper[4971]: I1127 09:23:22.582158 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-ssh-key\") pod \"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\" (UID: \"7101fdfd-d9ee-4e8b-8971-82acd2b5b391\") " Nov 27 09:23:22 crc kubenswrapper[4971]: I1127 09:23:22.588811 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-ceph" (OuterVolumeSpecName: "ceph") pod "7101fdfd-d9ee-4e8b-8971-82acd2b5b391" (UID: "7101fdfd-d9ee-4e8b-8971-82acd2b5b391"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:23:22 crc kubenswrapper[4971]: I1127 09:23:22.589146 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-kube-api-access-ths7g" (OuterVolumeSpecName: "kube-api-access-ths7g") pod "7101fdfd-d9ee-4e8b-8971-82acd2b5b391" (UID: "7101fdfd-d9ee-4e8b-8971-82acd2b5b391"). InnerVolumeSpecName "kube-api-access-ths7g". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:23:22 crc kubenswrapper[4971]: I1127 09:23:22.615592 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-inventory" (OuterVolumeSpecName: "inventory") pod "7101fdfd-d9ee-4e8b-8971-82acd2b5b391" (UID: "7101fdfd-d9ee-4e8b-8971-82acd2b5b391"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:23:22 crc kubenswrapper[4971]: I1127 09:23:22.637652 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7101fdfd-d9ee-4e8b-8971-82acd2b5b391" (UID: "7101fdfd-d9ee-4e8b-8971-82acd2b5b391"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:23:22 crc kubenswrapper[4971]: I1127 09:23:22.685659 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:22 crc kubenswrapper[4971]: I1127 09:23:22.685688 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ths7g\" (UniqueName: \"kubernetes.io/projected/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-kube-api-access-ths7g\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:22 crc kubenswrapper[4971]: I1127 09:23:22.685699 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-ceph\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:22 crc kubenswrapper[4971]: I1127 09:23:22.685710 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7101fdfd-d9ee-4e8b-8971-82acd2b5b391-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.081992 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" event={"ID":"7101fdfd-d9ee-4e8b-8971-82acd2b5b391","Type":"ContainerDied","Data":"680cce1da3d1525efd4b34aa9617ae02f033632241cc4b2ce5da3a9cbeba99d7"} Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.082038 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="680cce1da3d1525efd4b34aa9617ae02f033632241cc4b2ce5da3a9cbeba99d7" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.082091 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-7snx6" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.153296 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-openstack-openstack-cell1-mpk56"] Nov 27 09:23:23 crc kubenswrapper[4971]: E1127 09:23:23.153941 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7101fdfd-d9ee-4e8b-8971-82acd2b5b391" containerName="ceph-client-openstack-openstack-cell1" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.153956 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="7101fdfd-d9ee-4e8b-8971-82acd2b5b391" containerName="ceph-client-openstack-openstack-cell1" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.154220 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="7101fdfd-d9ee-4e8b-8971-82acd2b5b391" containerName="ceph-client-openstack-openstack-cell1" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.155710 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.159597 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.159677 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-jnkbm" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.164977 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-mpk56"] Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.200472 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ssh-key\") pod \"ovn-openstack-openstack-cell1-mpk56\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.200583 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-inventory\") pod \"ovn-openstack-openstack-cell1-mpk56\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.200713 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q24g2\" (UniqueName: \"kubernetes.io/projected/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-kube-api-access-q24g2\") pod \"ovn-openstack-openstack-cell1-mpk56\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.200817 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-mpk56\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.200880 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ceph\") pod \"ovn-openstack-openstack-cell1-mpk56\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.200914 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-mpk56\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.303503 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ssh-key\") pod \"ovn-openstack-openstack-cell1-mpk56\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:23 crc kubenswrapper[4971]: 
I1127 09:23:23.303603 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-inventory\") pod \"ovn-openstack-openstack-cell1-mpk56\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.303689 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q24g2\" (UniqueName: \"kubernetes.io/projected/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-kube-api-access-q24g2\") pod \"ovn-openstack-openstack-cell1-mpk56\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.303790 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-mpk56\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.303858 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ceph\") pod \"ovn-openstack-openstack-cell1-mpk56\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.303886 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-mpk56\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.305157 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-mpk56\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.846281 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-inventory\") pod \"ovn-openstack-openstack-cell1-mpk56\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.849090 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ssh-key\") pod \"ovn-openstack-openstack-cell1-mpk56\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.849115 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-mpk56\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " 
pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.849511 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q24g2\" (UniqueName: \"kubernetes.io/projected/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-kube-api-access-q24g2\") pod \"ovn-openstack-openstack-cell1-mpk56\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:23 crc kubenswrapper[4971]: I1127 09:23:23.852501 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ceph\") pod \"ovn-openstack-openstack-cell1-mpk56\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:24 crc kubenswrapper[4971]: I1127 09:23:24.076963 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:23:24 crc kubenswrapper[4971]: I1127 09:23:24.684113 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-mpk56"] Nov 27 09:23:25 crc kubenswrapper[4971]: I1127 09:23:25.108482 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-mpk56" event={"ID":"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba","Type":"ContainerStarted","Data":"6825e4b408f3022898e3e48e21b8fbc204e408489a4ccda768f359f8d2b7d9bf"} Nov 27 09:23:26 crc kubenswrapper[4971]: I1127 09:23:26.119028 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-mpk56" event={"ID":"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba","Type":"ContainerStarted","Data":"d0cc82ae4a433df62a2c62fb68daf0eb5657cd5abd360a68a788d3d305a69f2f"} Nov 27 09:23:26 crc kubenswrapper[4971]: I1127 09:23:26.146258 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-openstack-openstack-cell1-mpk56" podStartSLOduration=2.686239379 podStartE2EDuration="3.146235422s" podCreationTimestamp="2025-11-27 09:23:23 +0000 UTC" firstStartedPulling="2025-11-27 09:23:24.681201888 +0000 UTC m=+9042.873245806" lastFinishedPulling="2025-11-27 09:23:25.141197921 +0000 UTC m=+9043.333241849" observedRunningTime="2025-11-27 09:23:26.137870924 +0000 UTC m=+9044.329914852" watchObservedRunningTime="2025-11-27 09:23:26.146235422 +0000 UTC m=+9044.338279340" Nov 27 09:23:56 crc kubenswrapper[4971]: I1127 09:23:56.413647 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 09:23:56 crc kubenswrapper[4971]: I1127 09:23:56.414610 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 09:23:57 crc kubenswrapper[4971]: I1127 09:23:57.451739 4971 generic.go:334] "Generic (PLEG): container finished" podID="8e0f0dfd-4c80-47f9-b334-dfeac0f423d6" containerID="7ca5e7cf4e5330b890eb1102e286e6293777e35331ff513d7ef3a9f3b5497ae7" exitCode=0 Nov 27 09:23:57 crc kubenswrapper[4971]: I1127 09:23:57.451831 4971 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-networker-6lml5" event={"ID":"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6","Type":"ContainerDied","Data":"7ca5e7cf4e5330b890eb1102e286e6293777e35331ff513d7ef3a9f3b5497ae7"} Nov 27 09:23:59 crc kubenswrapper[4971]: I1127 09:23:59.761908 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-networker-6lml5" Nov 27 09:23:59 crc kubenswrapper[4971]: I1127 09:23:59.815822 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9297w\" (UniqueName: \"kubernetes.io/projected/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-kube-api-access-9297w\") pod \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " Nov 27 09:23:59 crc kubenswrapper[4971]: I1127 09:23:59.815942 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-ssh-key\") pod \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " Nov 27 09:23:59 crc kubenswrapper[4971]: I1127 09:23:59.817031 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-ovncontroller-config-0\") pod \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " Nov 27 09:23:59 crc kubenswrapper[4971]: I1127 09:23:59.817213 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-ovn-combined-ca-bundle\") pod \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " Nov 27 09:23:59 crc kubenswrapper[4971]: I1127 09:23:59.817238 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-inventory\") pod \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\" (UID: \"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6\") " Nov 27 09:23:59 crc kubenswrapper[4971]: I1127 09:23:59.851839 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-kube-api-access-9297w" (OuterVolumeSpecName: "kube-api-access-9297w") pod "8e0f0dfd-4c80-47f9-b334-dfeac0f423d6" (UID: "8e0f0dfd-4c80-47f9-b334-dfeac0f423d6"). InnerVolumeSpecName "kube-api-access-9297w". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:23:59 crc kubenswrapper[4971]: I1127 09:23:59.851846 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "8e0f0dfd-4c80-47f9-b334-dfeac0f423d6" (UID: "8e0f0dfd-4c80-47f9-b334-dfeac0f423d6"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:23:59 crc kubenswrapper[4971]: I1127 09:23:59.855978 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8e0f0dfd-4c80-47f9-b334-dfeac0f423d6" (UID: "8e0f0dfd-4c80-47f9-b334-dfeac0f423d6"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:23:59 crc kubenswrapper[4971]: I1127 09:23:59.857723 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-inventory" (OuterVolumeSpecName: "inventory") pod "8e0f0dfd-4c80-47f9-b334-dfeac0f423d6" (UID: "8e0f0dfd-4c80-47f9-b334-dfeac0f423d6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:23:59 crc kubenswrapper[4971]: I1127 09:23:59.872739 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "8e0f0dfd-4c80-47f9-b334-dfeac0f423d6" (UID: "8e0f0dfd-4c80-47f9-b334-dfeac0f423d6"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:23:59 crc kubenswrapper[4971]: I1127 09:23:59.920191 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9297w\" (UniqueName: \"kubernetes.io/projected/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-kube-api-access-9297w\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:59 crc kubenswrapper[4971]: I1127 09:23:59.920231 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:59 crc kubenswrapper[4971]: I1127 09:23:59.920244 4971 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:59 crc kubenswrapper[4971]: I1127 09:23:59.920256 4971 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:23:59 crc kubenswrapper[4971]: I1127 09:23:59.920267 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e0f0dfd-4c80-47f9-b334-dfeac0f423d6-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:24:00 crc kubenswrapper[4971]: I1127 09:24:00.485083 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-networker-6lml5" event={"ID":"8e0f0dfd-4c80-47f9-b334-dfeac0f423d6","Type":"ContainerDied","Data":"b5e2e3080ef4a6e12c846770aae8664118b73efa75a65250ad1fa991832a1eb5"} Nov 27 09:24:00 crc kubenswrapper[4971]: I1127 09:24:00.485132 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5e2e3080ef4a6e12c846770aae8664118b73efa75a65250ad1fa991832a1eb5" Nov 27 09:24:00 crc kubenswrapper[4971]: I1127 09:24:00.485205 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-networker-6lml5" Nov 27 09:24:00 crc kubenswrapper[4971]: I1127 09:24:00.958729 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-openstack-openstack-networker-fqnz2"] Nov 27 09:24:00 crc kubenswrapper[4971]: E1127 09:24:00.959698 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e0f0dfd-4c80-47f9-b334-dfeac0f423d6" containerName="ovn-openstack-openstack-networker" Nov 27 09:24:00 crc kubenswrapper[4971]: I1127 09:24:00.959715 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e0f0dfd-4c80-47f9-b334-dfeac0f423d6" containerName="ovn-openstack-openstack-networker" Nov 27 09:24:00 crc kubenswrapper[4971]: I1127 09:24:00.960032 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e0f0dfd-4c80-47f9-b334-dfeac0f423d6" containerName="ovn-openstack-openstack-networker" Nov 27 09:24:00 crc kubenswrapper[4971]: I1127 09:24:00.961070 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:00 crc kubenswrapper[4971]: I1127 09:24:00.964320 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker" Nov 27 09:24:00 crc kubenswrapper[4971]: I1127 09:24:00.965656 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Nov 27 09:24:00 crc kubenswrapper[4971]: I1127 09:24:00.967077 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-hbhvx" Nov 27 09:24:00 crc kubenswrapper[4971]: I1127 09:24:00.978772 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-networker-fqnz2"] Nov 27 09:24:00 crc kubenswrapper[4971]: I1127 09:24:00.985334 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Nov 27 09:24:01 crc kubenswrapper[4971]: I1127 09:24:01.042050 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rw57m\" (UniqueName: \"kubernetes.io/projected/69e0000b-dec5-4186-85ac-b65c6091f4d8-kube-api-access-rw57m\") pod \"neutron-metadata-openstack-openstack-networker-fqnz2\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:01 crc kubenswrapper[4971]: I1127 09:24:01.042108 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-networker-fqnz2\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:01 crc kubenswrapper[4971]: I1127 09:24:01.042134 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-networker-fqnz2\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:01 crc kubenswrapper[4971]: 
I1127 09:24:01.042168 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-ssh-key\") pod \"neutron-metadata-openstack-openstack-networker-fqnz2\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:01 crc kubenswrapper[4971]: I1127 09:24:01.042236 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-inventory\") pod \"neutron-metadata-openstack-openstack-networker-fqnz2\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:01 crc kubenswrapper[4971]: I1127 09:24:01.042258 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-networker-fqnz2\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:02 crc kubenswrapper[4971]: I1127 09:24:02.012550 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rw57m\" (UniqueName: \"kubernetes.io/projected/69e0000b-dec5-4186-85ac-b65c6091f4d8-kube-api-access-rw57m\") pod \"neutron-metadata-openstack-openstack-networker-fqnz2\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:02 crc kubenswrapper[4971]: I1127 09:24:02.012618 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-networker-fqnz2\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:02 crc kubenswrapper[4971]: I1127 09:24:02.012655 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-networker-fqnz2\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:02 crc kubenswrapper[4971]: I1127 09:24:02.012700 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-ssh-key\") pod \"neutron-metadata-openstack-openstack-networker-fqnz2\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:02 crc kubenswrapper[4971]: I1127 09:24:02.012790 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-inventory\") pod \"neutron-metadata-openstack-openstack-networker-fqnz2\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " 
pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:02 crc kubenswrapper[4971]: I1127 09:24:02.012818 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-networker-fqnz2\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:02 crc kubenswrapper[4971]: I1127 09:24:02.447267 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-networker-fqnz2\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:02 crc kubenswrapper[4971]: I1127 09:24:02.448142 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-ssh-key\") pod \"neutron-metadata-openstack-openstack-networker-fqnz2\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:02 crc kubenswrapper[4971]: I1127 09:24:02.449076 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-networker-fqnz2\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:02 crc kubenswrapper[4971]: I1127 09:24:02.461733 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-networker-fqnz2\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:02 crc kubenswrapper[4971]: I1127 09:24:02.462672 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-inventory\") pod \"neutron-metadata-openstack-openstack-networker-fqnz2\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:02 crc kubenswrapper[4971]: I1127 09:24:02.468814 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rw57m\" (UniqueName: \"kubernetes.io/projected/69e0000b-dec5-4186-85ac-b65c6091f4d8-kube-api-access-rw57m\") pod \"neutron-metadata-openstack-openstack-networker-fqnz2\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:02 crc kubenswrapper[4971]: I1127 09:24:02.540757 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:24:03 crc kubenswrapper[4971]: I1127 09:24:03.110432 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-networker-fqnz2"] Nov 27 09:24:04 crc kubenswrapper[4971]: I1127 09:24:04.084416 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" event={"ID":"69e0000b-dec5-4186-85ac-b65c6091f4d8","Type":"ContainerStarted","Data":"a8ccbce1edcbc0aefe16ff94c4ce846f8c1eeb09e45912ea427893441a6719b2"} Nov 27 09:24:04 crc kubenswrapper[4971]: I1127 09:24:04.085016 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" event={"ID":"69e0000b-dec5-4186-85ac-b65c6091f4d8","Type":"ContainerStarted","Data":"7406e192e8f48580b31402e656f80817ebaa5f81e6d6cd011a3fdceacdfa2860"} Nov 27 09:24:04 crc kubenswrapper[4971]: I1127 09:24:04.113486 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" podStartSLOduration=3.5519267660000002 podStartE2EDuration="4.113466533s" podCreationTimestamp="2025-11-27 09:24:00 +0000 UTC" firstStartedPulling="2025-11-27 09:24:03.116775289 +0000 UTC m=+9081.308819207" lastFinishedPulling="2025-11-27 09:24:03.678315056 +0000 UTC m=+9081.870358974" observedRunningTime="2025-11-27 09:24:04.101098082 +0000 UTC m=+9082.293142010" watchObservedRunningTime="2025-11-27 09:24:04.113466533 +0000 UTC m=+9082.305510451" Nov 27 09:24:26 crc kubenswrapper[4971]: I1127 09:24:26.413282 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 09:24:26 crc kubenswrapper[4971]: I1127 09:24:26.414394 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 09:24:35 crc kubenswrapper[4971]: I1127 09:24:35.714257 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8ffng"] Nov 27 09:24:35 crc kubenswrapper[4971]: I1127 09:24:35.717315 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8ffng" Nov 27 09:24:35 crc kubenswrapper[4971]: I1127 09:24:35.742594 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8ffng"] Nov 27 09:24:35 crc kubenswrapper[4971]: I1127 09:24:35.831074 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56f7e8a9-4db2-48bb-8032-0474b5096b91-catalog-content\") pod \"community-operators-8ffng\" (UID: \"56f7e8a9-4db2-48bb-8032-0474b5096b91\") " pod="openshift-marketplace/community-operators-8ffng" Nov 27 09:24:35 crc kubenswrapper[4971]: I1127 09:24:35.831183 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56f7e8a9-4db2-48bb-8032-0474b5096b91-utilities\") pod \"community-operators-8ffng\" (UID: \"56f7e8a9-4db2-48bb-8032-0474b5096b91\") " pod="openshift-marketplace/community-operators-8ffng" Nov 27 09:24:35 crc kubenswrapper[4971]: I1127 09:24:35.831441 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5rrt\" (UniqueName: \"kubernetes.io/projected/56f7e8a9-4db2-48bb-8032-0474b5096b91-kube-api-access-g5rrt\") pod \"community-operators-8ffng\" (UID: \"56f7e8a9-4db2-48bb-8032-0474b5096b91\") " pod="openshift-marketplace/community-operators-8ffng" Nov 27 09:24:35 crc kubenswrapper[4971]: I1127 09:24:35.933477 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56f7e8a9-4db2-48bb-8032-0474b5096b91-catalog-content\") pod \"community-operators-8ffng\" (UID: \"56f7e8a9-4db2-48bb-8032-0474b5096b91\") " pod="openshift-marketplace/community-operators-8ffng" Nov 27 09:24:35 crc kubenswrapper[4971]: I1127 09:24:35.933630 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56f7e8a9-4db2-48bb-8032-0474b5096b91-utilities\") pod \"community-operators-8ffng\" (UID: \"56f7e8a9-4db2-48bb-8032-0474b5096b91\") " pod="openshift-marketplace/community-operators-8ffng" Nov 27 09:24:35 crc kubenswrapper[4971]: I1127 09:24:35.933793 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5rrt\" (UniqueName: \"kubernetes.io/projected/56f7e8a9-4db2-48bb-8032-0474b5096b91-kube-api-access-g5rrt\") pod \"community-operators-8ffng\" (UID: \"56f7e8a9-4db2-48bb-8032-0474b5096b91\") " pod="openshift-marketplace/community-operators-8ffng" Nov 27 09:24:35 crc kubenswrapper[4971]: I1127 09:24:35.934154 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56f7e8a9-4db2-48bb-8032-0474b5096b91-catalog-content\") pod \"community-operators-8ffng\" (UID: \"56f7e8a9-4db2-48bb-8032-0474b5096b91\") " pod="openshift-marketplace/community-operators-8ffng" Nov 27 09:24:35 crc kubenswrapper[4971]: I1127 09:24:35.934189 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56f7e8a9-4db2-48bb-8032-0474b5096b91-utilities\") pod \"community-operators-8ffng\" (UID: \"56f7e8a9-4db2-48bb-8032-0474b5096b91\") " pod="openshift-marketplace/community-operators-8ffng" Nov 27 09:24:35 crc kubenswrapper[4971]: I1127 09:24:35.959130 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-g5rrt\" (UniqueName: \"kubernetes.io/projected/56f7e8a9-4db2-48bb-8032-0474b5096b91-kube-api-access-g5rrt\") pod \"community-operators-8ffng\" (UID: \"56f7e8a9-4db2-48bb-8032-0474b5096b91\") " pod="openshift-marketplace/community-operators-8ffng" Nov 27 09:24:36 crc kubenswrapper[4971]: I1127 09:24:36.047673 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8ffng" Nov 27 09:24:36 crc kubenswrapper[4971]: I1127 09:24:36.687582 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8ffng"] Nov 27 09:24:36 crc kubenswrapper[4971]: W1127 09:24:36.693784 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56f7e8a9_4db2_48bb_8032_0474b5096b91.slice/crio-a9edae8576c90eed814591412358725c980cdc4a62912839f3d84079d39bdbdf WatchSource:0}: Error finding container a9edae8576c90eed814591412358725c980cdc4a62912839f3d84079d39bdbdf: Status 404 returned error can't find the container with id a9edae8576c90eed814591412358725c980cdc4a62912839f3d84079d39bdbdf Nov 27 09:24:37 crc kubenswrapper[4971]: I1127 09:24:37.463164 4971 generic.go:334] "Generic (PLEG): container finished" podID="56f7e8a9-4db2-48bb-8032-0474b5096b91" containerID="d77853b01d9e59abea98b1fb58a809d801c91613b43feb78951352d08adb1568" exitCode=0 Nov 27 09:24:37 crc kubenswrapper[4971]: I1127 09:24:37.463496 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8ffng" event={"ID":"56f7e8a9-4db2-48bb-8032-0474b5096b91","Type":"ContainerDied","Data":"d77853b01d9e59abea98b1fb58a809d801c91613b43feb78951352d08adb1568"} Nov 27 09:24:37 crc kubenswrapper[4971]: I1127 09:24:37.463668 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8ffng" event={"ID":"56f7e8a9-4db2-48bb-8032-0474b5096b91","Type":"ContainerStarted","Data":"a9edae8576c90eed814591412358725c980cdc4a62912839f3d84079d39bdbdf"} Nov 27 09:24:37 crc kubenswrapper[4971]: I1127 09:24:37.466578 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 27 09:24:39 crc kubenswrapper[4971]: I1127 09:24:39.483783 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8ffng" event={"ID":"56f7e8a9-4db2-48bb-8032-0474b5096b91","Type":"ContainerStarted","Data":"8e750803e64e33c418899655277293f27023d367753ad3de792c5d866f93dc8c"} Nov 27 09:24:40 crc kubenswrapper[4971]: I1127 09:24:40.498055 4971 generic.go:334] "Generic (PLEG): container finished" podID="7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba" containerID="d0cc82ae4a433df62a2c62fb68daf0eb5657cd5abd360a68a788d3d305a69f2f" exitCode=0 Nov 27 09:24:40 crc kubenswrapper[4971]: I1127 09:24:40.498135 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-mpk56" event={"ID":"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba","Type":"ContainerDied","Data":"d0cc82ae4a433df62a2c62fb68daf0eb5657cd5abd360a68a788d3d305a69f2f"} Nov 27 09:24:41 crc kubenswrapper[4971]: I1127 09:24:41.508306 4971 generic.go:334] "Generic (PLEG): container finished" podID="56f7e8a9-4db2-48bb-8032-0474b5096b91" containerID="8e750803e64e33c418899655277293f27023d367753ad3de792c5d866f93dc8c" exitCode=0 Nov 27 09:24:41 crc kubenswrapper[4971]: I1127 09:24:41.508382 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-8ffng" event={"ID":"56f7e8a9-4db2-48bb-8032-0474b5096b91","Type":"ContainerDied","Data":"8e750803e64e33c418899655277293f27023d367753ad3de792c5d866f93dc8c"} Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.021250 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.068795 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ssh-key\") pod \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.068913 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ovncontroller-config-0\") pod \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.069024 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-inventory\") pod \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.069080 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ovn-combined-ca-bundle\") pod \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.069308 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ceph\") pod \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.069356 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q24g2\" (UniqueName: \"kubernetes.io/projected/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-kube-api-access-q24g2\") pod \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\" (UID: \"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba\") " Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.075969 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ceph" (OuterVolumeSpecName: "ceph") pod "7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba" (UID: "7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.077585 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba" (UID: "7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.079895 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-kube-api-access-q24g2" (OuterVolumeSpecName: "kube-api-access-q24g2") pod "7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba" (UID: "7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba"). InnerVolumeSpecName "kube-api-access-q24g2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.103893 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-inventory" (OuterVolumeSpecName: "inventory") pod "7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba" (UID: "7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.104077 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba" (UID: "7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.110109 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba" (UID: "7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.171968 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ceph\") on node \"crc\" DevicePath \"\"" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.172056 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q24g2\" (UniqueName: \"kubernetes.io/projected/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-kube-api-access-q24g2\") on node \"crc\" DevicePath \"\"" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.172073 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.172086 4971 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.172098 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.172114 4971 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.522054 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-8ffng" event={"ID":"56f7e8a9-4db2-48bb-8032-0474b5096b91","Type":"ContainerStarted","Data":"cc267926704606d10ab9fb2e9be100e8944af3eca9d7f2f2d9324fb191af9fb4"} Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.524681 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-mpk56" event={"ID":"7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba","Type":"ContainerDied","Data":"6825e4b408f3022898e3e48e21b8fbc204e408489a4ccda768f359f8d2b7d9bf"} Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.524728 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6825e4b408f3022898e3e48e21b8fbc204e408489a4ccda768f359f8d2b7d9bf" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.524756 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-mpk56" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.564708 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8ffng" podStartSLOduration=3.035656423 podStartE2EDuration="7.564676569s" podCreationTimestamp="2025-11-27 09:24:35 +0000 UTC" firstStartedPulling="2025-11-27 09:24:37.466287434 +0000 UTC m=+9115.658331362" lastFinishedPulling="2025-11-27 09:24:41.99530759 +0000 UTC m=+9120.187351508" observedRunningTime="2025-11-27 09:24:42.547464751 +0000 UTC m=+9120.739508679" watchObservedRunningTime="2025-11-27 09:24:42.564676569 +0000 UTC m=+9120.756720487" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.632624 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-mkwjr"] Nov 27 09:24:42 crc kubenswrapper[4971]: E1127 09:24:42.633163 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba" containerName="ovn-openstack-openstack-cell1" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.633187 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba" containerName="ovn-openstack-openstack-cell1" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.633465 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba" containerName="ovn-openstack-openstack-cell1" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.634360 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.637855 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.638035 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-jnkbm" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.649337 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-mkwjr"] Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.680409 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.680469 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.680497 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.680524 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.680676 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.680719 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9st4\" (UniqueName: \"kubernetes.io/projected/3f8fa824-e9d3-469b-8dec-9edc6aa63271-kube-api-access-c9st4\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.680746 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.783111 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.783216 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9st4\" (UniqueName: \"kubernetes.io/projected/3f8fa824-e9d3-469b-8dec-9edc6aa63271-kube-api-access-c9st4\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.783255 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.783370 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.783405 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.783450 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.783569 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.788476 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.790249 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.790260 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.791347 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.792493 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.796121 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.800940 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9st4\" (UniqueName: \"kubernetes.io/projected/3f8fa824-e9d3-469b-8dec-9edc6aa63271-kube-api-access-c9st4\") pod \"neutron-metadata-openstack-openstack-cell1-mkwjr\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:42 crc kubenswrapper[4971]: I1127 09:24:42.958044 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:24:43 crc kubenswrapper[4971]: I1127 09:24:43.544491 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-mkwjr"] Nov 27 09:24:43 crc kubenswrapper[4971]: I1127 09:24:43.549427 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" event={"ID":"3f8fa824-e9d3-469b-8dec-9edc6aa63271","Type":"ContainerStarted","Data":"5be302a758da96f3cac909cdf9ef304bf228eccb7fb959b1d959087d4117492e"} Nov 27 09:24:45 crc kubenswrapper[4971]: I1127 09:24:45.574253 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" event={"ID":"3f8fa824-e9d3-469b-8dec-9edc6aa63271","Type":"ContainerStarted","Data":"4e16d7356bc2d8c7f0ff8e9ae93cf9a8af424e71be65e524751bf132afdda28e"} Nov 27 09:24:45 crc kubenswrapper[4971]: I1127 09:24:45.600193 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" podStartSLOduration=3.152461778 podStartE2EDuration="3.600167362s" podCreationTimestamp="2025-11-27 09:24:42 +0000 UTC" firstStartedPulling="2025-11-27 09:24:43.543008273 +0000 UTC m=+9121.735052191" lastFinishedPulling="2025-11-27 09:24:43.990713857 +0000 UTC m=+9122.182757775" observedRunningTime="2025-11-27 09:24:45.591029742 +0000 UTC m=+9123.783073680" watchObservedRunningTime="2025-11-27 09:24:45.600167362 +0000 UTC m=+9123.792211290" Nov 27 09:24:46 crc kubenswrapper[4971]: I1127 09:24:46.048133 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8ffng" Nov 27 09:24:46 crc kubenswrapper[4971]: I1127 09:24:46.048222 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8ffng" Nov 27 09:24:46 crc kubenswrapper[4971]: I1127 09:24:46.400969 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8ffng" Nov 27 09:24:56 crc kubenswrapper[4971]: I1127 09:24:56.413219 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 09:24:56 crc kubenswrapper[4971]: I1127 09:24:56.414219 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 09:24:56 crc kubenswrapper[4971]: I1127 09:24:56.414298 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 09:24:56 crc kubenswrapper[4971]: I1127 09:24:56.416179 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 
09:24:56 crc kubenswrapper[4971]: I1127 09:24:56.416266 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" gracePeriod=600 Nov 27 09:24:56 crc kubenswrapper[4971]: I1127 09:24:56.513162 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8ffng" Nov 27 09:24:56 crc kubenswrapper[4971]: E1127 09:24:56.557405 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:24:56 crc kubenswrapper[4971]: I1127 09:24:56.588355 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8ffng"] Nov 27 09:24:56 crc kubenswrapper[4971]: I1127 09:24:56.695157 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" exitCode=0 Nov 27 09:24:56 crc kubenswrapper[4971]: I1127 09:24:56.695703 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8ffng" podUID="56f7e8a9-4db2-48bb-8032-0474b5096b91" containerName="registry-server" containerID="cri-o://cc267926704606d10ab9fb2e9be100e8944af3eca9d7f2f2d9324fb191af9fb4" gracePeriod=2 Nov 27 09:24:56 crc kubenswrapper[4971]: I1127 09:24:56.695879 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63"} Nov 27 09:24:56 crc kubenswrapper[4971]: I1127 09:24:56.695964 4971 scope.go:117] "RemoveContainer" containerID="74942ef67bc6f0078a96e89348238bb86066d83521cb4e5856153cafd601817d" Nov 27 09:24:56 crc kubenswrapper[4971]: I1127 09:24:56.696670 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:24:56 crc kubenswrapper[4971]: E1127 09:24:56.696954 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.220979 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8ffng" Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.402302 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56f7e8a9-4db2-48bb-8032-0474b5096b91-catalog-content\") pod \"56f7e8a9-4db2-48bb-8032-0474b5096b91\" (UID: \"56f7e8a9-4db2-48bb-8032-0474b5096b91\") " Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.402492 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5rrt\" (UniqueName: \"kubernetes.io/projected/56f7e8a9-4db2-48bb-8032-0474b5096b91-kube-api-access-g5rrt\") pod \"56f7e8a9-4db2-48bb-8032-0474b5096b91\" (UID: \"56f7e8a9-4db2-48bb-8032-0474b5096b91\") " Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.402689 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56f7e8a9-4db2-48bb-8032-0474b5096b91-utilities\") pod \"56f7e8a9-4db2-48bb-8032-0474b5096b91\" (UID: \"56f7e8a9-4db2-48bb-8032-0474b5096b91\") " Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.403609 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56f7e8a9-4db2-48bb-8032-0474b5096b91-utilities" (OuterVolumeSpecName: "utilities") pod "56f7e8a9-4db2-48bb-8032-0474b5096b91" (UID: "56f7e8a9-4db2-48bb-8032-0474b5096b91"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.411480 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56f7e8a9-4db2-48bb-8032-0474b5096b91-kube-api-access-g5rrt" (OuterVolumeSpecName: "kube-api-access-g5rrt") pod "56f7e8a9-4db2-48bb-8032-0474b5096b91" (UID: "56f7e8a9-4db2-48bb-8032-0474b5096b91"). InnerVolumeSpecName "kube-api-access-g5rrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.466726 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56f7e8a9-4db2-48bb-8032-0474b5096b91-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "56f7e8a9-4db2-48bb-8032-0474b5096b91" (UID: "56f7e8a9-4db2-48bb-8032-0474b5096b91"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.505754 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56f7e8a9-4db2-48bb-8032-0474b5096b91-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.505806 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56f7e8a9-4db2-48bb-8032-0474b5096b91-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.505821 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5rrt\" (UniqueName: \"kubernetes.io/projected/56f7e8a9-4db2-48bb-8032-0474b5096b91-kube-api-access-g5rrt\") on node \"crc\" DevicePath \"\"" Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.711980 4971 generic.go:334] "Generic (PLEG): container finished" podID="56f7e8a9-4db2-48bb-8032-0474b5096b91" containerID="cc267926704606d10ab9fb2e9be100e8944af3eca9d7f2f2d9324fb191af9fb4" exitCode=0 Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.712023 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8ffng" event={"ID":"56f7e8a9-4db2-48bb-8032-0474b5096b91","Type":"ContainerDied","Data":"cc267926704606d10ab9fb2e9be100e8944af3eca9d7f2f2d9324fb191af9fb4"} Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.712056 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8ffng" event={"ID":"56f7e8a9-4db2-48bb-8032-0474b5096b91","Type":"ContainerDied","Data":"a9edae8576c90eed814591412358725c980cdc4a62912839f3d84079d39bdbdf"} Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.712076 4971 scope.go:117] "RemoveContainer" containerID="cc267926704606d10ab9fb2e9be100e8944af3eca9d7f2f2d9324fb191af9fb4" Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.712211 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8ffng" Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.759234 4971 scope.go:117] "RemoveContainer" containerID="8e750803e64e33c418899655277293f27023d367753ad3de792c5d866f93dc8c" Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.759973 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8ffng"] Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.807482 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8ffng"] Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.807862 4971 scope.go:117] "RemoveContainer" containerID="d77853b01d9e59abea98b1fb58a809d801c91613b43feb78951352d08adb1568" Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.844419 4971 scope.go:117] "RemoveContainer" containerID="cc267926704606d10ab9fb2e9be100e8944af3eca9d7f2f2d9324fb191af9fb4" Nov 27 09:24:57 crc kubenswrapper[4971]: E1127 09:24:57.844776 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc267926704606d10ab9fb2e9be100e8944af3eca9d7f2f2d9324fb191af9fb4\": container with ID starting with cc267926704606d10ab9fb2e9be100e8944af3eca9d7f2f2d9324fb191af9fb4 not found: ID does not exist" containerID="cc267926704606d10ab9fb2e9be100e8944af3eca9d7f2f2d9324fb191af9fb4" Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.844818 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc267926704606d10ab9fb2e9be100e8944af3eca9d7f2f2d9324fb191af9fb4"} err="failed to get container status \"cc267926704606d10ab9fb2e9be100e8944af3eca9d7f2f2d9324fb191af9fb4\": rpc error: code = NotFound desc = could not find container \"cc267926704606d10ab9fb2e9be100e8944af3eca9d7f2f2d9324fb191af9fb4\": container with ID starting with cc267926704606d10ab9fb2e9be100e8944af3eca9d7f2f2d9324fb191af9fb4 not found: ID does not exist" Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.844847 4971 scope.go:117] "RemoveContainer" containerID="8e750803e64e33c418899655277293f27023d367753ad3de792c5d866f93dc8c" Nov 27 09:24:57 crc kubenswrapper[4971]: E1127 09:24:57.845173 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e750803e64e33c418899655277293f27023d367753ad3de792c5d866f93dc8c\": container with ID starting with 8e750803e64e33c418899655277293f27023d367753ad3de792c5d866f93dc8c not found: ID does not exist" containerID="8e750803e64e33c418899655277293f27023d367753ad3de792c5d866f93dc8c" Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.845199 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e750803e64e33c418899655277293f27023d367753ad3de792c5d866f93dc8c"} err="failed to get container status \"8e750803e64e33c418899655277293f27023d367753ad3de792c5d866f93dc8c\": rpc error: code = NotFound desc = could not find container \"8e750803e64e33c418899655277293f27023d367753ad3de792c5d866f93dc8c\": container with ID starting with 8e750803e64e33c418899655277293f27023d367753ad3de792c5d866f93dc8c not found: ID does not exist" Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.845218 4971 scope.go:117] "RemoveContainer" containerID="d77853b01d9e59abea98b1fb58a809d801c91613b43feb78951352d08adb1568" Nov 27 09:24:57 crc kubenswrapper[4971]: E1127 09:24:57.845914 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d77853b01d9e59abea98b1fb58a809d801c91613b43feb78951352d08adb1568\": container with ID starting with d77853b01d9e59abea98b1fb58a809d801c91613b43feb78951352d08adb1568 not found: ID does not exist" containerID="d77853b01d9e59abea98b1fb58a809d801c91613b43feb78951352d08adb1568" Nov 27 09:24:57 crc kubenswrapper[4971]: I1127 09:24:57.845942 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d77853b01d9e59abea98b1fb58a809d801c91613b43feb78951352d08adb1568"} err="failed to get container status \"d77853b01d9e59abea98b1fb58a809d801c91613b43feb78951352d08adb1568\": rpc error: code = NotFound desc = could not find container \"d77853b01d9e59abea98b1fb58a809d801c91613b43feb78951352d08adb1568\": container with ID starting with d77853b01d9e59abea98b1fb58a809d801c91613b43feb78951352d08adb1568 not found: ID does not exist" Nov 27 09:24:58 crc kubenswrapper[4971]: I1127 09:24:58.564932 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56f7e8a9-4db2-48bb-8032-0474b5096b91" path="/var/lib/kubelet/pods/56f7e8a9-4db2-48bb-8032-0474b5096b91/volumes" Nov 27 09:25:08 crc kubenswrapper[4971]: I1127 09:25:08.551414 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:25:08 crc kubenswrapper[4971]: E1127 09:25:08.552791 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:25:09 crc kubenswrapper[4971]: I1127 09:25:09.849661 4971 generic.go:334] "Generic (PLEG): container finished" podID="69e0000b-dec5-4186-85ac-b65c6091f4d8" containerID="a8ccbce1edcbc0aefe16ff94c4ce846f8c1eeb09e45912ea427893441a6719b2" exitCode=0 Nov 27 09:25:09 crc kubenswrapper[4971]: I1127 09:25:09.849732 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" event={"ID":"69e0000b-dec5-4186-85ac-b65c6091f4d8","Type":"ContainerDied","Data":"a8ccbce1edcbc0aefe16ff94c4ce846f8c1eeb09e45912ea427893441a6719b2"} Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.384073 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.537788 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-inventory\") pod \"69e0000b-dec5-4186-85ac-b65c6091f4d8\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.537954 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-neutron-metadata-combined-ca-bundle\") pod \"69e0000b-dec5-4186-85ac-b65c6091f4d8\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.538043 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-neutron-ovn-metadata-agent-neutron-config-0\") pod \"69e0000b-dec5-4186-85ac-b65c6091f4d8\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.538148 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-nova-metadata-neutron-config-0\") pod \"69e0000b-dec5-4186-85ac-b65c6091f4d8\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.538230 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-ssh-key\") pod \"69e0000b-dec5-4186-85ac-b65c6091f4d8\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.538332 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rw57m\" (UniqueName: \"kubernetes.io/projected/69e0000b-dec5-4186-85ac-b65c6091f4d8-kube-api-access-rw57m\") pod \"69e0000b-dec5-4186-85ac-b65c6091f4d8\" (UID: \"69e0000b-dec5-4186-85ac-b65c6091f4d8\") " Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.545909 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "69e0000b-dec5-4186-85ac-b65c6091f4d8" (UID: "69e0000b-dec5-4186-85ac-b65c6091f4d8"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.547059 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69e0000b-dec5-4186-85ac-b65c6091f4d8-kube-api-access-rw57m" (OuterVolumeSpecName: "kube-api-access-rw57m") pod "69e0000b-dec5-4186-85ac-b65c6091f4d8" (UID: "69e0000b-dec5-4186-85ac-b65c6091f4d8"). InnerVolumeSpecName "kube-api-access-rw57m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.577603 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "69e0000b-dec5-4186-85ac-b65c6091f4d8" (UID: "69e0000b-dec5-4186-85ac-b65c6091f4d8"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.580076 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-inventory" (OuterVolumeSpecName: "inventory") pod "69e0000b-dec5-4186-85ac-b65c6091f4d8" (UID: "69e0000b-dec5-4186-85ac-b65c6091f4d8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.580205 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "69e0000b-dec5-4186-85ac-b65c6091f4d8" (UID: "69e0000b-dec5-4186-85ac-b65c6091f4d8"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.587324 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "69e0000b-dec5-4186-85ac-b65c6091f4d8" (UID: "69e0000b-dec5-4186-85ac-b65c6091f4d8"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.641836 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.641881 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rw57m\" (UniqueName: \"kubernetes.io/projected/69e0000b-dec5-4186-85ac-b65c6091f4d8-kube-api-access-rw57m\") on node \"crc\" DevicePath \"\"" Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.641895 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.641907 4971 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.641920 4971 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.641930 4971 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69e0000b-dec5-4186-85ac-b65c6091f4d8-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.877177 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" event={"ID":"69e0000b-dec5-4186-85ac-b65c6091f4d8","Type":"ContainerDied","Data":"7406e192e8f48580b31402e656f80817ebaa5f81e6d6cd011a3fdceacdfa2860"} Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.877495 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7406e192e8f48580b31402e656f80817ebaa5f81e6d6cd011a3fdceacdfa2860" Nov 27 09:25:11 crc kubenswrapper[4971]: I1127 09:25:11.877598 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-networker-fqnz2" Nov 27 09:25:23 crc kubenswrapper[4971]: I1127 09:25:23.551632 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:25:23 crc kubenswrapper[4971]: E1127 09:25:23.552515 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:25:37 crc kubenswrapper[4971]: I1127 09:25:37.550211 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:25:37 crc kubenswrapper[4971]: E1127 09:25:37.551375 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:25:45 crc kubenswrapper[4971]: I1127 09:25:45.290610 4971 generic.go:334] "Generic (PLEG): container finished" podID="3f8fa824-e9d3-469b-8dec-9edc6aa63271" containerID="4e16d7356bc2d8c7f0ff8e9ae93cf9a8af424e71be65e524751bf132afdda28e" exitCode=0 Nov 27 09:25:45 crc kubenswrapper[4971]: I1127 09:25:45.290701 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" event={"ID":"3f8fa824-e9d3-469b-8dec-9edc6aa63271","Type":"ContainerDied","Data":"4e16d7356bc2d8c7f0ff8e9ae93cf9a8af424e71be65e524751bf132afdda28e"} Nov 27 09:25:46 crc kubenswrapper[4971]: I1127 09:25:46.887349 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:25:46 crc kubenswrapper[4971]: I1127 09:25:46.918262 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-ssh-key\") pod \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " Nov 27 09:25:46 crc kubenswrapper[4971]: I1127 09:25:46.918350 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-inventory\") pod \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " Nov 27 09:25:46 crc kubenswrapper[4971]: I1127 09:25:46.918401 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-neutron-metadata-combined-ca-bundle\") pod \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " Nov 27 09:25:46 crc kubenswrapper[4971]: I1127 09:25:46.918522 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-nova-metadata-neutron-config-0\") pod \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " Nov 27 09:25:46 crc kubenswrapper[4971]: I1127 09:25:46.918589 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9st4\" (UniqueName: \"kubernetes.io/projected/3f8fa824-e9d3-469b-8dec-9edc6aa63271-kube-api-access-c9st4\") pod \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " Nov 27 09:25:46 crc kubenswrapper[4971]: I1127 09:25:46.918647 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-ceph\") pod \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " Nov 27 09:25:46 crc kubenswrapper[4971]: I1127 09:25:46.918698 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-neutron-ovn-metadata-agent-neutron-config-0\") pod \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\" (UID: \"3f8fa824-e9d3-469b-8dec-9edc6aa63271\") " Nov 27 09:25:46 crc kubenswrapper[4971]: I1127 09:25:46.926241 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-ceph" (OuterVolumeSpecName: "ceph") pod "3f8fa824-e9d3-469b-8dec-9edc6aa63271" (UID: "3f8fa824-e9d3-469b-8dec-9edc6aa63271"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:25:46 crc kubenswrapper[4971]: I1127 09:25:46.930184 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "3f8fa824-e9d3-469b-8dec-9edc6aa63271" (UID: "3f8fa824-e9d3-469b-8dec-9edc6aa63271"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:25:46 crc kubenswrapper[4971]: I1127 09:25:46.933128 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f8fa824-e9d3-469b-8dec-9edc6aa63271-kube-api-access-c9st4" (OuterVolumeSpecName: "kube-api-access-c9st4") pod "3f8fa824-e9d3-469b-8dec-9edc6aa63271" (UID: "3f8fa824-e9d3-469b-8dec-9edc6aa63271"). InnerVolumeSpecName "kube-api-access-c9st4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:25:46 crc kubenswrapper[4971]: I1127 09:25:46.959438 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "3f8fa824-e9d3-469b-8dec-9edc6aa63271" (UID: "3f8fa824-e9d3-469b-8dec-9edc6aa63271"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:25:46 crc kubenswrapper[4971]: I1127 09:25:46.962296 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-inventory" (OuterVolumeSpecName: "inventory") pod "3f8fa824-e9d3-469b-8dec-9edc6aa63271" (UID: "3f8fa824-e9d3-469b-8dec-9edc6aa63271"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:25:46 crc kubenswrapper[4971]: I1127 09:25:46.971118 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "3f8fa824-e9d3-469b-8dec-9edc6aa63271" (UID: "3f8fa824-e9d3-469b-8dec-9edc6aa63271"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:25:46 crc kubenswrapper[4971]: I1127 09:25:46.982207 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3f8fa824-e9d3-469b-8dec-9edc6aa63271" (UID: "3f8fa824-e9d3-469b-8dec-9edc6aa63271"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.022519 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.022910 4971 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.022983 4971 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.023248 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9st4\" (UniqueName: \"kubernetes.io/projected/3f8fa824-e9d3-469b-8dec-9edc6aa63271-kube-api-access-c9st4\") on node \"crc\" DevicePath \"\"" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.023307 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-ceph\") on node \"crc\" DevicePath \"\"" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.023366 4971 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.023422 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3f8fa824-e9d3-469b-8dec-9edc6aa63271-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.319740 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" event={"ID":"3f8fa824-e9d3-469b-8dec-9edc6aa63271","Type":"ContainerDied","Data":"5be302a758da96f3cac909cdf9ef304bf228eccb7fb959b1d959087d4117492e"} Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.319805 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5be302a758da96f3cac909cdf9ef304bf228eccb7fb959b1d959087d4117492e" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.319815 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-mkwjr" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.432830 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-hhpn7"] Nov 27 09:25:47 crc kubenswrapper[4971]: E1127 09:25:47.433473 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56f7e8a9-4db2-48bb-8032-0474b5096b91" containerName="registry-server" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.433498 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="56f7e8a9-4db2-48bb-8032-0474b5096b91" containerName="registry-server" Nov 27 09:25:47 crc kubenswrapper[4971]: E1127 09:25:47.433573 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56f7e8a9-4db2-48bb-8032-0474b5096b91" containerName="extract-content" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.433581 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="56f7e8a9-4db2-48bb-8032-0474b5096b91" containerName="extract-content" Nov 27 09:25:47 crc kubenswrapper[4971]: E1127 09:25:47.433593 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f8fa824-e9d3-469b-8dec-9edc6aa63271" containerName="neutron-metadata-openstack-openstack-cell1" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.433601 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f8fa824-e9d3-469b-8dec-9edc6aa63271" containerName="neutron-metadata-openstack-openstack-cell1" Nov 27 09:25:47 crc kubenswrapper[4971]: E1127 09:25:47.433612 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69e0000b-dec5-4186-85ac-b65c6091f4d8" containerName="neutron-metadata-openstack-openstack-networker" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.433621 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="69e0000b-dec5-4186-85ac-b65c6091f4d8" containerName="neutron-metadata-openstack-openstack-networker" Nov 27 09:25:47 crc kubenswrapper[4971]: E1127 09:25:47.433647 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56f7e8a9-4db2-48bb-8032-0474b5096b91" containerName="extract-utilities" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.433653 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="56f7e8a9-4db2-48bb-8032-0474b5096b91" containerName="extract-utilities" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.433874 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="69e0000b-dec5-4186-85ac-b65c6091f4d8" containerName="neutron-metadata-openstack-openstack-networker" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.433889 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f8fa824-e9d3-469b-8dec-9edc6aa63271" containerName="neutron-metadata-openstack-openstack-cell1" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.433905 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="56f7e8a9-4db2-48bb-8032-0474b5096b91" containerName="registry-server" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.434957 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.438857 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.439526 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-jnkbm" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.441026 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.441053 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.441522 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.461991 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-hhpn7"] Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.537440 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-inventory\") pod \"libvirt-openstack-openstack-cell1-hhpn7\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.537893 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-hhpn7\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.538040 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-hhpn7\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.538169 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2ngz\" (UniqueName: \"kubernetes.io/projected/780772cd-49b3-471d-b7a6-cdbde5c28c7a-kube-api-access-q2ngz\") pod \"libvirt-openstack-openstack-cell1-hhpn7\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.538422 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-ceph\") pod \"libvirt-openstack-openstack-cell1-hhpn7\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.538628 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-ssh-key\") pod 
\"libvirt-openstack-openstack-cell1-hhpn7\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.640058 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-inventory\") pod \"libvirt-openstack-openstack-cell1-hhpn7\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.640157 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-hhpn7\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.640197 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-hhpn7\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.640252 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2ngz\" (UniqueName: \"kubernetes.io/projected/780772cd-49b3-471d-b7a6-cdbde5c28c7a-kube-api-access-q2ngz\") pod \"libvirt-openstack-openstack-cell1-hhpn7\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.640322 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-ceph\") pod \"libvirt-openstack-openstack-cell1-hhpn7\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.640368 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-ssh-key\") pod \"libvirt-openstack-openstack-cell1-hhpn7\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.646311 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-hhpn7\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.647195 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-ceph\") pod \"libvirt-openstack-openstack-cell1-hhpn7\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.648001 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-ssh-key\") pod \"libvirt-openstack-openstack-cell1-hhpn7\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.648394 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-inventory\") pod \"libvirt-openstack-openstack-cell1-hhpn7\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.648521 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-hhpn7\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.663375 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2ngz\" (UniqueName: \"kubernetes.io/projected/780772cd-49b3-471d-b7a6-cdbde5c28c7a-kube-api-access-q2ngz\") pod \"libvirt-openstack-openstack-cell1-hhpn7\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:47 crc kubenswrapper[4971]: I1127 09:25:47.764027 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:25:48 crc kubenswrapper[4971]: I1127 09:25:48.372818 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-hhpn7"] Nov 27 09:25:48 crc kubenswrapper[4971]: I1127 09:25:48.551073 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:25:48 crc kubenswrapper[4971]: E1127 09:25:48.551749 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:25:49 crc kubenswrapper[4971]: I1127 09:25:49.344880 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" event={"ID":"780772cd-49b3-471d-b7a6-cdbde5c28c7a","Type":"ContainerStarted","Data":"770811c78b6be84ee6f16c0f6c8b66ae68fcbb3d54208a7e1692f2460890c115"} Nov 27 09:25:50 crc kubenswrapper[4971]: I1127 09:25:50.356870 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" event={"ID":"780772cd-49b3-471d-b7a6-cdbde5c28c7a","Type":"ContainerStarted","Data":"83746bf3e172651d3a4195169fee15c919cc61a386b437d22d421b06ccf2cdaa"} Nov 27 09:25:50 crc kubenswrapper[4971]: I1127 09:25:50.389291 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" podStartSLOduration=2.3623289720000002 podStartE2EDuration="3.389270175s" podCreationTimestamp="2025-11-27 09:25:47 +0000 UTC" firstStartedPulling="2025-11-27 09:25:48.376579039 +0000 UTC m=+9186.568622957" 
lastFinishedPulling="2025-11-27 09:25:49.403520242 +0000 UTC m=+9187.595564160" observedRunningTime="2025-11-27 09:25:50.383045488 +0000 UTC m=+9188.575089416" watchObservedRunningTime="2025-11-27 09:25:50.389270175 +0000 UTC m=+9188.581314083" Nov 27 09:25:59 crc kubenswrapper[4971]: I1127 09:25:59.550794 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:25:59 crc kubenswrapper[4971]: E1127 09:25:59.552734 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:26:12 crc kubenswrapper[4971]: I1127 09:26:12.558719 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:26:12 crc kubenswrapper[4971]: E1127 09:26:12.559610 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:26:26 crc kubenswrapper[4971]: I1127 09:26:26.550214 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:26:26 crc kubenswrapper[4971]: E1127 09:26:26.551077 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:26:40 crc kubenswrapper[4971]: I1127 09:26:40.550727 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:26:40 crc kubenswrapper[4971]: E1127 09:26:40.551731 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:26:52 crc kubenswrapper[4971]: I1127 09:26:52.551504 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:26:52 crc kubenswrapper[4971]: E1127 09:26:52.552475 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:27:07 crc kubenswrapper[4971]: I1127 09:27:07.550959 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:27:07 crc kubenswrapper[4971]: E1127 09:27:07.552032 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:27:19 crc kubenswrapper[4971]: I1127 09:27:19.553237 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:27:19 crc kubenswrapper[4971]: E1127 09:27:19.555118 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:27:34 crc kubenswrapper[4971]: I1127 09:27:34.551407 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:27:34 crc kubenswrapper[4971]: E1127 09:27:34.552432 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:27:49 crc kubenswrapper[4971]: I1127 09:27:49.550335 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:27:49 crc kubenswrapper[4971]: E1127 09:27:49.551136 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:28:04 crc kubenswrapper[4971]: I1127 09:28:04.554065 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:28:04 crc kubenswrapper[4971]: E1127 09:28:04.566867 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:28:17 crc kubenswrapper[4971]: I1127 09:28:17.551442 4971 
scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:28:17 crc kubenswrapper[4971]: E1127 09:28:17.553222 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:28:32 crc kubenswrapper[4971]: I1127 09:28:32.559295 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:28:32 crc kubenswrapper[4971]: E1127 09:28:32.560206 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:28:47 crc kubenswrapper[4971]: I1127 09:28:47.551023 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:28:47 crc kubenswrapper[4971]: E1127 09:28:47.552043 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:29:02 crc kubenswrapper[4971]: I1127 09:29:02.550701 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:29:02 crc kubenswrapper[4971]: E1127 09:29:02.551748 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:29:11 crc kubenswrapper[4971]: I1127 09:29:11.593353 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rml6l"] Nov 27 09:29:11 crc kubenswrapper[4971]: I1127 09:29:11.597106 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rml6l" Nov 27 09:29:11 crc kubenswrapper[4971]: I1127 09:29:11.631989 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rml6l"] Nov 27 09:29:11 crc kubenswrapper[4971]: I1127 09:29:11.707441 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ccdc0e9-1132-4ee5-892d-fe1e9baddc32-catalog-content\") pod \"certified-operators-rml6l\" (UID: \"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32\") " pod="openshift-marketplace/certified-operators-rml6l" Nov 27 09:29:11 crc kubenswrapper[4971]: I1127 09:29:11.707597 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2rsb\" (UniqueName: \"kubernetes.io/projected/0ccdc0e9-1132-4ee5-892d-fe1e9baddc32-kube-api-access-t2rsb\") pod \"certified-operators-rml6l\" (UID: \"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32\") " pod="openshift-marketplace/certified-operators-rml6l" Nov 27 09:29:11 crc kubenswrapper[4971]: I1127 09:29:11.707700 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ccdc0e9-1132-4ee5-892d-fe1e9baddc32-utilities\") pod \"certified-operators-rml6l\" (UID: \"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32\") " pod="openshift-marketplace/certified-operators-rml6l" Nov 27 09:29:11 crc kubenswrapper[4971]: I1127 09:29:11.809361 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2rsb\" (UniqueName: \"kubernetes.io/projected/0ccdc0e9-1132-4ee5-892d-fe1e9baddc32-kube-api-access-t2rsb\") pod \"certified-operators-rml6l\" (UID: \"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32\") " pod="openshift-marketplace/certified-operators-rml6l" Nov 27 09:29:11 crc kubenswrapper[4971]: I1127 09:29:11.809517 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ccdc0e9-1132-4ee5-892d-fe1e9baddc32-utilities\") pod \"certified-operators-rml6l\" (UID: \"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32\") " pod="openshift-marketplace/certified-operators-rml6l" Nov 27 09:29:11 crc kubenswrapper[4971]: I1127 09:29:11.809601 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ccdc0e9-1132-4ee5-892d-fe1e9baddc32-catalog-content\") pod \"certified-operators-rml6l\" (UID: \"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32\") " pod="openshift-marketplace/certified-operators-rml6l" Nov 27 09:29:11 crc kubenswrapper[4971]: I1127 09:29:11.810227 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ccdc0e9-1132-4ee5-892d-fe1e9baddc32-catalog-content\") pod \"certified-operators-rml6l\" (UID: \"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32\") " pod="openshift-marketplace/certified-operators-rml6l" Nov 27 09:29:11 crc kubenswrapper[4971]: I1127 09:29:11.810228 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ccdc0e9-1132-4ee5-892d-fe1e9baddc32-utilities\") pod \"certified-operators-rml6l\" (UID: \"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32\") " pod="openshift-marketplace/certified-operators-rml6l" Nov 27 09:29:11 crc kubenswrapper[4971]: I1127 09:29:11.832144 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-t2rsb\" (UniqueName: \"kubernetes.io/projected/0ccdc0e9-1132-4ee5-892d-fe1e9baddc32-kube-api-access-t2rsb\") pod \"certified-operators-rml6l\" (UID: \"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32\") " pod="openshift-marketplace/certified-operators-rml6l" Nov 27 09:29:11 crc kubenswrapper[4971]: I1127 09:29:11.934389 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rml6l" Nov 27 09:29:12 crc kubenswrapper[4971]: I1127 09:29:12.523828 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rml6l"] Nov 27 09:29:12 crc kubenswrapper[4971]: I1127 09:29:12.674907 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rml6l" event={"ID":"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32","Type":"ContainerStarted","Data":"df93ea40a27463627a8bd17da6365c6c0981e5f52901bfaf4c692f2cbe8606ac"} Nov 27 09:29:13 crc kubenswrapper[4971]: I1127 09:29:13.691700 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ccdc0e9-1132-4ee5-892d-fe1e9baddc32" containerID="55d5deb99f161c1be36331c4dbedd3d30a4af1245b1079e3ba94affe5023b474" exitCode=0 Nov 27 09:29:13 crc kubenswrapper[4971]: I1127 09:29:13.691763 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rml6l" event={"ID":"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32","Type":"ContainerDied","Data":"55d5deb99f161c1be36331c4dbedd3d30a4af1245b1079e3ba94affe5023b474"} Nov 27 09:29:14 crc kubenswrapper[4971]: I1127 09:29:14.706283 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rml6l" event={"ID":"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32","Type":"ContainerStarted","Data":"e47fe4911898f05949ae07f5d96aa3945ae8327753c75729fabd3d6c8155a19a"} Nov 27 09:29:15 crc kubenswrapper[4971]: I1127 09:29:15.732928 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ccdc0e9-1132-4ee5-892d-fe1e9baddc32" containerID="e47fe4911898f05949ae07f5d96aa3945ae8327753c75729fabd3d6c8155a19a" exitCode=0 Nov 27 09:29:15 crc kubenswrapper[4971]: I1127 09:29:15.732993 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rml6l" event={"ID":"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32","Type":"ContainerDied","Data":"e47fe4911898f05949ae07f5d96aa3945ae8327753c75729fabd3d6c8155a19a"} Nov 27 09:29:16 crc kubenswrapper[4971]: I1127 09:29:16.552914 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:29:16 crc kubenswrapper[4971]: E1127 09:29:16.553711 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:29:16 crc kubenswrapper[4971]: I1127 09:29:16.748789 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rml6l" event={"ID":"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32","Type":"ContainerStarted","Data":"fc144a705243b576bdb1567efa73cd675713bf6fd6a016281ebf0b722667c9e9"} Nov 27 09:29:16 crc kubenswrapper[4971]: I1127 09:29:16.785296 4971 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rml6l" podStartSLOduration=3.154896994 podStartE2EDuration="5.785266163s" podCreationTimestamp="2025-11-27 09:29:11 +0000 UTC" firstStartedPulling="2025-11-27 09:29:13.694148111 +0000 UTC m=+9391.886192029" lastFinishedPulling="2025-11-27 09:29:16.32451728 +0000 UTC m=+9394.516561198" observedRunningTime="2025-11-27 09:29:16.776863885 +0000 UTC m=+9394.968907803" watchObservedRunningTime="2025-11-27 09:29:16.785266163 +0000 UTC m=+9394.977310101" Nov 27 09:29:21 crc kubenswrapper[4971]: I1127 09:29:21.935632 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rml6l" Nov 27 09:29:21 crc kubenswrapper[4971]: I1127 09:29:21.937908 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rml6l" Nov 27 09:29:21 crc kubenswrapper[4971]: I1127 09:29:21.990397 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rml6l" Nov 27 09:29:22 crc kubenswrapper[4971]: I1127 09:29:22.865918 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rml6l" Nov 27 09:29:22 crc kubenswrapper[4971]: I1127 09:29:22.918873 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rml6l"] Nov 27 09:29:24 crc kubenswrapper[4971]: I1127 09:29:24.845806 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rml6l" podUID="0ccdc0e9-1132-4ee5-892d-fe1e9baddc32" containerName="registry-server" containerID="cri-o://fc144a705243b576bdb1567efa73cd675713bf6fd6a016281ebf0b722667c9e9" gracePeriod=2 Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.399465 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rml6l" Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.591647 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t2rsb\" (UniqueName: \"kubernetes.io/projected/0ccdc0e9-1132-4ee5-892d-fe1e9baddc32-kube-api-access-t2rsb\") pod \"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32\" (UID: \"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32\") " Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.591729 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ccdc0e9-1132-4ee5-892d-fe1e9baddc32-utilities\") pod \"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32\" (UID: \"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32\") " Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.591976 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ccdc0e9-1132-4ee5-892d-fe1e9baddc32-catalog-content\") pod \"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32\" (UID: \"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32\") " Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.593144 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ccdc0e9-1132-4ee5-892d-fe1e9baddc32-utilities" (OuterVolumeSpecName: "utilities") pod "0ccdc0e9-1132-4ee5-892d-fe1e9baddc32" (UID: "0ccdc0e9-1132-4ee5-892d-fe1e9baddc32"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.594290 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ccdc0e9-1132-4ee5-892d-fe1e9baddc32-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.601821 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ccdc0e9-1132-4ee5-892d-fe1e9baddc32-kube-api-access-t2rsb" (OuterVolumeSpecName: "kube-api-access-t2rsb") pod "0ccdc0e9-1132-4ee5-892d-fe1e9baddc32" (UID: "0ccdc0e9-1132-4ee5-892d-fe1e9baddc32"). InnerVolumeSpecName "kube-api-access-t2rsb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.662396 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ccdc0e9-1132-4ee5-892d-fe1e9baddc32-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0ccdc0e9-1132-4ee5-892d-fe1e9baddc32" (UID: "0ccdc0e9-1132-4ee5-892d-fe1e9baddc32"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.697488 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ccdc0e9-1132-4ee5-892d-fe1e9baddc32-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.697549 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t2rsb\" (UniqueName: \"kubernetes.io/projected/0ccdc0e9-1132-4ee5-892d-fe1e9baddc32-kube-api-access-t2rsb\") on node \"crc\" DevicePath \"\"" Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.857405 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ccdc0e9-1132-4ee5-892d-fe1e9baddc32" containerID="fc144a705243b576bdb1567efa73cd675713bf6fd6a016281ebf0b722667c9e9" exitCode=0 Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.857479 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rml6l" event={"ID":"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32","Type":"ContainerDied","Data":"fc144a705243b576bdb1567efa73cd675713bf6fd6a016281ebf0b722667c9e9"} Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.857503 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rml6l" Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.857616 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rml6l" event={"ID":"0ccdc0e9-1132-4ee5-892d-fe1e9baddc32","Type":"ContainerDied","Data":"df93ea40a27463627a8bd17da6365c6c0981e5f52901bfaf4c692f2cbe8606ac"} Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.857666 4971 scope.go:117] "RemoveContainer" containerID="fc144a705243b576bdb1567efa73cd675713bf6fd6a016281ebf0b722667c9e9" Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.881514 4971 scope.go:117] "RemoveContainer" containerID="e47fe4911898f05949ae07f5d96aa3945ae8327753c75729fabd3d6c8155a19a" Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.904408 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rml6l"] Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.916727 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rml6l"] Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.921630 4971 scope.go:117] "RemoveContainer" containerID="55d5deb99f161c1be36331c4dbedd3d30a4af1245b1079e3ba94affe5023b474" Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.978295 4971 scope.go:117] "RemoveContainer" containerID="fc144a705243b576bdb1567efa73cd675713bf6fd6a016281ebf0b722667c9e9" Nov 27 09:29:25 crc kubenswrapper[4971]: E1127 09:29:25.978822 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc144a705243b576bdb1567efa73cd675713bf6fd6a016281ebf0b722667c9e9\": container with ID starting with fc144a705243b576bdb1567efa73cd675713bf6fd6a016281ebf0b722667c9e9 not found: ID does not exist" containerID="fc144a705243b576bdb1567efa73cd675713bf6fd6a016281ebf0b722667c9e9" Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.978860 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc144a705243b576bdb1567efa73cd675713bf6fd6a016281ebf0b722667c9e9"} err="failed to get container status \"fc144a705243b576bdb1567efa73cd675713bf6fd6a016281ebf0b722667c9e9\": rpc error: code = NotFound desc = could not find container \"fc144a705243b576bdb1567efa73cd675713bf6fd6a016281ebf0b722667c9e9\": container with ID starting with fc144a705243b576bdb1567efa73cd675713bf6fd6a016281ebf0b722667c9e9 not found: ID does not exist" Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.978887 4971 scope.go:117] "RemoveContainer" containerID="e47fe4911898f05949ae07f5d96aa3945ae8327753c75729fabd3d6c8155a19a" Nov 27 09:29:25 crc kubenswrapper[4971]: E1127 09:29:25.979144 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e47fe4911898f05949ae07f5d96aa3945ae8327753c75729fabd3d6c8155a19a\": container with ID starting with e47fe4911898f05949ae07f5d96aa3945ae8327753c75729fabd3d6c8155a19a not found: ID does not exist" containerID="e47fe4911898f05949ae07f5d96aa3945ae8327753c75729fabd3d6c8155a19a" Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.979170 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e47fe4911898f05949ae07f5d96aa3945ae8327753c75729fabd3d6c8155a19a"} err="failed to get container status \"e47fe4911898f05949ae07f5d96aa3945ae8327753c75729fabd3d6c8155a19a\": rpc error: code = NotFound desc = could not find 
container \"e47fe4911898f05949ae07f5d96aa3945ae8327753c75729fabd3d6c8155a19a\": container with ID starting with e47fe4911898f05949ae07f5d96aa3945ae8327753c75729fabd3d6c8155a19a not found: ID does not exist" Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.979183 4971 scope.go:117] "RemoveContainer" containerID="55d5deb99f161c1be36331c4dbedd3d30a4af1245b1079e3ba94affe5023b474" Nov 27 09:29:25 crc kubenswrapper[4971]: E1127 09:29:25.979409 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55d5deb99f161c1be36331c4dbedd3d30a4af1245b1079e3ba94affe5023b474\": container with ID starting with 55d5deb99f161c1be36331c4dbedd3d30a4af1245b1079e3ba94affe5023b474 not found: ID does not exist" containerID="55d5deb99f161c1be36331c4dbedd3d30a4af1245b1079e3ba94affe5023b474" Nov 27 09:29:25 crc kubenswrapper[4971]: I1127 09:29:25.979438 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55d5deb99f161c1be36331c4dbedd3d30a4af1245b1079e3ba94affe5023b474"} err="failed to get container status \"55d5deb99f161c1be36331c4dbedd3d30a4af1245b1079e3ba94affe5023b474\": rpc error: code = NotFound desc = could not find container \"55d5deb99f161c1be36331c4dbedd3d30a4af1245b1079e3ba94affe5023b474\": container with ID starting with 55d5deb99f161c1be36331c4dbedd3d30a4af1245b1079e3ba94affe5023b474 not found: ID does not exist" Nov 27 09:29:26 crc kubenswrapper[4971]: I1127 09:29:26.562869 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ccdc0e9-1132-4ee5-892d-fe1e9baddc32" path="/var/lib/kubelet/pods/0ccdc0e9-1132-4ee5-892d-fe1e9baddc32/volumes" Nov 27 09:29:29 crc kubenswrapper[4971]: I1127 09:29:29.550967 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:29:29 crc kubenswrapper[4971]: E1127 09:29:29.551942 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:29:42 crc kubenswrapper[4971]: I1127 09:29:42.561691 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:29:42 crc kubenswrapper[4971]: E1127 09:29:42.562793 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:29:47 crc kubenswrapper[4971]: I1127 09:29:47.136042 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4ddwh"] Nov 27 09:29:47 crc kubenswrapper[4971]: E1127 09:29:47.137313 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ccdc0e9-1132-4ee5-892d-fe1e9baddc32" containerName="extract-utilities" Nov 27 09:29:47 crc kubenswrapper[4971]: I1127 09:29:47.137339 4971 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="0ccdc0e9-1132-4ee5-892d-fe1e9baddc32" containerName="extract-utilities" Nov 27 09:29:47 crc kubenswrapper[4971]: E1127 09:29:47.137361 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ccdc0e9-1132-4ee5-892d-fe1e9baddc32" containerName="extract-content" Nov 27 09:29:47 crc kubenswrapper[4971]: I1127 09:29:47.137371 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ccdc0e9-1132-4ee5-892d-fe1e9baddc32" containerName="extract-content" Nov 27 09:29:47 crc kubenswrapper[4971]: E1127 09:29:47.137388 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ccdc0e9-1132-4ee5-892d-fe1e9baddc32" containerName="registry-server" Nov 27 09:29:47 crc kubenswrapper[4971]: I1127 09:29:47.137398 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ccdc0e9-1132-4ee5-892d-fe1e9baddc32" containerName="registry-server" Nov 27 09:29:47 crc kubenswrapper[4971]: I1127 09:29:47.137740 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ccdc0e9-1132-4ee5-892d-fe1e9baddc32" containerName="registry-server" Nov 27 09:29:47 crc kubenswrapper[4971]: I1127 09:29:47.140171 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4ddwh" Nov 27 09:29:47 crc kubenswrapper[4971]: I1127 09:29:47.154240 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4ddwh"] Nov 27 09:29:47 crc kubenswrapper[4971]: I1127 09:29:47.229843 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb1df5af-eef3-4367-aa86-a48187caf2f3-utilities\") pod \"redhat-marketplace-4ddwh\" (UID: \"cb1df5af-eef3-4367-aa86-a48187caf2f3\") " pod="openshift-marketplace/redhat-marketplace-4ddwh" Nov 27 09:29:47 crc kubenswrapper[4971]: I1127 09:29:47.229961 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb1df5af-eef3-4367-aa86-a48187caf2f3-catalog-content\") pod \"redhat-marketplace-4ddwh\" (UID: \"cb1df5af-eef3-4367-aa86-a48187caf2f3\") " pod="openshift-marketplace/redhat-marketplace-4ddwh" Nov 27 09:29:47 crc kubenswrapper[4971]: I1127 09:29:47.230745 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktjkw\" (UniqueName: \"kubernetes.io/projected/cb1df5af-eef3-4367-aa86-a48187caf2f3-kube-api-access-ktjkw\") pod \"redhat-marketplace-4ddwh\" (UID: \"cb1df5af-eef3-4367-aa86-a48187caf2f3\") " pod="openshift-marketplace/redhat-marketplace-4ddwh" Nov 27 09:29:47 crc kubenswrapper[4971]: I1127 09:29:47.333536 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktjkw\" (UniqueName: \"kubernetes.io/projected/cb1df5af-eef3-4367-aa86-a48187caf2f3-kube-api-access-ktjkw\") pod \"redhat-marketplace-4ddwh\" (UID: \"cb1df5af-eef3-4367-aa86-a48187caf2f3\") " pod="openshift-marketplace/redhat-marketplace-4ddwh" Nov 27 09:29:47 crc kubenswrapper[4971]: I1127 09:29:47.333784 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb1df5af-eef3-4367-aa86-a48187caf2f3-utilities\") pod \"redhat-marketplace-4ddwh\" (UID: \"cb1df5af-eef3-4367-aa86-a48187caf2f3\") " pod="openshift-marketplace/redhat-marketplace-4ddwh" Nov 27 09:29:47 crc kubenswrapper[4971]: I1127 09:29:47.333867 4971 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb1df5af-eef3-4367-aa86-a48187caf2f3-catalog-content\") pod \"redhat-marketplace-4ddwh\" (UID: \"cb1df5af-eef3-4367-aa86-a48187caf2f3\") " pod="openshift-marketplace/redhat-marketplace-4ddwh" Nov 27 09:29:47 crc kubenswrapper[4971]: I1127 09:29:47.334531 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb1df5af-eef3-4367-aa86-a48187caf2f3-catalog-content\") pod \"redhat-marketplace-4ddwh\" (UID: \"cb1df5af-eef3-4367-aa86-a48187caf2f3\") " pod="openshift-marketplace/redhat-marketplace-4ddwh" Nov 27 09:29:47 crc kubenswrapper[4971]: I1127 09:29:47.334621 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb1df5af-eef3-4367-aa86-a48187caf2f3-utilities\") pod \"redhat-marketplace-4ddwh\" (UID: \"cb1df5af-eef3-4367-aa86-a48187caf2f3\") " pod="openshift-marketplace/redhat-marketplace-4ddwh" Nov 27 09:29:47 crc kubenswrapper[4971]: I1127 09:29:47.361150 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktjkw\" (UniqueName: \"kubernetes.io/projected/cb1df5af-eef3-4367-aa86-a48187caf2f3-kube-api-access-ktjkw\") pod \"redhat-marketplace-4ddwh\" (UID: \"cb1df5af-eef3-4367-aa86-a48187caf2f3\") " pod="openshift-marketplace/redhat-marketplace-4ddwh" Nov 27 09:29:47 crc kubenswrapper[4971]: I1127 09:29:47.467101 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4ddwh" Nov 27 09:29:48 crc kubenswrapper[4971]: I1127 09:29:48.021601 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4ddwh"] Nov 27 09:29:48 crc kubenswrapper[4971]: I1127 09:29:48.113835 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ddwh" event={"ID":"cb1df5af-eef3-4367-aa86-a48187caf2f3","Type":"ContainerStarted","Data":"d4018a5a360ad4bfdd0ce22bbb35a817cea98bf916c7a4e305bd775a47ee9621"} Nov 27 09:29:49 crc kubenswrapper[4971]: I1127 09:29:49.152292 4971 generic.go:334] "Generic (PLEG): container finished" podID="cb1df5af-eef3-4367-aa86-a48187caf2f3" containerID="c9141cf74cfc43fc7e7071948aafe2d93fef60b0082d26af66a11b68a48fe7c2" exitCode=0 Nov 27 09:29:49 crc kubenswrapper[4971]: I1127 09:29:49.153494 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ddwh" event={"ID":"cb1df5af-eef3-4367-aa86-a48187caf2f3","Type":"ContainerDied","Data":"c9141cf74cfc43fc7e7071948aafe2d93fef60b0082d26af66a11b68a48fe7c2"} Nov 27 09:29:49 crc kubenswrapper[4971]: I1127 09:29:49.157894 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 27 09:29:50 crc kubenswrapper[4971]: I1127 09:29:50.168243 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ddwh" event={"ID":"cb1df5af-eef3-4367-aa86-a48187caf2f3","Type":"ContainerStarted","Data":"385a770dd8e51ea5add685e69b9c9e5ab7ac48bdb42a416843ddb31096d56aa5"} Nov 27 09:29:51 crc kubenswrapper[4971]: I1127 09:29:51.184477 4971 generic.go:334] "Generic (PLEG): container finished" podID="cb1df5af-eef3-4367-aa86-a48187caf2f3" containerID="385a770dd8e51ea5add685e69b9c9e5ab7ac48bdb42a416843ddb31096d56aa5" exitCode=0 Nov 27 09:29:51 crc kubenswrapper[4971]: I1127 
09:29:51.184621 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ddwh" event={"ID":"cb1df5af-eef3-4367-aa86-a48187caf2f3","Type":"ContainerDied","Data":"385a770dd8e51ea5add685e69b9c9e5ab7ac48bdb42a416843ddb31096d56aa5"} Nov 27 09:29:52 crc kubenswrapper[4971]: I1127 09:29:52.199591 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ddwh" event={"ID":"cb1df5af-eef3-4367-aa86-a48187caf2f3","Type":"ContainerStarted","Data":"3cf9a7c5f05946bedb9ef622fedb2db104154afc279a7aaafd5c02078da35cfa"} Nov 27 09:29:52 crc kubenswrapper[4971]: I1127 09:29:52.221208 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4ddwh" podStartSLOduration=2.7541223 podStartE2EDuration="5.221182171s" podCreationTimestamp="2025-11-27 09:29:47 +0000 UTC" firstStartedPulling="2025-11-27 09:29:49.157634131 +0000 UTC m=+9427.349678049" lastFinishedPulling="2025-11-27 09:29:51.624694002 +0000 UTC m=+9429.816737920" observedRunningTime="2025-11-27 09:29:52.219546864 +0000 UTC m=+9430.411590812" watchObservedRunningTime="2025-11-27 09:29:52.221182171 +0000 UTC m=+9430.413226089" Nov 27 09:29:56 crc kubenswrapper[4971]: I1127 09:29:56.551659 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:29:57 crc kubenswrapper[4971]: I1127 09:29:57.468414 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4ddwh" Nov 27 09:29:57 crc kubenswrapper[4971]: I1127 09:29:57.468474 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4ddwh" Nov 27 09:29:57 crc kubenswrapper[4971]: I1127 09:29:57.537281 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4ddwh" Nov 27 09:29:58 crc kubenswrapper[4971]: I1127 09:29:58.274343 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"5ffc606bf634b42b1e0a76f2a24b1cf5412e73d845ab4a6a20fd731e6c6294ae"} Nov 27 09:29:58 crc kubenswrapper[4971]: I1127 09:29:58.339004 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4ddwh" Nov 27 09:29:58 crc kubenswrapper[4971]: I1127 09:29:58.389478 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4ddwh"] Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.160991 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd"] Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.164155 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd" Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.167333 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.167739 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.176288 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd"] Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.268846 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1ffc5a08-00af-4407-8e27-881e5ac48b51-secret-volume\") pod \"collect-profiles-29403930-4xqpd\" (UID: \"1ffc5a08-00af-4407-8e27-881e5ac48b51\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd" Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.268931 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1ffc5a08-00af-4407-8e27-881e5ac48b51-config-volume\") pod \"collect-profiles-29403930-4xqpd\" (UID: \"1ffc5a08-00af-4407-8e27-881e5ac48b51\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd" Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.269023 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2tn5\" (UniqueName: \"kubernetes.io/projected/1ffc5a08-00af-4407-8e27-881e5ac48b51-kube-api-access-b2tn5\") pod \"collect-profiles-29403930-4xqpd\" (UID: \"1ffc5a08-00af-4407-8e27-881e5ac48b51\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd" Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.303130 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4ddwh" podUID="cb1df5af-eef3-4367-aa86-a48187caf2f3" containerName="registry-server" containerID="cri-o://3cf9a7c5f05946bedb9ef622fedb2db104154afc279a7aaafd5c02078da35cfa" gracePeriod=2 Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.372132 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1ffc5a08-00af-4407-8e27-881e5ac48b51-secret-volume\") pod \"collect-profiles-29403930-4xqpd\" (UID: \"1ffc5a08-00af-4407-8e27-881e5ac48b51\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd" Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.373057 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1ffc5a08-00af-4407-8e27-881e5ac48b51-config-volume\") pod \"collect-profiles-29403930-4xqpd\" (UID: \"1ffc5a08-00af-4407-8e27-881e5ac48b51\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd" Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.373346 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2tn5\" (UniqueName: \"kubernetes.io/projected/1ffc5a08-00af-4407-8e27-881e5ac48b51-kube-api-access-b2tn5\") pod \"collect-profiles-29403930-4xqpd\" (UID: 
\"1ffc5a08-00af-4407-8e27-881e5ac48b51\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd" Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.374097 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1ffc5a08-00af-4407-8e27-881e5ac48b51-config-volume\") pod \"collect-profiles-29403930-4xqpd\" (UID: \"1ffc5a08-00af-4407-8e27-881e5ac48b51\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd" Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.380985 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1ffc5a08-00af-4407-8e27-881e5ac48b51-secret-volume\") pod \"collect-profiles-29403930-4xqpd\" (UID: \"1ffc5a08-00af-4407-8e27-881e5ac48b51\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd" Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.404021 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2tn5\" (UniqueName: \"kubernetes.io/projected/1ffc5a08-00af-4407-8e27-881e5ac48b51-kube-api-access-b2tn5\") pod \"collect-profiles-29403930-4xqpd\" (UID: \"1ffc5a08-00af-4407-8e27-881e5ac48b51\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd" Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.508237 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd" Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.827189 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4ddwh" Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.991712 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb1df5af-eef3-4367-aa86-a48187caf2f3-catalog-content\") pod \"cb1df5af-eef3-4367-aa86-a48187caf2f3\" (UID: \"cb1df5af-eef3-4367-aa86-a48187caf2f3\") " Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.991974 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktjkw\" (UniqueName: \"kubernetes.io/projected/cb1df5af-eef3-4367-aa86-a48187caf2f3-kube-api-access-ktjkw\") pod \"cb1df5af-eef3-4367-aa86-a48187caf2f3\" (UID: \"cb1df5af-eef3-4367-aa86-a48187caf2f3\") " Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.992205 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb1df5af-eef3-4367-aa86-a48187caf2f3-utilities\") pod \"cb1df5af-eef3-4367-aa86-a48187caf2f3\" (UID: \"cb1df5af-eef3-4367-aa86-a48187caf2f3\") " Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.993836 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb1df5af-eef3-4367-aa86-a48187caf2f3-utilities" (OuterVolumeSpecName: "utilities") pod "cb1df5af-eef3-4367-aa86-a48187caf2f3" (UID: "cb1df5af-eef3-4367-aa86-a48187caf2f3"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:30:00 crc kubenswrapper[4971]: I1127 09:30:00.999201 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb1df5af-eef3-4367-aa86-a48187caf2f3-kube-api-access-ktjkw" (OuterVolumeSpecName: "kube-api-access-ktjkw") pod "cb1df5af-eef3-4367-aa86-a48187caf2f3" (UID: "cb1df5af-eef3-4367-aa86-a48187caf2f3"). InnerVolumeSpecName "kube-api-access-ktjkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.014268 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb1df5af-eef3-4367-aa86-a48187caf2f3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cb1df5af-eef3-4367-aa86-a48187caf2f3" (UID: "cb1df5af-eef3-4367-aa86-a48187caf2f3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.064801 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd"] Nov 27 09:30:01 crc kubenswrapper[4971]: W1127 09:30:01.067281 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1ffc5a08_00af_4407_8e27_881e5ac48b51.slice/crio-143c2e6e410bd91f7035f94a9e11cebebebe0b7f2b919167e1955f90567ef2c0 WatchSource:0}: Error finding container 143c2e6e410bd91f7035f94a9e11cebebebe0b7f2b919167e1955f90567ef2c0: Status 404 returned error can't find the container with id 143c2e6e410bd91f7035f94a9e11cebebebe0b7f2b919167e1955f90567ef2c0 Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.095973 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb1df5af-eef3-4367-aa86-a48187caf2f3-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.096042 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb1df5af-eef3-4367-aa86-a48187caf2f3-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.096057 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktjkw\" (UniqueName: \"kubernetes.io/projected/cb1df5af-eef3-4367-aa86-a48187caf2f3-kube-api-access-ktjkw\") on node \"crc\" DevicePath \"\"" Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.320014 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd" event={"ID":"1ffc5a08-00af-4407-8e27-881e5ac48b51","Type":"ContainerStarted","Data":"a1f5538d6b843fa44449ebc1690d1abb44e5b04835009b3cd10aa90035f3de4c"} Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.320476 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd" event={"ID":"1ffc5a08-00af-4407-8e27-881e5ac48b51","Type":"ContainerStarted","Data":"143c2e6e410bd91f7035f94a9e11cebebebe0b7f2b919167e1955f90567ef2c0"} Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.325186 4971 generic.go:334] "Generic (PLEG): container finished" podID="cb1df5af-eef3-4367-aa86-a48187caf2f3" containerID="3cf9a7c5f05946bedb9ef622fedb2db104154afc279a7aaafd5c02078da35cfa" exitCode=0 Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.325257 4971 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ddwh" event={"ID":"cb1df5af-eef3-4367-aa86-a48187caf2f3","Type":"ContainerDied","Data":"3cf9a7c5f05946bedb9ef622fedb2db104154afc279a7aaafd5c02078da35cfa"} Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.325291 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ddwh" event={"ID":"cb1df5af-eef3-4367-aa86-a48187caf2f3","Type":"ContainerDied","Data":"d4018a5a360ad4bfdd0ce22bbb35a817cea98bf916c7a4e305bd775a47ee9621"} Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.325311 4971 scope.go:117] "RemoveContainer" containerID="3cf9a7c5f05946bedb9ef622fedb2db104154afc279a7aaafd5c02078da35cfa" Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.325594 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4ddwh" Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.356399 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd" podStartSLOduration=1.356363454 podStartE2EDuration="1.356363454s" podCreationTimestamp="2025-11-27 09:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 09:30:01.343600641 +0000 UTC m=+9439.535644579" watchObservedRunningTime="2025-11-27 09:30:01.356363454 +0000 UTC m=+9439.548407382" Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.385731 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4ddwh"] Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.390073 4971 scope.go:117] "RemoveContainer" containerID="385a770dd8e51ea5add685e69b9c9e5ab7ac48bdb42a416843ddb31096d56aa5" Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.401649 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4ddwh"] Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.481793 4971 scope.go:117] "RemoveContainer" containerID="c9141cf74cfc43fc7e7071948aafe2d93fef60b0082d26af66a11b68a48fe7c2" Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.550102 4971 scope.go:117] "RemoveContainer" containerID="3cf9a7c5f05946bedb9ef622fedb2db104154afc279a7aaafd5c02078da35cfa" Nov 27 09:30:01 crc kubenswrapper[4971]: E1127 09:30:01.552426 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cf9a7c5f05946bedb9ef622fedb2db104154afc279a7aaafd5c02078da35cfa\": container with ID starting with 3cf9a7c5f05946bedb9ef622fedb2db104154afc279a7aaafd5c02078da35cfa not found: ID does not exist" containerID="3cf9a7c5f05946bedb9ef622fedb2db104154afc279a7aaafd5c02078da35cfa" Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.552481 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cf9a7c5f05946bedb9ef622fedb2db104154afc279a7aaafd5c02078da35cfa"} err="failed to get container status \"3cf9a7c5f05946bedb9ef622fedb2db104154afc279a7aaafd5c02078da35cfa\": rpc error: code = NotFound desc = could not find container \"3cf9a7c5f05946bedb9ef622fedb2db104154afc279a7aaafd5c02078da35cfa\": container with ID starting with 3cf9a7c5f05946bedb9ef622fedb2db104154afc279a7aaafd5c02078da35cfa not found: ID does not exist" Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.552507 4971 scope.go:117] 
"RemoveContainer" containerID="385a770dd8e51ea5add685e69b9c9e5ab7ac48bdb42a416843ddb31096d56aa5" Nov 27 09:30:01 crc kubenswrapper[4971]: E1127 09:30:01.552806 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"385a770dd8e51ea5add685e69b9c9e5ab7ac48bdb42a416843ddb31096d56aa5\": container with ID starting with 385a770dd8e51ea5add685e69b9c9e5ab7ac48bdb42a416843ddb31096d56aa5 not found: ID does not exist" containerID="385a770dd8e51ea5add685e69b9c9e5ab7ac48bdb42a416843ddb31096d56aa5" Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.552846 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"385a770dd8e51ea5add685e69b9c9e5ab7ac48bdb42a416843ddb31096d56aa5"} err="failed to get container status \"385a770dd8e51ea5add685e69b9c9e5ab7ac48bdb42a416843ddb31096d56aa5\": rpc error: code = NotFound desc = could not find container \"385a770dd8e51ea5add685e69b9c9e5ab7ac48bdb42a416843ddb31096d56aa5\": container with ID starting with 385a770dd8e51ea5add685e69b9c9e5ab7ac48bdb42a416843ddb31096d56aa5 not found: ID does not exist" Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.552866 4971 scope.go:117] "RemoveContainer" containerID="c9141cf74cfc43fc7e7071948aafe2d93fef60b0082d26af66a11b68a48fe7c2" Nov 27 09:30:01 crc kubenswrapper[4971]: E1127 09:30:01.553141 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9141cf74cfc43fc7e7071948aafe2d93fef60b0082d26af66a11b68a48fe7c2\": container with ID starting with c9141cf74cfc43fc7e7071948aafe2d93fef60b0082d26af66a11b68a48fe7c2 not found: ID does not exist" containerID="c9141cf74cfc43fc7e7071948aafe2d93fef60b0082d26af66a11b68a48fe7c2" Nov 27 09:30:01 crc kubenswrapper[4971]: I1127 09:30:01.553180 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9141cf74cfc43fc7e7071948aafe2d93fef60b0082d26af66a11b68a48fe7c2"} err="failed to get container status \"c9141cf74cfc43fc7e7071948aafe2d93fef60b0082d26af66a11b68a48fe7c2\": rpc error: code = NotFound desc = could not find container \"c9141cf74cfc43fc7e7071948aafe2d93fef60b0082d26af66a11b68a48fe7c2\": container with ID starting with c9141cf74cfc43fc7e7071948aafe2d93fef60b0082d26af66a11b68a48fe7c2 not found: ID does not exist" Nov 27 09:30:02 crc kubenswrapper[4971]: I1127 09:30:02.341889 4971 generic.go:334] "Generic (PLEG): container finished" podID="1ffc5a08-00af-4407-8e27-881e5ac48b51" containerID="a1f5538d6b843fa44449ebc1690d1abb44e5b04835009b3cd10aa90035f3de4c" exitCode=0 Nov 27 09:30:02 crc kubenswrapper[4971]: I1127 09:30:02.342015 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd" event={"ID":"1ffc5a08-00af-4407-8e27-881e5ac48b51","Type":"ContainerDied","Data":"a1f5538d6b843fa44449ebc1690d1abb44e5b04835009b3cd10aa90035f3de4c"} Nov 27 09:30:02 crc kubenswrapper[4971]: I1127 09:30:02.566084 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb1df5af-eef3-4367-aa86-a48187caf2f3" path="/var/lib/kubelet/pods/cb1df5af-eef3-4367-aa86-a48187caf2f3/volumes" Nov 27 09:30:03 crc kubenswrapper[4971]: I1127 09:30:03.778290 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd" Nov 27 09:30:03 crc kubenswrapper[4971]: I1127 09:30:03.899829 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1ffc5a08-00af-4407-8e27-881e5ac48b51-config-volume\") pod \"1ffc5a08-00af-4407-8e27-881e5ac48b51\" (UID: \"1ffc5a08-00af-4407-8e27-881e5ac48b51\") " Nov 27 09:30:03 crc kubenswrapper[4971]: I1127 09:30:03.899925 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2tn5\" (UniqueName: \"kubernetes.io/projected/1ffc5a08-00af-4407-8e27-881e5ac48b51-kube-api-access-b2tn5\") pod \"1ffc5a08-00af-4407-8e27-881e5ac48b51\" (UID: \"1ffc5a08-00af-4407-8e27-881e5ac48b51\") " Nov 27 09:30:03 crc kubenswrapper[4971]: I1127 09:30:03.900325 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1ffc5a08-00af-4407-8e27-881e5ac48b51-secret-volume\") pod \"1ffc5a08-00af-4407-8e27-881e5ac48b51\" (UID: \"1ffc5a08-00af-4407-8e27-881e5ac48b51\") " Nov 27 09:30:03 crc kubenswrapper[4971]: I1127 09:30:03.905384 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ffc5a08-00af-4407-8e27-881e5ac48b51-config-volume" (OuterVolumeSpecName: "config-volume") pod "1ffc5a08-00af-4407-8e27-881e5ac48b51" (UID: "1ffc5a08-00af-4407-8e27-881e5ac48b51"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:30:03 crc kubenswrapper[4971]: I1127 09:30:03.915044 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ffc5a08-00af-4407-8e27-881e5ac48b51-kube-api-access-b2tn5" (OuterVolumeSpecName: "kube-api-access-b2tn5") pod "1ffc5a08-00af-4407-8e27-881e5ac48b51" (UID: "1ffc5a08-00af-4407-8e27-881e5ac48b51"). InnerVolumeSpecName "kube-api-access-b2tn5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:30:03 crc kubenswrapper[4971]: I1127 09:30:03.916026 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ffc5a08-00af-4407-8e27-881e5ac48b51-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1ffc5a08-00af-4407-8e27-881e5ac48b51" (UID: "1ffc5a08-00af-4407-8e27-881e5ac48b51"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:30:04 crc kubenswrapper[4971]: I1127 09:30:04.004432 4971 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1ffc5a08-00af-4407-8e27-881e5ac48b51-config-volume\") on node \"crc\" DevicePath \"\"" Nov 27 09:30:04 crc kubenswrapper[4971]: I1127 09:30:04.004481 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2tn5\" (UniqueName: \"kubernetes.io/projected/1ffc5a08-00af-4407-8e27-881e5ac48b51-kube-api-access-b2tn5\") on node \"crc\" DevicePath \"\"" Nov 27 09:30:04 crc kubenswrapper[4971]: I1127 09:30:04.004492 4971 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1ffc5a08-00af-4407-8e27-881e5ac48b51-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 27 09:30:04 crc kubenswrapper[4971]: I1127 09:30:04.367943 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd" event={"ID":"1ffc5a08-00af-4407-8e27-881e5ac48b51","Type":"ContainerDied","Data":"143c2e6e410bd91f7035f94a9e11cebebebe0b7f2b919167e1955f90567ef2c0"} Nov 27 09:30:04 crc kubenswrapper[4971]: I1127 09:30:04.368009 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="143c2e6e410bd91f7035f94a9e11cebebebe0b7f2b919167e1955f90567ef2c0" Nov 27 09:30:04 crc kubenswrapper[4971]: I1127 09:30:04.368040 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd" Nov 27 09:30:04 crc kubenswrapper[4971]: I1127 09:30:04.442216 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh"] Nov 27 09:30:04 crc kubenswrapper[4971]: I1127 09:30:04.453809 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403885-zdqlh"] Nov 27 09:30:04 crc kubenswrapper[4971]: I1127 09:30:04.569152 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9f5d1b5-794a-4b95-893a-f7036a07cf5a" path="/var/lib/kubelet/pods/e9f5d1b5-794a-4b95-893a-f7036a07cf5a/volumes" Nov 27 09:30:50 crc kubenswrapper[4971]: I1127 09:30:50.913240 4971 generic.go:334] "Generic (PLEG): container finished" podID="780772cd-49b3-471d-b7a6-cdbde5c28c7a" containerID="83746bf3e172651d3a4195169fee15c919cc61a386b437d22d421b06ccf2cdaa" exitCode=0 Nov 27 09:30:50 crc kubenswrapper[4971]: I1127 09:30:50.913417 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" event={"ID":"780772cd-49b3-471d-b7a6-cdbde5c28c7a","Type":"ContainerDied","Data":"83746bf3e172651d3a4195169fee15c919cc61a386b437d22d421b06ccf2cdaa"} Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.483741 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.523939 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-ssh-key\") pod \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.524092 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-inventory\") pod \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.524177 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2ngz\" (UniqueName: \"kubernetes.io/projected/780772cd-49b3-471d-b7a6-cdbde5c28c7a-kube-api-access-q2ngz\") pod \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.524290 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-libvirt-combined-ca-bundle\") pod \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.524443 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-libvirt-secret-0\") pod \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.524494 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-ceph\") pod \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\" (UID: \"780772cd-49b3-471d-b7a6-cdbde5c28c7a\") " Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.532165 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "780772cd-49b3-471d-b7a6-cdbde5c28c7a" (UID: "780772cd-49b3-471d-b7a6-cdbde5c28c7a"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.532289 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/780772cd-49b3-471d-b7a6-cdbde5c28c7a-kube-api-access-q2ngz" (OuterVolumeSpecName: "kube-api-access-q2ngz") pod "780772cd-49b3-471d-b7a6-cdbde5c28c7a" (UID: "780772cd-49b3-471d-b7a6-cdbde5c28c7a"). InnerVolumeSpecName "kube-api-access-q2ngz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.532755 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-ceph" (OuterVolumeSpecName: "ceph") pod "780772cd-49b3-471d-b7a6-cdbde5c28c7a" (UID: "780772cd-49b3-471d-b7a6-cdbde5c28c7a"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.568905 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "780772cd-49b3-471d-b7a6-cdbde5c28c7a" (UID: "780772cd-49b3-471d-b7a6-cdbde5c28c7a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.569047 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "780772cd-49b3-471d-b7a6-cdbde5c28c7a" (UID: "780772cd-49b3-471d-b7a6-cdbde5c28c7a"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.593142 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-inventory" (OuterVolumeSpecName: "inventory") pod "780772cd-49b3-471d-b7a6-cdbde5c28c7a" (UID: "780772cd-49b3-471d-b7a6-cdbde5c28c7a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.627439 4971 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.627481 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-ceph\") on node \"crc\" DevicePath \"\"" Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.627496 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.627507 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.627521 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2ngz\" (UniqueName: \"kubernetes.io/projected/780772cd-49b3-471d-b7a6-cdbde5c28c7a-kube-api-access-q2ngz\") on node \"crc\" DevicePath \"\"" Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.627553 4971 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/780772cd-49b3-471d-b7a6-cdbde5c28c7a-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.941606 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" event={"ID":"780772cd-49b3-471d-b7a6-cdbde5c28c7a","Type":"ContainerDied","Data":"770811c78b6be84ee6f16c0f6c8b66ae68fcbb3d54208a7e1692f2460890c115"} Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.942272 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="770811c78b6be84ee6f16c0f6c8b66ae68fcbb3d54208a7e1692f2460890c115" Nov 27 09:30:52 crc kubenswrapper[4971]: I1127 09:30:52.941774 4971 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-hhpn7" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.129923 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-vmfjt"] Nov 27 09:30:53 crc kubenswrapper[4971]: E1127 09:30:53.130636 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb1df5af-eef3-4367-aa86-a48187caf2f3" containerName="extract-utilities" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.130659 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb1df5af-eef3-4367-aa86-a48187caf2f3" containerName="extract-utilities" Nov 27 09:30:53 crc kubenswrapper[4971]: E1127 09:30:53.130682 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb1df5af-eef3-4367-aa86-a48187caf2f3" containerName="registry-server" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.130692 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb1df5af-eef3-4367-aa86-a48187caf2f3" containerName="registry-server" Nov 27 09:30:53 crc kubenswrapper[4971]: E1127 09:30:53.130720 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ffc5a08-00af-4407-8e27-881e5ac48b51" containerName="collect-profiles" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.130731 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ffc5a08-00af-4407-8e27-881e5ac48b51" containerName="collect-profiles" Nov 27 09:30:53 crc kubenswrapper[4971]: E1127 09:30:53.130763 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb1df5af-eef3-4367-aa86-a48187caf2f3" containerName="extract-content" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.130771 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb1df5af-eef3-4367-aa86-a48187caf2f3" containerName="extract-content" Nov 27 09:30:53 crc kubenswrapper[4971]: E1127 09:30:53.130787 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="780772cd-49b3-471d-b7a6-cdbde5c28c7a" containerName="libvirt-openstack-openstack-cell1" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.130797 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="780772cd-49b3-471d-b7a6-cdbde5c28c7a" containerName="libvirt-openstack-openstack-cell1" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.131079 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="780772cd-49b3-471d-b7a6-cdbde5c28c7a" containerName="libvirt-openstack-openstack-cell1" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.131105 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb1df5af-eef3-4367-aa86-a48187caf2f3" containerName="registry-server" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.131125 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ffc5a08-00af-4407-8e27-881e5ac48b51" containerName="collect-profiles" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.132385 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.137729 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.138945 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-jnkbm" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.139066 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.138945 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.139035 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.140008 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.142022 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.150343 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-vmfjt"] Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.245423 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-ceph\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.245483 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.245553 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-inventory\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.245621 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.245802 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cell1-combined-ca-bundle\") pod 
\"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.245858 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.246135 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.246202 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5z7zw\" (UniqueName: \"kubernetes.io/projected/a8268ae2-f26c-4829-9552-75e2645d7ffa-kube-api-access-5z7zw\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.246259 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.246392 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.246628 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.349315 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.349906 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: 
\"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.350135 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.350253 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5z7zw\" (UniqueName: \"kubernetes.io/projected/a8268ae2-f26c-4829-9552-75e2645d7ffa-kube-api-access-5z7zw\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.350382 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.350544 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.350785 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.350977 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-ceph\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.351102 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.351238 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-inventory\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: 
\"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.351375 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.352457 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.354693 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.356513 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.357384 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.357816 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-inventory\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.358886 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-ceph\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.359100 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.359267 4971 
Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.359503 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt"
Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.361234 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt"
Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.373239 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5z7zw\" (UniqueName: \"kubernetes.io/projected/a8268ae2-f26c-4829-9552-75e2645d7ffa-kube-api-access-5z7zw\") pod \"nova-cell1-openstack-openstack-cell1-vmfjt\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt"
Nov 27 09:30:53 crc kubenswrapper[4971]: I1127 09:30:53.458525 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt"
Nov 27 09:30:54 crc kubenswrapper[4971]: I1127 09:30:54.054242 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-vmfjt"]
Nov 27 09:30:54 crc kubenswrapper[4971]: I1127 09:30:54.987724 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" event={"ID":"a8268ae2-f26c-4829-9552-75e2645d7ffa","Type":"ContainerStarted","Data":"181a0201fbbfade40115745ec2b5a10ae39ca157ff36e2d98d99c7cf07f366ef"}
Nov 27 09:30:56 crc kubenswrapper[4971]: I1127 09:30:56.000954 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" event={"ID":"a8268ae2-f26c-4829-9552-75e2645d7ffa","Type":"ContainerStarted","Data":"92b7b4079e3fbabf71f119081483af9fb84631e658a215a23b74ad6498a1986a"}
Nov 27 09:30:56 crc kubenswrapper[4971]: I1127 09:30:56.035160 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" podStartSLOduration=2.217134015 podStartE2EDuration="3.035134015s" podCreationTimestamp="2025-11-27 09:30:53 +0000 UTC" firstStartedPulling="2025-11-27 09:30:54.060683824 +0000 UTC m=+9492.252727752" lastFinishedPulling="2025-11-27 09:30:54.878683834 +0000 UTC m=+9493.070727752" observedRunningTime="2025-11-27 09:30:56.031694288 +0000 UTC m=+9494.223738206" watchObservedRunningTime="2025-11-27 09:30:56.035134015 +0000 UTC m=+9494.227177933"
Nov 27 09:31:00 crc kubenswrapper[4971]: I1127 09:31:00.021471 4971 scope.go:117] "RemoveContainer" containerID="3668981b28bec3efabb0d7b967d4cdfb5e13ee8e721c4248037eb7cbffa352d0"
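The startup-duration entry above is internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (09:30:56.035134015 - 09:30:53 = 3.035134015s), and podStartSLOduration appears to be that end-to-end time minus the image-pull window, computed from the monotonic "m=+..." offsets. A small Go sketch checking the arithmetic with the logged values:

package main

import "fmt"

func main() {
	// Monotonic clock offsets (the "m=+..." values) and the wall-clock
	// E2E duration copied from the pod_startup_latency_tracker entry above.
	const (
		firstStartedPulling = 9492.252727752 // seconds since kubelet start
		lastFinishedPulling = 9493.070727752
		podStartE2E         = 3.035134015 // watchObservedRunningTime - podCreationTimestamp
	)
	pull := lastFinishedPulling - firstStartedPulling
	slo := podStartE2E - pull
	fmt.Printf("image pull: %.9fs, podStartSLOduration: %.9fs\n", pull, slo)
	// Prints 0.818000000s and 2.217134015s, matching the logged
	// podStartSLOduration=2.217134015: SLO time excludes image pulls.
}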
Nov 27 09:32:26 crc kubenswrapper[4971]: I1127 09:32:26.413246 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 09:32:26 crc kubenswrapper[4971]: I1127 09:32:26.414003 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 09:32:56 crc kubenswrapper[4971]: I1127 09:32:56.413145 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 09:32:56 crc kubenswrapper[4971]: I1127 09:32:56.414192 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 09:33:26 crc kubenswrapper[4971]: I1127 09:33:26.412933 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 09:33:26 crc kubenswrapper[4971]: I1127 09:33:26.413857 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 09:33:26 crc kubenswrapper[4971]: I1127 09:33:26.413922 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h"
Nov 27 09:33:26 crc kubenswrapper[4971]: I1127 09:33:26.415165 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5ffc606bf634b42b1e0a76f2a24b1cf5412e73d845ab4a6a20fd731e6c6294ae"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 27 09:33:26 crc kubenswrapper[4971]: I1127 09:33:26.415253 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://5ffc606bf634b42b1e0a76f2a24b1cf5412e73d845ab4a6a20fd731e6c6294ae" gracePeriod=600
Nov 27 09:33:26 crc kubenswrapper[4971]: I1127 09:33:26.986124 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="5ffc606bf634b42b1e0a76f2a24b1cf5412e73d845ab4a6a20fd731e6c6294ae" exitCode=0
Nov 27 09:33:26 crc kubenswrapper[4971]: I1127 09:33:26.986206 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"5ffc606bf634b42b1e0a76f2a24b1cf5412e73d845ab4a6a20fd731e6c6294ae"}
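The probe entries above are plain HTTP GETs against the container's health endpoint; any transport error, such as the "connect: connection refused" here, counts as a failure. The failures land at 09:32:26, 09:32:56 and 09:33:26 before the kill, consistent with a 30-second probe period and a failure threshold of three. A hedged, stdlib-only Go sketch of one such check (illustrative, not the kubelet's prober):

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probe performs one HTTP liveness check: a GET against the health
// endpoint, where a transport error or a status outside 200-399 (the
// range Kubernetes HTTP probes treat as success) counts as a failure.
func probe(url string) error {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. dial tcp 127.0.0.1:8798: connect: connection refused
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probe("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println("Probe failed:", err)
	}
}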
for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"5ffc606bf634b42b1e0a76f2a24b1cf5412e73d845ab4a6a20fd731e6c6294ae"} Nov 27 09:33:26 crc kubenswrapper[4971]: I1127 09:33:26.986894 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73"} Nov 27 09:33:26 crc kubenswrapper[4971]: I1127 09:33:26.986959 4971 scope.go:117] "RemoveContainer" containerID="34cb8bf867d08698d2d333be44c85498b1aa2a1b0a6e242c6c9357ab60f2fb63" Nov 27 09:34:36 crc kubenswrapper[4971]: I1127 09:34:36.827404 4971 generic.go:334] "Generic (PLEG): container finished" podID="a8268ae2-f26c-4829-9552-75e2645d7ffa" containerID="92b7b4079e3fbabf71f119081483af9fb84631e658a215a23b74ad6498a1986a" exitCode=0 Nov 27 09:34:36 crc kubenswrapper[4971]: I1127 09:34:36.827555 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" event={"ID":"a8268ae2-f26c-4829-9552-75e2645d7ffa","Type":"ContainerDied","Data":"92b7b4079e3fbabf71f119081483af9fb84631e658a215a23b74ad6498a1986a"} Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.354503 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.463901 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cell1-combined-ca-bundle\") pod \"a8268ae2-f26c-4829-9552-75e2645d7ffa\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.463999 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-ssh-key\") pod \"a8268ae2-f26c-4829-9552-75e2645d7ffa\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.464032 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-migration-ssh-key-1\") pod \"a8268ae2-f26c-4829-9552-75e2645d7ffa\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.464073 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cells-global-config-1\") pod \"a8268ae2-f26c-4829-9552-75e2645d7ffa\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.464185 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cells-global-config-0\") pod \"a8268ae2-f26c-4829-9552-75e2645d7ffa\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.466360 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-inventory\") pod \"a8268ae2-f26c-4829-9552-75e2645d7ffa\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.466804 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5z7zw\" (UniqueName: \"kubernetes.io/projected/a8268ae2-f26c-4829-9552-75e2645d7ffa-kube-api-access-5z7zw\") pod \"a8268ae2-f26c-4829-9552-75e2645d7ffa\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.466887 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cell1-compute-config-0\") pod \"a8268ae2-f26c-4829-9552-75e2645d7ffa\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.466959 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cell1-compute-config-1\") pod \"a8268ae2-f26c-4829-9552-75e2645d7ffa\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.467386 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-ceph\") pod \"a8268ae2-f26c-4829-9552-75e2645d7ffa\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.467475 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-migration-ssh-key-0\") pod \"a8268ae2-f26c-4829-9552-75e2645d7ffa\" (UID: \"a8268ae2-f26c-4829-9552-75e2645d7ffa\") " Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.470744 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "a8268ae2-f26c-4829-9552-75e2645d7ffa" (UID: "a8268ae2-f26c-4829-9552-75e2645d7ffa"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.471523 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8268ae2-f26c-4829-9552-75e2645d7ffa-kube-api-access-5z7zw" (OuterVolumeSpecName: "kube-api-access-5z7zw") pod "a8268ae2-f26c-4829-9552-75e2645d7ffa" (UID: "a8268ae2-f26c-4829-9552-75e2645d7ffa"). InnerVolumeSpecName "kube-api-access-5z7zw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.485650 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-ceph" (OuterVolumeSpecName: "ceph") pod "a8268ae2-f26c-4829-9552-75e2645d7ffa" (UID: "a8268ae2-f26c-4829-9552-75e2645d7ffa"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.500799 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a8268ae2-f26c-4829-9552-75e2645d7ffa" (UID: "a8268ae2-f26c-4829-9552-75e2645d7ffa"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.509032 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "a8268ae2-f26c-4829-9552-75e2645d7ffa" (UID: "a8268ae2-f26c-4829-9552-75e2645d7ffa"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.509841 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "a8268ae2-f26c-4829-9552-75e2645d7ffa" (UID: "a8268ae2-f26c-4829-9552-75e2645d7ffa"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.511288 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "a8268ae2-f26c-4829-9552-75e2645d7ffa" (UID: "a8268ae2-f26c-4829-9552-75e2645d7ffa"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.514142 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "a8268ae2-f26c-4829-9552-75e2645d7ffa" (UID: "a8268ae2-f26c-4829-9552-75e2645d7ffa"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.515780 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "a8268ae2-f26c-4829-9552-75e2645d7ffa" (UID: "a8268ae2-f26c-4829-9552-75e2645d7ffa"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.516003 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-inventory" (OuterVolumeSpecName: "inventory") pod "a8268ae2-f26c-4829-9552-75e2645d7ffa" (UID: "a8268ae2-f26c-4829-9552-75e2645d7ffa"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.517570 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "a8268ae2-f26c-4829-9552-75e2645d7ffa" (UID: "a8268ae2-f26c-4829-9552-75e2645d7ffa"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.571476 4971 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.571519 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.571538 4971 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.571551 4971 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.571563 4971 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.571578 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.571605 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5z7zw\" (UniqueName: \"kubernetes.io/projected/a8268ae2-f26c-4829-9552-75e2645d7ffa-kube-api-access-5z7zw\") on node \"crc\" DevicePath \"\"" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.571617 4971 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.571628 4971 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.571639 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-ceph\") on node \"crc\" DevicePath \"\"" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.571654 4971 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: 
\"kubernetes.io/secret/a8268ae2-f26c-4829-9552-75e2645d7ffa-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.854679 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" event={"ID":"a8268ae2-f26c-4829-9552-75e2645d7ffa","Type":"ContainerDied","Data":"181a0201fbbfade40115745ec2b5a10ae39ca157ff36e2d98d99c7cf07f366ef"} Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.854760 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="181a0201fbbfade40115745ec2b5a10ae39ca157ff36e2d98d99c7cf07f366ef" Nov 27 09:34:38 crc kubenswrapper[4971]: I1127 09:34:38.854899 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-vmfjt" Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.049938 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-xbpj6"] Nov 27 09:34:39 crc kubenswrapper[4971]: E1127 09:34:39.050648 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8268ae2-f26c-4829-9552-75e2645d7ffa" containerName="nova-cell1-openstack-openstack-cell1" Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.050674 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8268ae2-f26c-4829-9552-75e2645d7ffa" containerName="nova-cell1-openstack-openstack-cell1" Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.051014 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8268ae2-f26c-4829-9552-75e2645d7ffa" containerName="nova-cell1-openstack-openstack-cell1" Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.053869 4971 util.go:30] "No sandbox for pod can be found. 
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.057389 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.057819 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-jnkbm"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.057885 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.058510 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.059047 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.083551 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-xbpj6"]
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.188172 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.188650 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-inventory\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.188685 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rhm6\" (UniqueName: \"kubernetes.io/projected/35017a9d-382f-4e37-9652-77e0a5aec05e-kube-api-access-8rhm6\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.188777 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ssh-key\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.188823 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.188857 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceph\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.188964 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.189067 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.291679 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ssh-key\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.291764 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.291809 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceph\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.291859 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.291920 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.292844 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.292908 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-inventory\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.292942 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rhm6\" (UniqueName: \"kubernetes.io/projected/35017a9d-382f-4e37-9652-77e0a5aec05e-kube-api-access-8rhm6\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.296286 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.296421 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ssh-key\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.297608 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceph\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.297722 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.298286 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.299099 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-inventory\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.299277 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.312451 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rhm6\" (UniqueName: \"kubernetes.io/projected/35017a9d-382f-4e37-9652-77e0a5aec05e-kube-api-access-8rhm6\") pod \"telemetry-openstack-openstack-cell1-xbpj6\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.388324 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-xbpj6"
Nov 27 09:34:39 crc kubenswrapper[4971]: I1127 09:34:39.972056 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-xbpj6"]
Nov 27 09:34:40 crc kubenswrapper[4971]: I1127 09:34:40.884733 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-xbpj6" event={"ID":"35017a9d-382f-4e37-9652-77e0a5aec05e","Type":"ContainerStarted","Data":"83431857a82ac6403e69e9148a0f1517f6a8bf543b182bc8f49b8af8d21cfba1"}
Nov 27 09:34:42 crc kubenswrapper[4971]: I1127 09:34:42.921934 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-xbpj6" event={"ID":"35017a9d-382f-4e37-9652-77e0a5aec05e","Type":"ContainerStarted","Data":"972fcaa0f61242c3ff448f5586646da5890fba5bf99c47a6caf83e3e9e0f8f4f"}
Nov 27 09:34:42 crc kubenswrapper[4971]: I1127 09:34:42.951313 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-openstack-openstack-cell1-xbpj6" podStartSLOduration=3.377009581 podStartE2EDuration="4.9512798s" podCreationTimestamp="2025-11-27 09:34:38 +0000 UTC" firstStartedPulling="2025-11-27 09:34:40.15876123 +0000 UTC m=+9718.350805158" lastFinishedPulling="2025-11-27 09:34:41.733031459 +0000 UTC m=+9719.925075377" observedRunningTime="2025-11-27 09:34:42.944816205 +0000 UTC m=+9721.136860143" watchObservedRunningTime="2025-11-27 09:34:42.9512798 +0000 UTC m=+9721.143323738"
Nov 27 09:35:26 crc kubenswrapper[4971]: I1127 09:35:26.413098 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 09:35:26 crc kubenswrapper[4971]: I1127 09:35:26.414033 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 09:35:34 crc kubenswrapper[4971]: I1127 09:35:34.762668 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2m25z"]
Nov 27 09:35:34 crc kubenswrapper[4971]: I1127 09:35:34.766235 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2m25z"
Nov 27 09:35:34 crc kubenswrapper[4971]: I1127 09:35:34.777749 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2m25z"]
Nov 27 09:35:34 crc kubenswrapper[4971]: I1127 09:35:34.899677 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb2f9e79-c33f-434d-b113-1644d573345d-catalog-content\") pod \"community-operators-2m25z\" (UID: \"eb2f9e79-c33f-434d-b113-1644d573345d\") " pod="openshift-marketplace/community-operators-2m25z"
Nov 27 09:35:34 crc kubenswrapper[4971]: I1127 09:35:34.900240 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb2f9e79-c33f-434d-b113-1644d573345d-utilities\") pod \"community-operators-2m25z\" (UID: \"eb2f9e79-c33f-434d-b113-1644d573345d\") " pod="openshift-marketplace/community-operators-2m25z"
Nov 27 09:35:34 crc kubenswrapper[4971]: I1127 09:35:34.900402 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgnhx\" (UniqueName: \"kubernetes.io/projected/eb2f9e79-c33f-434d-b113-1644d573345d-kube-api-access-dgnhx\") pod \"community-operators-2m25z\" (UID: \"eb2f9e79-c33f-434d-b113-1644d573345d\") " pod="openshift-marketplace/community-operators-2m25z"
Nov 27 09:35:35 crc kubenswrapper[4971]: I1127 09:35:35.002883 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb2f9e79-c33f-434d-b113-1644d573345d-catalog-content\") pod \"community-operators-2m25z\" (UID: \"eb2f9e79-c33f-434d-b113-1644d573345d\") " pod="openshift-marketplace/community-operators-2m25z"
Nov 27 09:35:35 crc kubenswrapper[4971]: I1127 09:35:35.002966 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb2f9e79-c33f-434d-b113-1644d573345d-utilities\") pod \"community-operators-2m25z\" (UID: \"eb2f9e79-c33f-434d-b113-1644d573345d\") " pod="openshift-marketplace/community-operators-2m25z"
Nov 27 09:35:35 crc kubenswrapper[4971]: I1127 09:35:35.003013 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgnhx\" (UniqueName: \"kubernetes.io/projected/eb2f9e79-c33f-434d-b113-1644d573345d-kube-api-access-dgnhx\") pod \"community-operators-2m25z\" (UID: \"eb2f9e79-c33f-434d-b113-1644d573345d\") " pod="openshift-marketplace/community-operators-2m25z"
Nov 27 09:35:35 crc kubenswrapper[4971]: I1127 09:35:35.003798 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb2f9e79-c33f-434d-b113-1644d573345d-catalog-content\") pod \"community-operators-2m25z\" (UID: \"eb2f9e79-c33f-434d-b113-1644d573345d\") " pod="openshift-marketplace/community-operators-2m25z"
Nov 27 09:35:35 crc kubenswrapper[4971]: I1127 09:35:35.003949 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb2f9e79-c33f-434d-b113-1644d573345d-utilities\") pod \"community-operators-2m25z\" (UID: \"eb2f9e79-c33f-434d-b113-1644d573345d\") " pod="openshift-marketplace/community-operators-2m25z"
Nov 27 09:35:35 crc kubenswrapper[4971]: I1127 09:35:35.034078 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgnhx\" (UniqueName: \"kubernetes.io/projected/eb2f9e79-c33f-434d-b113-1644d573345d-kube-api-access-dgnhx\") pod \"community-operators-2m25z\" (UID: \"eb2f9e79-c33f-434d-b113-1644d573345d\") " pod="openshift-marketplace/community-operators-2m25z"
Nov 27 09:35:35 crc kubenswrapper[4971]: I1127 09:35:35.093903 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2m25z"
Nov 27 09:35:35 crc kubenswrapper[4971]: I1127 09:35:35.724221 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2m25z"]
Nov 27 09:35:36 crc kubenswrapper[4971]: I1127 09:35:36.565561 4971 generic.go:334] "Generic (PLEG): container finished" podID="eb2f9e79-c33f-434d-b113-1644d573345d" containerID="7734e433acc4d7b5347cfe20d3b58e05806c6f90175d230958376d0ba8e7727d" exitCode=0
Nov 27 09:35:36 crc kubenswrapper[4971]: I1127 09:35:36.565606 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2m25z" event={"ID":"eb2f9e79-c33f-434d-b113-1644d573345d","Type":"ContainerDied","Data":"7734e433acc4d7b5347cfe20d3b58e05806c6f90175d230958376d0ba8e7727d"}
Nov 27 09:35:36 crc kubenswrapper[4971]: I1127 09:35:36.566029 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2m25z" event={"ID":"eb2f9e79-c33f-434d-b113-1644d573345d","Type":"ContainerStarted","Data":"fde703589fc74260c7c6a467e4930c2f2e7c2766cf0e0cd6d9f63533156e3d15"}
Nov 27 09:35:36 crc kubenswrapper[4971]: I1127 09:35:36.572654 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 27 09:35:37 crc kubenswrapper[4971]: I1127 09:35:37.579622 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2m25z" event={"ID":"eb2f9e79-c33f-434d-b113-1644d573345d","Type":"ContainerStarted","Data":"6711169737fa4cb07f59324add83a1d09c9c34527f29e0fc7140098fc52d4117"}
Nov 27 09:35:38 crc kubenswrapper[4971]: I1127 09:35:38.591034 4971 generic.go:334] "Generic (PLEG): container finished" podID="eb2f9e79-c33f-434d-b113-1644d573345d" containerID="6711169737fa4cb07f59324add83a1d09c9c34527f29e0fc7140098fc52d4117" exitCode=0
Nov 27 09:35:38 crc kubenswrapper[4971]: I1127 09:35:38.591094 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2m25z" event={"ID":"eb2f9e79-c33f-434d-b113-1644d573345d","Type":"ContainerDied","Data":"6711169737fa4cb07f59324add83a1d09c9c34527f29e0fc7140098fc52d4117"}
Nov 27 09:35:39 crc kubenswrapper[4971]: I1127 09:35:39.606129 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2m25z" event={"ID":"eb2f9e79-c33f-434d-b113-1644d573345d","Type":"ContainerStarted","Data":"ab35ed65001be3e0ddee62372d0f23b08692c8e250a4bbefaa39d790ec0f9145"}
Nov 27 09:35:39 crc kubenswrapper[4971]: I1127 09:35:39.624311 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2m25z" podStartSLOduration=3.021201099 podStartE2EDuration="5.624290726s" podCreationTimestamp="2025-11-27 09:35:34 +0000 UTC" firstStartedPulling="2025-11-27 09:35:36.572319549 +0000 UTC m=+9774.764363467" lastFinishedPulling="2025-11-27 09:35:39.175409176 +0000 UTC m=+9777.367453094" observedRunningTime="2025-11-27 09:35:39.623085001 +0000 UTC m=+9777.815128949" watchObservedRunningTime="2025-11-27 09:35:39.624290726 +0000 UTC m=+9777.816334644"
Nov 27 09:35:45 crc kubenswrapper[4971]: I1127 09:35:45.094864 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2m25z"
Nov 27 09:35:45 crc kubenswrapper[4971]: I1127 09:35:45.095766 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2m25z"
Nov 27 09:35:45 crc kubenswrapper[4971]: I1127 09:35:45.158551 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2m25z"
Nov 27 09:35:45 crc kubenswrapper[4971]: I1127 09:35:45.757023 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2m25z"
Nov 27 09:35:45 crc kubenswrapper[4971]: I1127 09:35:45.823495 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2m25z"]
Nov 27 09:35:47 crc kubenswrapper[4971]: I1127 09:35:47.713582 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2m25z" podUID="eb2f9e79-c33f-434d-b113-1644d573345d" containerName="registry-server" containerID="cri-o://ab35ed65001be3e0ddee62372d0f23b08692c8e250a4bbefaa39d790ec0f9145" gracePeriod=2
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.233682 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2m25z"
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.381735 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb2f9e79-c33f-434d-b113-1644d573345d-catalog-content\") pod \"eb2f9e79-c33f-434d-b113-1644d573345d\" (UID: \"eb2f9e79-c33f-434d-b113-1644d573345d\") "
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.381932 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgnhx\" (UniqueName: \"kubernetes.io/projected/eb2f9e79-c33f-434d-b113-1644d573345d-kube-api-access-dgnhx\") pod \"eb2f9e79-c33f-434d-b113-1644d573345d\" (UID: \"eb2f9e79-c33f-434d-b113-1644d573345d\") "
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.382109 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb2f9e79-c33f-434d-b113-1644d573345d-utilities\") pod \"eb2f9e79-c33f-434d-b113-1644d573345d\" (UID: \"eb2f9e79-c33f-434d-b113-1644d573345d\") "
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.383163 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb2f9e79-c33f-434d-b113-1644d573345d-utilities" (OuterVolumeSpecName: "utilities") pod "eb2f9e79-c33f-434d-b113-1644d573345d" (UID: "eb2f9e79-c33f-434d-b113-1644d573345d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.384060 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb2f9e79-c33f-434d-b113-1644d573345d-utilities\") on node \"crc\" DevicePath \"\""
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.391817 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb2f9e79-c33f-434d-b113-1644d573345d-kube-api-access-dgnhx" (OuterVolumeSpecName: "kube-api-access-dgnhx") pod "eb2f9e79-c33f-434d-b113-1644d573345d" (UID: "eb2f9e79-c33f-434d-b113-1644d573345d"). InnerVolumeSpecName "kube-api-access-dgnhx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.485638 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgnhx\" (UniqueName: \"kubernetes.io/projected/eb2f9e79-c33f-434d-b113-1644d573345d-kube-api-access-dgnhx\") on node \"crc\" DevicePath \"\""
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.735037 4971 generic.go:334] "Generic (PLEG): container finished" podID="eb2f9e79-c33f-434d-b113-1644d573345d" containerID="ab35ed65001be3e0ddee62372d0f23b08692c8e250a4bbefaa39d790ec0f9145" exitCode=0
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.735211 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2m25z" event={"ID":"eb2f9e79-c33f-434d-b113-1644d573345d","Type":"ContainerDied","Data":"ab35ed65001be3e0ddee62372d0f23b08692c8e250a4bbefaa39d790ec0f9145"}
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.736847 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2m25z" event={"ID":"eb2f9e79-c33f-434d-b113-1644d573345d","Type":"ContainerDied","Data":"fde703589fc74260c7c6a467e4930c2f2e7c2766cf0e0cd6d9f63533156e3d15"}
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.736940 4971 scope.go:117] "RemoveContainer" containerID="ab35ed65001be3e0ddee62372d0f23b08692c8e250a4bbefaa39d790ec0f9145"
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.735426 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2m25z"
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.768043 4971 scope.go:117] "RemoveContainer" containerID="6711169737fa4cb07f59324add83a1d09c9c34527f29e0fc7140098fc52d4117"
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.795077 4971 scope.go:117] "RemoveContainer" containerID="7734e433acc4d7b5347cfe20d3b58e05806c6f90175d230958376d0ba8e7727d"
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.861960 4971 scope.go:117] "RemoveContainer" containerID="ab35ed65001be3e0ddee62372d0f23b08692c8e250a4bbefaa39d790ec0f9145"
Nov 27 09:35:48 crc kubenswrapper[4971]: E1127 09:35:48.863073 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab35ed65001be3e0ddee62372d0f23b08692c8e250a4bbefaa39d790ec0f9145\": container with ID starting with ab35ed65001be3e0ddee62372d0f23b08692c8e250a4bbefaa39d790ec0f9145 not found: ID does not exist" containerID="ab35ed65001be3e0ddee62372d0f23b08692c8e250a4bbefaa39d790ec0f9145"
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.863329 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab35ed65001be3e0ddee62372d0f23b08692c8e250a4bbefaa39d790ec0f9145"} err="failed to get container status \"ab35ed65001be3e0ddee62372d0f23b08692c8e250a4bbefaa39d790ec0f9145\": rpc error: code = NotFound desc = could not find container \"ab35ed65001be3e0ddee62372d0f23b08692c8e250a4bbefaa39d790ec0f9145\": container with ID starting with ab35ed65001be3e0ddee62372d0f23b08692c8e250a4bbefaa39d790ec0f9145 not found: ID does not exist"
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.863464 4971 scope.go:117] "RemoveContainer" containerID="6711169737fa4cb07f59324add83a1d09c9c34527f29e0fc7140098fc52d4117"
Nov 27 09:35:48 crc kubenswrapper[4971]: E1127 09:35:48.864103 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6711169737fa4cb07f59324add83a1d09c9c34527f29e0fc7140098fc52d4117\": container with ID starting with 6711169737fa4cb07f59324add83a1d09c9c34527f29e0fc7140098fc52d4117 not found: ID does not exist" containerID="6711169737fa4cb07f59324add83a1d09c9c34527f29e0fc7140098fc52d4117"
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.864267 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6711169737fa4cb07f59324add83a1d09c9c34527f29e0fc7140098fc52d4117"} err="failed to get container status \"6711169737fa4cb07f59324add83a1d09c9c34527f29e0fc7140098fc52d4117\": rpc error: code = NotFound desc = could not find container \"6711169737fa4cb07f59324add83a1d09c9c34527f29e0fc7140098fc52d4117\": container with ID starting with 6711169737fa4cb07f59324add83a1d09c9c34527f29e0fc7140098fc52d4117 not found: ID does not exist"
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.864368 4971 scope.go:117] "RemoveContainer" containerID="7734e433acc4d7b5347cfe20d3b58e05806c6f90175d230958376d0ba8e7727d"
Nov 27 09:35:48 crc kubenswrapper[4971]: E1127 09:35:48.864947 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7734e433acc4d7b5347cfe20d3b58e05806c6f90175d230958376d0ba8e7727d\": container with ID starting with 7734e433acc4d7b5347cfe20d3b58e05806c6f90175d230958376d0ba8e7727d not found: ID does not exist" containerID="7734e433acc4d7b5347cfe20d3b58e05806c6f90175d230958376d0ba8e7727d"
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.865012 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7734e433acc4d7b5347cfe20d3b58e05806c6f90175d230958376d0ba8e7727d"} err="failed to get container status \"7734e433acc4d7b5347cfe20d3b58e05806c6f90175d230958376d0ba8e7727d\": rpc error: code = NotFound desc = could not find container \"7734e433acc4d7b5347cfe20d3b58e05806c6f90175d230958376d0ba8e7727d\": container with ID starting with 7734e433acc4d7b5347cfe20d3b58e05806c6f90175d230958376d0ba8e7727d not found: ID does not exist"
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.881728 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb2f9e79-c33f-434d-b113-1644d573345d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eb2f9e79-c33f-434d-b113-1644d573345d" (UID: "eb2f9e79-c33f-434d-b113-1644d573345d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:35:48 crc kubenswrapper[4971]: I1127 09:35:48.899239 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb2f9e79-c33f-434d-b113-1644d573345d-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 27 09:35:49 crc kubenswrapper[4971]: I1127 09:35:49.085781 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2m25z"]
Nov 27 09:35:49 crc kubenswrapper[4971]: I1127 09:35:49.100230 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2m25z"]
Nov 27 09:35:50 crc kubenswrapper[4971]: I1127 09:35:50.566948 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb2f9e79-c33f-434d-b113-1644d573345d" path="/var/lib/kubelet/pods/eb2f9e79-c33f-434d-b113-1644d573345d/volumes"
Nov 27 09:35:56 crc kubenswrapper[4971]: I1127 09:35:56.413103 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 09:35:56 crc kubenswrapper[4971]: I1127 09:35:56.414108 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 09:36:26 crc kubenswrapper[4971]: I1127 09:36:26.412902 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 09:36:26 crc kubenswrapper[4971]: I1127 09:36:26.413778 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 09:36:26 crc kubenswrapper[4971]: I1127 09:36:26.413846 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h"
Nov 27 09:36:26 crc kubenswrapper[4971]: I1127 09:36:26.415291 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 27 09:36:26 crc kubenswrapper[4971]: I1127 09:36:26.415377 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73" gracePeriod=600
Nov 27 09:36:26 crc kubenswrapper[4971]: E1127 09:36:26.555630 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:36:27 crc kubenswrapper[4971]: I1127 09:36:27.310549 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73" exitCode=0
Nov 27 09:36:27 crc kubenswrapper[4971]: I1127 09:36:27.310745 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73"}
Nov 27 09:36:27 crc kubenswrapper[4971]: I1127 09:36:27.310979 4971 scope.go:117] "RemoveContainer" containerID="5ffc606bf634b42b1e0a76f2a24b1cf5412e73d845ab4a6a20fd731e6c6294ae"
Nov 27 09:36:27 crc kubenswrapper[4971]: I1127 09:36:27.312189 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73"
Nov 27 09:36:27 crc kubenswrapper[4971]: E1127 09:36:27.312584 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:36:39 crc kubenswrapper[4971]: I1127 09:36:39.550990 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73"
Nov 27 09:36:39 crc kubenswrapper[4971]: E1127 09:36:39.552246 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:36:51 crc kubenswrapper[4971]: I1127 09:36:51.550314 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73"
Nov 27 09:36:51 crc kubenswrapper[4971]: E1127 09:36:51.551226 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:37:03 crc kubenswrapper[4971]: I1127 09:37:03.552197 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73"
Nov 27 09:37:03 crc kubenswrapper[4971]: E1127 09:37:03.553022 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:37:16 crc kubenswrapper[4971]: I1127 09:37:16.551060 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73"
Nov 27 09:37:16 crc kubenswrapper[4971]: E1127 09:37:16.552188 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:37:30 crc kubenswrapper[4971]: I1127 09:37:30.551440 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73"
Nov 27 09:37:30 crc kubenswrapper[4971]: E1127 09:37:30.554128 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:37:44 crc kubenswrapper[4971]: I1127 09:37:44.551404 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73"
Nov 27 09:37:44 crc kubenswrapper[4971]: E1127 09:37:44.552521 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:37:55 crc kubenswrapper[4971]: I1127 09:37:55.550455 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73"
Nov 27 09:37:55 crc kubenswrapper[4971]: E1127 09:37:55.551772
4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:38:07 crc kubenswrapper[4971]: I1127 09:38:07.550649 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73" Nov 27 09:38:07 crc kubenswrapper[4971]: E1127 09:38:07.551676 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:38:20 crc kubenswrapper[4971]: I1127 09:38:20.551137 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73" Nov 27 09:38:20 crc kubenswrapper[4971]: E1127 09:38:20.552176 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:38:32 crc kubenswrapper[4971]: I1127 09:38:32.568035 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73" Nov 27 09:38:32 crc kubenswrapper[4971]: E1127 09:38:32.569225 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:38:45 crc kubenswrapper[4971]: I1127 09:38:45.551798 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73" Nov 27 09:38:45 crc kubenswrapper[4971]: E1127 09:38:45.553243 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:39:00 crc kubenswrapper[4971]: I1127 09:39:00.550431 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73" Nov 27 09:39:00 crc kubenswrapper[4971]: E1127 09:39:00.551591 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:39:12 crc kubenswrapper[4971]: I1127 09:39:12.562672 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73" Nov 27 09:39:12 crc kubenswrapper[4971]: E1127 09:39:12.564093 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:39:12 crc kubenswrapper[4971]: I1127 09:39:12.873751 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jq27t"] Nov 27 09:39:12 crc kubenswrapper[4971]: E1127 09:39:12.874821 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb2f9e79-c33f-434d-b113-1644d573345d" containerName="extract-utilities" Nov 27 09:39:12 crc kubenswrapper[4971]: I1127 09:39:12.874844 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb2f9e79-c33f-434d-b113-1644d573345d" containerName="extract-utilities" Nov 27 09:39:12 crc kubenswrapper[4971]: E1127 09:39:12.874890 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb2f9e79-c33f-434d-b113-1644d573345d" containerName="extract-content" Nov 27 09:39:12 crc kubenswrapper[4971]: I1127 09:39:12.874898 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb2f9e79-c33f-434d-b113-1644d573345d" containerName="extract-content" Nov 27 09:39:12 crc kubenswrapper[4971]: E1127 09:39:12.874918 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb2f9e79-c33f-434d-b113-1644d573345d" containerName="registry-server" Nov 27 09:39:12 crc kubenswrapper[4971]: I1127 09:39:12.874926 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb2f9e79-c33f-434d-b113-1644d573345d" containerName="registry-server" Nov 27 09:39:12 crc kubenswrapper[4971]: I1127 09:39:12.880688 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb2f9e79-c33f-434d-b113-1644d573345d" containerName="registry-server" Nov 27 09:39:12 crc kubenswrapper[4971]: I1127 09:39:12.883560 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jq27t" Nov 27 09:39:12 crc kubenswrapper[4971]: I1127 09:39:12.901901 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jq27t"] Nov 27 09:39:12 crc kubenswrapper[4971]: I1127 09:39:12.987242 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d57323c1-cb19-4672-a803-2bb652ea3a6e-utilities\") pod \"redhat-operators-jq27t\" (UID: \"d57323c1-cb19-4672-a803-2bb652ea3a6e\") " pod="openshift-marketplace/redhat-operators-jq27t" Nov 27 09:39:12 crc kubenswrapper[4971]: I1127 09:39:12.987398 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d57323c1-cb19-4672-a803-2bb652ea3a6e-catalog-content\") pod \"redhat-operators-jq27t\" (UID: \"d57323c1-cb19-4672-a803-2bb652ea3a6e\") " pod="openshift-marketplace/redhat-operators-jq27t" Nov 27 09:39:12 crc kubenswrapper[4971]: I1127 09:39:12.987453 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jxvx\" (UniqueName: \"kubernetes.io/projected/d57323c1-cb19-4672-a803-2bb652ea3a6e-kube-api-access-7jxvx\") pod \"redhat-operators-jq27t\" (UID: \"d57323c1-cb19-4672-a803-2bb652ea3a6e\") " pod="openshift-marketplace/redhat-operators-jq27t" Nov 27 09:39:13 crc kubenswrapper[4971]: I1127 09:39:13.089737 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d57323c1-cb19-4672-a803-2bb652ea3a6e-catalog-content\") pod \"redhat-operators-jq27t\" (UID: \"d57323c1-cb19-4672-a803-2bb652ea3a6e\") " pod="openshift-marketplace/redhat-operators-jq27t" Nov 27 09:39:13 crc kubenswrapper[4971]: I1127 09:39:13.089843 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jxvx\" (UniqueName: \"kubernetes.io/projected/d57323c1-cb19-4672-a803-2bb652ea3a6e-kube-api-access-7jxvx\") pod \"redhat-operators-jq27t\" (UID: \"d57323c1-cb19-4672-a803-2bb652ea3a6e\") " pod="openshift-marketplace/redhat-operators-jq27t" Nov 27 09:39:13 crc kubenswrapper[4971]: I1127 09:39:13.089911 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d57323c1-cb19-4672-a803-2bb652ea3a6e-utilities\") pod \"redhat-operators-jq27t\" (UID: \"d57323c1-cb19-4672-a803-2bb652ea3a6e\") " pod="openshift-marketplace/redhat-operators-jq27t" Nov 27 09:39:13 crc kubenswrapper[4971]: I1127 09:39:13.090503 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d57323c1-cb19-4672-a803-2bb652ea3a6e-utilities\") pod \"redhat-operators-jq27t\" (UID: \"d57323c1-cb19-4672-a803-2bb652ea3a6e\") " pod="openshift-marketplace/redhat-operators-jq27t" Nov 27 09:39:13 crc kubenswrapper[4971]: I1127 09:39:13.091118 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d57323c1-cb19-4672-a803-2bb652ea3a6e-catalog-content\") pod \"redhat-operators-jq27t\" (UID: \"d57323c1-cb19-4672-a803-2bb652ea3a6e\") " pod="openshift-marketplace/redhat-operators-jq27t" Nov 27 09:39:13 crc kubenswrapper[4971]: I1127 09:39:13.118122 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-7jxvx\" (UniqueName: \"kubernetes.io/projected/d57323c1-cb19-4672-a803-2bb652ea3a6e-kube-api-access-7jxvx\") pod \"redhat-operators-jq27t\" (UID: \"d57323c1-cb19-4672-a803-2bb652ea3a6e\") " pod="openshift-marketplace/redhat-operators-jq27t" Nov 27 09:39:13 crc kubenswrapper[4971]: I1127 09:39:13.219553 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jq27t" Nov 27 09:39:13 crc kubenswrapper[4971]: I1127 09:39:13.780915 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jq27t"] Nov 27 09:39:14 crc kubenswrapper[4971]: I1127 09:39:14.497267 4971 generic.go:334] "Generic (PLEG): container finished" podID="d57323c1-cb19-4672-a803-2bb652ea3a6e" containerID="a9d7605c134d217e85619d056b1e77c981e6aec7289978adf0c637bf96e357eb" exitCode=0 Nov 27 09:39:14 crc kubenswrapper[4971]: I1127 09:39:14.497386 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jq27t" event={"ID":"d57323c1-cb19-4672-a803-2bb652ea3a6e","Type":"ContainerDied","Data":"a9d7605c134d217e85619d056b1e77c981e6aec7289978adf0c637bf96e357eb"} Nov 27 09:39:14 crc kubenswrapper[4971]: I1127 09:39:14.497725 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jq27t" event={"ID":"d57323c1-cb19-4672-a803-2bb652ea3a6e","Type":"ContainerStarted","Data":"9d387e7fab95ff8e7d3e812aa930a17190ba3d84c61887be03491e2e07f248b3"} Nov 27 09:39:15 crc kubenswrapper[4971]: I1127 09:39:15.514437 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jq27t" event={"ID":"d57323c1-cb19-4672-a803-2bb652ea3a6e","Type":"ContainerStarted","Data":"bbe879dfb8449b8ff9014cc24de6a0a61ceb36650ccd545b24e8fb59e29652b8"} Nov 27 09:39:17 crc kubenswrapper[4971]: I1127 09:39:17.538316 4971 generic.go:334] "Generic (PLEG): container finished" podID="d57323c1-cb19-4672-a803-2bb652ea3a6e" containerID="bbe879dfb8449b8ff9014cc24de6a0a61ceb36650ccd545b24e8fb59e29652b8" exitCode=0 Nov 27 09:39:17 crc kubenswrapper[4971]: I1127 09:39:17.538390 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jq27t" event={"ID":"d57323c1-cb19-4672-a803-2bb652ea3a6e","Type":"ContainerDied","Data":"bbe879dfb8449b8ff9014cc24de6a0a61ceb36650ccd545b24e8fb59e29652b8"} Nov 27 09:39:19 crc kubenswrapper[4971]: I1127 09:39:19.573369 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jq27t" event={"ID":"d57323c1-cb19-4672-a803-2bb652ea3a6e","Type":"ContainerStarted","Data":"02f51ee55f681da96f59ff5229ac51c32ecd34a98cfee4cfd827b674c478c5a1"} Nov 27 09:39:19 crc kubenswrapper[4971]: I1127 09:39:19.595652 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jq27t" podStartSLOduration=3.7288793350000002 podStartE2EDuration="7.595632991s" podCreationTimestamp="2025-11-27 09:39:12 +0000 UTC" firstStartedPulling="2025-11-27 09:39:14.500824402 +0000 UTC m=+9992.692868310" lastFinishedPulling="2025-11-27 09:39:18.367578048 +0000 UTC m=+9996.559621966" observedRunningTime="2025-11-27 09:39:19.593231642 +0000 UTC m=+9997.785275580" watchObservedRunningTime="2025-11-27 09:39:19.595632991 +0000 UTC m=+9997.787676899" Nov 27 09:39:23 crc kubenswrapper[4971]: I1127 09:39:23.220855 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jq27t" 
Nov 27 09:39:23 crc kubenswrapper[4971]: I1127 09:39:23.222208 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jq27t"
Nov 27 09:39:24 crc kubenswrapper[4971]: I1127 09:39:24.273209 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-jq27t" podUID="d57323c1-cb19-4672-a803-2bb652ea3a6e" containerName="registry-server" probeResult="failure" output=<
Nov 27 09:39:24 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s
Nov 27 09:39:24 crc kubenswrapper[4971]: >
Nov 27 09:39:27 crc kubenswrapper[4971]: I1127 09:39:27.551411 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73"
Nov 27 09:39:27 crc kubenswrapper[4971]: E1127 09:39:27.552036 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:39:33 crc kubenswrapper[4971]: I1127 09:39:33.281365 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jq27t"
Nov 27 09:39:33 crc kubenswrapper[4971]: I1127 09:39:33.345259 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jq27t"
Nov 27 09:39:33 crc kubenswrapper[4971]: I1127 09:39:33.533611 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jq27t"]
Nov 27 09:39:34 crc kubenswrapper[4971]: I1127 09:39:34.771140 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jq27t" podUID="d57323c1-cb19-4672-a803-2bb652ea3a6e" containerName="registry-server" containerID="cri-o://02f51ee55f681da96f59ff5229ac51c32ecd34a98cfee4cfd827b674c478c5a1" gracePeriod=2
Nov 27 09:39:35 crc kubenswrapper[4971]: I1127 09:39:35.324800 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jq27t"
Nov 27 09:39:35 crc kubenswrapper[4971]: I1127 09:39:35.454153 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d57323c1-cb19-4672-a803-2bb652ea3a6e-catalog-content\") pod \"d57323c1-cb19-4672-a803-2bb652ea3a6e\" (UID: \"d57323c1-cb19-4672-a803-2bb652ea3a6e\") "
Nov 27 09:39:35 crc kubenswrapper[4971]: I1127 09:39:35.454276 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jxvx\" (UniqueName: \"kubernetes.io/projected/d57323c1-cb19-4672-a803-2bb652ea3a6e-kube-api-access-7jxvx\") pod \"d57323c1-cb19-4672-a803-2bb652ea3a6e\" (UID: \"d57323c1-cb19-4672-a803-2bb652ea3a6e\") "
Nov 27 09:39:35 crc kubenswrapper[4971]: I1127 09:39:35.454718 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d57323c1-cb19-4672-a803-2bb652ea3a6e-utilities\") pod \"d57323c1-cb19-4672-a803-2bb652ea3a6e\" (UID: \"d57323c1-cb19-4672-a803-2bb652ea3a6e\") "
Nov 27 09:39:35 crc kubenswrapper[4971]: I1127 09:39:35.456274 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d57323c1-cb19-4672-a803-2bb652ea3a6e-utilities" (OuterVolumeSpecName: "utilities") pod "d57323c1-cb19-4672-a803-2bb652ea3a6e" (UID: "d57323c1-cb19-4672-a803-2bb652ea3a6e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:39:35 crc kubenswrapper[4971]: I1127 09:39:35.474438 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d57323c1-cb19-4672-a803-2bb652ea3a6e-kube-api-access-7jxvx" (OuterVolumeSpecName: "kube-api-access-7jxvx") pod "d57323c1-cb19-4672-a803-2bb652ea3a6e" (UID: "d57323c1-cb19-4672-a803-2bb652ea3a6e"). InnerVolumeSpecName "kube-api-access-7jxvx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:39:35 crc kubenswrapper[4971]: I1127 09:39:35.557678 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d57323c1-cb19-4672-a803-2bb652ea3a6e-utilities\") on node \"crc\" DevicePath \"\""
Nov 27 09:39:35 crc kubenswrapper[4971]: I1127 09:39:35.557720 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jxvx\" (UniqueName: \"kubernetes.io/projected/d57323c1-cb19-4672-a803-2bb652ea3a6e-kube-api-access-7jxvx\") on node \"crc\" DevicePath \"\""
Nov 27 09:39:35 crc kubenswrapper[4971]: I1127 09:39:35.559216 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d57323c1-cb19-4672-a803-2bb652ea3a6e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d57323c1-cb19-4672-a803-2bb652ea3a6e" (UID: "d57323c1-cb19-4672-a803-2bb652ea3a6e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:39:35 crc kubenswrapper[4971]: I1127 09:39:35.659969 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d57323c1-cb19-4672-a803-2bb652ea3a6e-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 27 09:39:35 crc kubenswrapper[4971]: I1127 09:39:35.786455 4971 generic.go:334] "Generic (PLEG): container finished" podID="d57323c1-cb19-4672-a803-2bb652ea3a6e" containerID="02f51ee55f681da96f59ff5229ac51c32ecd34a98cfee4cfd827b674c478c5a1" exitCode=0
Nov 27 09:39:35 crc kubenswrapper[4971]: I1127 09:39:35.786551 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jq27t" event={"ID":"d57323c1-cb19-4672-a803-2bb652ea3a6e","Type":"ContainerDied","Data":"02f51ee55f681da96f59ff5229ac51c32ecd34a98cfee4cfd827b674c478c5a1"}
Nov 27 09:39:35 crc kubenswrapper[4971]: I1127 09:39:35.786607 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jq27t"
Nov 27 09:39:35 crc kubenswrapper[4971]: I1127 09:39:35.786651 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jq27t" event={"ID":"d57323c1-cb19-4672-a803-2bb652ea3a6e","Type":"ContainerDied","Data":"9d387e7fab95ff8e7d3e812aa930a17190ba3d84c61887be03491e2e07f248b3"}
Nov 27 09:39:35 crc kubenswrapper[4971]: I1127 09:39:35.786688 4971 scope.go:117] "RemoveContainer" containerID="02f51ee55f681da96f59ff5229ac51c32ecd34a98cfee4cfd827b674c478c5a1"
Nov 27 09:39:35 crc kubenswrapper[4971]: I1127 09:39:35.819851 4971 scope.go:117] "RemoveContainer" containerID="bbe879dfb8449b8ff9014cc24de6a0a61ceb36650ccd545b24e8fb59e29652b8"
Nov 27 09:39:35 crc kubenswrapper[4971]: I1127 09:39:35.836903 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jq27t"]
Nov 27 09:39:35 crc kubenswrapper[4971]: I1127 09:39:35.848001 4971 scope.go:117] "RemoveContainer" containerID="a9d7605c134d217e85619d056b1e77c981e6aec7289978adf0c637bf96e357eb"
Nov 27 09:39:35 crc kubenswrapper[4971]: I1127 09:39:35.851133 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jq27t"]
Nov 27 09:39:36 crc kubenswrapper[4971]: I1127 09:39:36.569681 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d57323c1-cb19-4672-a803-2bb652ea3a6e" path="/var/lib/kubelet/pods/d57323c1-cb19-4672-a803-2bb652ea3a6e/volumes"
Nov 27 09:39:36 crc kubenswrapper[4971]: I1127 09:39:36.631980 4971 scope.go:117] "RemoveContainer" containerID="02f51ee55f681da96f59ff5229ac51c32ecd34a98cfee4cfd827b674c478c5a1"
Nov 27 09:39:36 crc kubenswrapper[4971]: E1127 09:39:36.632683 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02f51ee55f681da96f59ff5229ac51c32ecd34a98cfee4cfd827b674c478c5a1\": container with ID starting with 02f51ee55f681da96f59ff5229ac51c32ecd34a98cfee4cfd827b674c478c5a1 not found: ID does not exist" containerID="02f51ee55f681da96f59ff5229ac51c32ecd34a98cfee4cfd827b674c478c5a1"
Nov 27 09:39:36 crc kubenswrapper[4971]: I1127 09:39:36.632753 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02f51ee55f681da96f59ff5229ac51c32ecd34a98cfee4cfd827b674c478c5a1"} err="failed to get container status \"02f51ee55f681da96f59ff5229ac51c32ecd34a98cfee4cfd827b674c478c5a1\": rpc error: code = NotFound desc = could not find container \"02f51ee55f681da96f59ff5229ac51c32ecd34a98cfee4cfd827b674c478c5a1\": container with ID starting with 02f51ee55f681da96f59ff5229ac51c32ecd34a98cfee4cfd827b674c478c5a1 not found: ID does not exist"
Nov 27 09:39:36 crc kubenswrapper[4971]: I1127 09:39:36.632790 4971 scope.go:117] "RemoveContainer" containerID="bbe879dfb8449b8ff9014cc24de6a0a61ceb36650ccd545b24e8fb59e29652b8"
Nov 27 09:39:36 crc kubenswrapper[4971]: E1127 09:39:36.633339 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbe879dfb8449b8ff9014cc24de6a0a61ceb36650ccd545b24e8fb59e29652b8\": container with ID starting with bbe879dfb8449b8ff9014cc24de6a0a61ceb36650ccd545b24e8fb59e29652b8 not found: ID does not exist" containerID="bbe879dfb8449b8ff9014cc24de6a0a61ceb36650ccd545b24e8fb59e29652b8"
Nov 27 09:39:36 crc kubenswrapper[4971]: I1127 09:39:36.633394 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbe879dfb8449b8ff9014cc24de6a0a61ceb36650ccd545b24e8fb59e29652b8"} err="failed to get container status \"bbe879dfb8449b8ff9014cc24de6a0a61ceb36650ccd545b24e8fb59e29652b8\": rpc error: code = NotFound desc = could not find container \"bbe879dfb8449b8ff9014cc24de6a0a61ceb36650ccd545b24e8fb59e29652b8\": container with ID starting with bbe879dfb8449b8ff9014cc24de6a0a61ceb36650ccd545b24e8fb59e29652b8 not found: ID does not exist"
Nov 27 09:39:36 crc kubenswrapper[4971]: I1127 09:39:36.633425 4971 scope.go:117] "RemoveContainer" containerID="a9d7605c134d217e85619d056b1e77c981e6aec7289978adf0c637bf96e357eb"
Nov 27 09:39:36 crc kubenswrapper[4971]: E1127 09:39:36.633750 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9d7605c134d217e85619d056b1e77c981e6aec7289978adf0c637bf96e357eb\": container with ID starting with a9d7605c134d217e85619d056b1e77c981e6aec7289978adf0c637bf96e357eb not found: ID does not exist" containerID="a9d7605c134d217e85619d056b1e77c981e6aec7289978adf0c637bf96e357eb"
Nov 27 09:39:36 crc kubenswrapper[4971]: I1127 09:39:36.633786 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9d7605c134d217e85619d056b1e77c981e6aec7289978adf0c637bf96e357eb"} err="failed to get container status \"a9d7605c134d217e85619d056b1e77c981e6aec7289978adf0c637bf96e357eb\": rpc error: code = NotFound desc = could not find container \"a9d7605c134d217e85619d056b1e77c981e6aec7289978adf0c637bf96e357eb\": container with ID starting with a9d7605c134d217e85619d056b1e77c981e6aec7289978adf0c637bf96e357eb not found: ID does not exist"
Nov 27 09:39:41 crc kubenswrapper[4971]: I1127 09:39:41.551030 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73"
Nov 27 09:39:41 crc kubenswrapper[4971]: E1127 09:39:41.552323 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:39:56 crc kubenswrapper[4971]: I1127 09:39:56.550307 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73"
Nov 27 09:39:56 crc kubenswrapper[4971]: E1127 09:39:56.551542 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:40:07 crc kubenswrapper[4971]: I1127 09:40:07.551111 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73"
Nov 27 09:40:07 crc kubenswrapper[4971]: E1127 09:40:07.552611 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:40:09 crc kubenswrapper[4971]: I1127 09:40:09.237507 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2lx5q"]
Nov 27 09:40:09 crc kubenswrapper[4971]: E1127 09:40:09.238468 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d57323c1-cb19-4672-a803-2bb652ea3a6e" containerName="extract-content"
Nov 27 09:40:09 crc kubenswrapper[4971]: I1127 09:40:09.238484 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d57323c1-cb19-4672-a803-2bb652ea3a6e" containerName="extract-content"
Nov 27 09:40:09 crc kubenswrapper[4971]: E1127 09:40:09.238512 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d57323c1-cb19-4672-a803-2bb652ea3a6e" containerName="registry-server"
Nov 27 09:40:09 crc kubenswrapper[4971]: I1127 09:40:09.238520 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d57323c1-cb19-4672-a803-2bb652ea3a6e" containerName="registry-server"
Nov 27 09:40:09 crc kubenswrapper[4971]: E1127 09:40:09.238555 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d57323c1-cb19-4672-a803-2bb652ea3a6e" containerName="extract-utilities"
Nov 27 09:40:09 crc kubenswrapper[4971]: I1127 09:40:09.238563 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="d57323c1-cb19-4672-a803-2bb652ea3a6e" containerName="extract-utilities"
Nov 27 09:40:09 crc kubenswrapper[4971]: I1127 09:40:09.238820 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="d57323c1-cb19-4672-a803-2bb652ea3a6e" containerName="registry-server"
Nov 27 09:40:09 crc kubenswrapper[4971]: I1127 09:40:09.241117 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2lx5q"
Nov 27 09:40:09 crc kubenswrapper[4971]: I1127 09:40:09.319840 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2lx5q"]
Nov 27 09:40:09 crc kubenswrapper[4971]: I1127 09:40:09.332504 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8835b0f2-9588-4250-b6bb-355e55445496-utilities\") pod \"certified-operators-2lx5q\" (UID: \"8835b0f2-9588-4250-b6bb-355e55445496\") " pod="openshift-marketplace/certified-operators-2lx5q"
Nov 27 09:40:09 crc kubenswrapper[4971]: I1127 09:40:09.332643 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8835b0f2-9588-4250-b6bb-355e55445496-catalog-content\") pod \"certified-operators-2lx5q\" (UID: \"8835b0f2-9588-4250-b6bb-355e55445496\") " pod="openshift-marketplace/certified-operators-2lx5q"
Nov 27 09:40:09 crc kubenswrapper[4971]: I1127 09:40:09.332729 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpmdc\" (UniqueName: \"kubernetes.io/projected/8835b0f2-9588-4250-b6bb-355e55445496-kube-api-access-hpmdc\") pod \"certified-operators-2lx5q\" (UID: \"8835b0f2-9588-4250-b6bb-355e55445496\") " pod="openshift-marketplace/certified-operators-2lx5q"
Nov 27 09:40:09 crc kubenswrapper[4971]: I1127 09:40:09.435513 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8835b0f2-9588-4250-b6bb-355e55445496-utilities\") pod \"certified-operators-2lx5q\" (UID: \"8835b0f2-9588-4250-b6bb-355e55445496\") " pod="openshift-marketplace/certified-operators-2lx5q"
Nov 27 09:40:09 crc kubenswrapper[4971]: I1127 09:40:09.435636 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8835b0f2-9588-4250-b6bb-355e55445496-catalog-content\") pod \"certified-operators-2lx5q\" (UID: \"8835b0f2-9588-4250-b6bb-355e55445496\") " pod="openshift-marketplace/certified-operators-2lx5q"
Nov 27 09:40:09 crc kubenswrapper[4971]: I1127 09:40:09.435757 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpmdc\" (UniqueName: \"kubernetes.io/projected/8835b0f2-9588-4250-b6bb-355e55445496-kube-api-access-hpmdc\") pod \"certified-operators-2lx5q\" (UID: \"8835b0f2-9588-4250-b6bb-355e55445496\") " pod="openshift-marketplace/certified-operators-2lx5q"
Nov 27 09:40:09 crc kubenswrapper[4971]: I1127 09:40:09.436378 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8835b0f2-9588-4250-b6bb-355e55445496-utilities\") pod \"certified-operators-2lx5q\" (UID: \"8835b0f2-9588-4250-b6bb-355e55445496\") " pod="openshift-marketplace/certified-operators-2lx5q"
Nov 27 09:40:09 crc kubenswrapper[4971]: I1127 09:40:09.437032 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8835b0f2-9588-4250-b6bb-355e55445496-catalog-content\") pod \"certified-operators-2lx5q\" (UID: \"8835b0f2-9588-4250-b6bb-355e55445496\") " pod="openshift-marketplace/certified-operators-2lx5q"
Nov 27 09:40:09 crc kubenswrapper[4971]: I1127 09:40:09.486255 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpmdc\" (UniqueName: \"kubernetes.io/projected/8835b0f2-9588-4250-b6bb-355e55445496-kube-api-access-hpmdc\") pod \"certified-operators-2lx5q\" (UID: \"8835b0f2-9588-4250-b6bb-355e55445496\") " pod="openshift-marketplace/certified-operators-2lx5q"
Nov 27 09:40:09 crc kubenswrapper[4971]: I1127 09:40:09.611193 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2lx5q"
Nov 27 09:40:10 crc kubenswrapper[4971]: I1127 09:40:10.188196 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2lx5q"]
Nov 27 09:40:10 crc kubenswrapper[4971]: I1127 09:40:10.245709 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2lx5q" event={"ID":"8835b0f2-9588-4250-b6bb-355e55445496","Type":"ContainerStarted","Data":"c449ac3e46caf5059f1814d309f9937826b3bacc088730cde25844f278b9b3ed"}
Nov 27 09:40:11 crc kubenswrapper[4971]: I1127 09:40:11.258444 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2lx5q" event={"ID":"8835b0f2-9588-4250-b6bb-355e55445496","Type":"ContainerDied","Data":"f95ad27a6961ac15f3709f7c772365138ca44db4d46aebcd26e2800236fb457a"}
Nov 27 09:40:11 crc kubenswrapper[4971]: I1127 09:40:11.258374 4971 generic.go:334] "Generic (PLEG): container finished" podID="8835b0f2-9588-4250-b6bb-355e55445496" containerID="f95ad27a6961ac15f3709f7c772365138ca44db4d46aebcd26e2800236fb457a" exitCode=0
Nov 27 09:40:16 crc kubenswrapper[4971]: I1127 09:40:16.348412 4971 generic.go:334] "Generic (PLEG): container finished" podID="8835b0f2-9588-4250-b6bb-355e55445496" containerID="9421bc666f55a135534b3921685d6a16caf27cbb0fafc48cd81b83d07387f7b5" exitCode=0
Nov 27 09:40:16 crc kubenswrapper[4971]: I1127 09:40:16.348499 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2lx5q" event={"ID":"8835b0f2-9588-4250-b6bb-355e55445496","Type":"ContainerDied","Data":"9421bc666f55a135534b3921685d6a16caf27cbb0fafc48cd81b83d07387f7b5"}
Nov 27 09:40:17 crc kubenswrapper[4971]: I1127 09:40:17.367000 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2lx5q" event={"ID":"8835b0f2-9588-4250-b6bb-355e55445496","Type":"ContainerStarted","Data":"e869c4321dcea8b5a584c30ef3c80863a15e82374528595cecf9959b40c6c86c"}
Nov 27 09:40:17 crc kubenswrapper[4971]: I1127 09:40:17.400305 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2lx5q" podStartSLOduration=2.750256608 podStartE2EDuration="8.400279657s" podCreationTimestamp="2025-11-27 09:40:09 +0000 UTC" firstStartedPulling="2025-11-27 09:40:11.261522538 +0000 UTC m=+10049.453566456" lastFinishedPulling="2025-11-27 09:40:16.911545577 +0000 UTC m=+10055.103589505" observedRunningTime="2025-11-27 09:40:17.389022185 +0000 UTC m=+10055.581066143" watchObservedRunningTime="2025-11-27 09:40:17.400279657 +0000 UTC m=+10055.592323585"
Nov 27 09:40:18 crc kubenswrapper[4971]: I1127 09:40:18.551170 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73"
Nov 27 09:40:18 crc kubenswrapper[4971]: E1127 09:40:18.551518 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:40:19 crc kubenswrapper[4971]: I1127 09:40:19.611740 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2lx5q"
Nov 27 09:40:19 crc kubenswrapper[4971]: I1127 09:40:19.612335 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2lx5q"
Nov 27 09:40:19 crc kubenswrapper[4971]: I1127 09:40:19.665351 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2lx5q"
Nov 27 09:40:29 crc kubenswrapper[4971]: I1127 09:40:29.684858 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2lx5q"
Nov 27 09:40:29 crc kubenswrapper[4971]: I1127 09:40:29.794792 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2lx5q"]
Nov 27 09:40:29 crc kubenswrapper[4971]: I1127 09:40:29.881556 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cdwpg"]
Nov 27 09:40:29 crc kubenswrapper[4971]: I1127 09:40:29.881872 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cdwpg" podUID="0621c077-a96d-4425-a32f-d4a879a67814" containerName="registry-server" containerID="cri-o://0907a72de7e64d12ea692451b6d6a54e274f164526052aa5c74bf8271921a8ec" gracePeriod=2
Nov 27 09:40:30 crc kubenswrapper[4971]: I1127 09:40:30.540585 4971 generic.go:334] "Generic (PLEG): container finished" podID="0621c077-a96d-4425-a32f-d4a879a67814" containerID="0907a72de7e64d12ea692451b6d6a54e274f164526052aa5c74bf8271921a8ec" exitCode=0
Nov 27 09:40:30 crc kubenswrapper[4971]: I1127 09:40:30.540693 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cdwpg" event={"ID":"0621c077-a96d-4425-a32f-d4a879a67814","Type":"ContainerDied","Data":"0907a72de7e64d12ea692451b6d6a54e274f164526052aa5c74bf8271921a8ec"}
Nov 27 09:40:30 crc kubenswrapper[4971]: I1127 09:40:30.550782 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73"
Nov 27 09:40:30 crc kubenswrapper[4971]: E1127 09:40:30.551069 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:40:30 crc kubenswrapper[4971]: I1127 09:40:30.983923 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cdwpg"
Nov 27 09:40:31 crc kubenswrapper[4971]: I1127 09:40:31.093867 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtbdv\" (UniqueName: \"kubernetes.io/projected/0621c077-a96d-4425-a32f-d4a879a67814-kube-api-access-mtbdv\") pod \"0621c077-a96d-4425-a32f-d4a879a67814\" (UID: \"0621c077-a96d-4425-a32f-d4a879a67814\") "
Nov 27 09:40:31 crc kubenswrapper[4971]: I1127 09:40:31.094030 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0621c077-a96d-4425-a32f-d4a879a67814-utilities\") pod \"0621c077-a96d-4425-a32f-d4a879a67814\" (UID: \"0621c077-a96d-4425-a32f-d4a879a67814\") "
Nov 27 09:40:31 crc kubenswrapper[4971]: I1127 09:40:31.094183 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0621c077-a96d-4425-a32f-d4a879a67814-catalog-content\") pod \"0621c077-a96d-4425-a32f-d4a879a67814\" (UID: \"0621c077-a96d-4425-a32f-d4a879a67814\") "
Nov 27 09:40:31 crc kubenswrapper[4971]: I1127 09:40:31.094931 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0621c077-a96d-4425-a32f-d4a879a67814-utilities" (OuterVolumeSpecName: "utilities") pod "0621c077-a96d-4425-a32f-d4a879a67814" (UID: "0621c077-a96d-4425-a32f-d4a879a67814"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:40:31 crc kubenswrapper[4971]: I1127 09:40:31.104875 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0621c077-a96d-4425-a32f-d4a879a67814-kube-api-access-mtbdv" (OuterVolumeSpecName: "kube-api-access-mtbdv") pod "0621c077-a96d-4425-a32f-d4a879a67814" (UID: "0621c077-a96d-4425-a32f-d4a879a67814"). InnerVolumeSpecName "kube-api-access-mtbdv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:40:31 crc kubenswrapper[4971]: I1127 09:40:31.156344 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0621c077-a96d-4425-a32f-d4a879a67814-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0621c077-a96d-4425-a32f-d4a879a67814" (UID: "0621c077-a96d-4425-a32f-d4a879a67814"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:40:31 crc kubenswrapper[4971]: I1127 09:40:31.197284 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtbdv\" (UniqueName: \"kubernetes.io/projected/0621c077-a96d-4425-a32f-d4a879a67814-kube-api-access-mtbdv\") on node \"crc\" DevicePath \"\""
Nov 27 09:40:31 crc kubenswrapper[4971]: I1127 09:40:31.197328 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0621c077-a96d-4425-a32f-d4a879a67814-utilities\") on node \"crc\" DevicePath \"\""
Nov 27 09:40:31 crc kubenswrapper[4971]: I1127 09:40:31.197342 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0621c077-a96d-4425-a32f-d4a879a67814-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 27 09:40:31 crc kubenswrapper[4971]: I1127 09:40:31.553786 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cdwpg" event={"ID":"0621c077-a96d-4425-a32f-d4a879a67814","Type":"ContainerDied","Data":"798f993ccb5debf964f37d1cad756f94b5f329f26122fc6006cbceebda387e40"}
Nov 27 09:40:31 crc kubenswrapper[4971]: I1127 09:40:31.553841 4971 scope.go:117] "RemoveContainer" containerID="0907a72de7e64d12ea692451b6d6a54e274f164526052aa5c74bf8271921a8ec"
Nov 27 09:40:31 crc kubenswrapper[4971]: I1127 09:40:31.554047 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cdwpg"
Nov 27 09:40:31 crc kubenswrapper[4971]: I1127 09:40:31.593496 4971 scope.go:117] "RemoveContainer" containerID="fa09c4121b944be5ecc424054046cccf3c158484c40d783406eb973afc58f586"
Nov 27 09:40:31 crc kubenswrapper[4971]: I1127 09:40:31.598669 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cdwpg"]
Nov 27 09:40:31 crc kubenswrapper[4971]: I1127 09:40:31.611889 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cdwpg"]
Nov 27 09:40:31 crc kubenswrapper[4971]: I1127 09:40:31.615811 4971 scope.go:117] "RemoveContainer" containerID="6949a7d201ee578f57f11413e8d7599e2b33c4a2fd34a8da7409fbe734114a6c"
Nov 27 09:40:32 crc kubenswrapper[4971]: I1127 09:40:32.563704 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0621c077-a96d-4425-a32f-d4a879a67814" path="/var/lib/kubelet/pods/0621c077-a96d-4425-a32f-d4a879a67814/volumes"
Nov 27 09:40:35 crc kubenswrapper[4971]: I1127 09:40:35.154513 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-szhm9"]
Nov 27 09:40:35 crc kubenswrapper[4971]: E1127 09:40:35.155785 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0621c077-a96d-4425-a32f-d4a879a67814" containerName="registry-server"
Nov 27 09:40:35 crc kubenswrapper[4971]: I1127 09:40:35.155801 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0621c077-a96d-4425-a32f-d4a879a67814" containerName="registry-server"
Nov 27 09:40:35 crc kubenswrapper[4971]: E1127 09:40:35.155819 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0621c077-a96d-4425-a32f-d4a879a67814" containerName="extract-content"
Nov 27 09:40:35 crc kubenswrapper[4971]: I1127 09:40:35.155826 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0621c077-a96d-4425-a32f-d4a879a67814" containerName="extract-content"
Nov 27 09:40:35 crc kubenswrapper[4971]: E1127 09:40:35.155880 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0621c077-a96d-4425-a32f-d4a879a67814" containerName="extract-utilities"
Nov 27 09:40:35 crc kubenswrapper[4971]: I1127 09:40:35.155886 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0621c077-a96d-4425-a32f-d4a879a67814" containerName="extract-utilities"
Nov 27 09:40:35 crc kubenswrapper[4971]: I1127 09:40:35.156104 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0621c077-a96d-4425-a32f-d4a879a67814" containerName="registry-server"
Nov 27 09:40:35 crc kubenswrapper[4971]: I1127 09:40:35.157877 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-szhm9"
Nov 27 09:40:35 crc kubenswrapper[4971]: I1127 09:40:35.169317 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-szhm9"]
Nov 27 09:40:35 crc kubenswrapper[4971]: I1127 09:40:35.300688 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0315a2a2-6b59-4db6-95e6-0fb58ca084cb-catalog-content\") pod \"redhat-marketplace-szhm9\" (UID: \"0315a2a2-6b59-4db6-95e6-0fb58ca084cb\") " pod="openshift-marketplace/redhat-marketplace-szhm9"
Nov 27 09:40:35 crc kubenswrapper[4971]: I1127 09:40:35.300940 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6srvg\" (UniqueName: \"kubernetes.io/projected/0315a2a2-6b59-4db6-95e6-0fb58ca084cb-kube-api-access-6srvg\") pod \"redhat-marketplace-szhm9\" (UID: \"0315a2a2-6b59-4db6-95e6-0fb58ca084cb\") " pod="openshift-marketplace/redhat-marketplace-szhm9"
Nov 27 09:40:35 crc kubenswrapper[4971]: I1127 09:40:35.301014 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0315a2a2-6b59-4db6-95e6-0fb58ca084cb-utilities\") pod \"redhat-marketplace-szhm9\" (UID: \"0315a2a2-6b59-4db6-95e6-0fb58ca084cb\") " pod="openshift-marketplace/redhat-marketplace-szhm9"
Nov 27 09:40:35 crc kubenswrapper[4971]: I1127 09:40:35.403188 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0315a2a2-6b59-4db6-95e6-0fb58ca084cb-catalog-content\") pod \"redhat-marketplace-szhm9\" (UID: \"0315a2a2-6b59-4db6-95e6-0fb58ca084cb\") " pod="openshift-marketplace/redhat-marketplace-szhm9"
Nov 27 09:40:35 crc kubenswrapper[4971]: I1127 09:40:35.403336 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6srvg\" (UniqueName: \"kubernetes.io/projected/0315a2a2-6b59-4db6-95e6-0fb58ca084cb-kube-api-access-6srvg\") pod \"redhat-marketplace-szhm9\" (UID: \"0315a2a2-6b59-4db6-95e6-0fb58ca084cb\") " pod="openshift-marketplace/redhat-marketplace-szhm9"
Nov 27 09:40:35 crc kubenswrapper[4971]: I1127 09:40:35.403400 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0315a2a2-6b59-4db6-95e6-0fb58ca084cb-utilities\") pod \"redhat-marketplace-szhm9\" (UID: \"0315a2a2-6b59-4db6-95e6-0fb58ca084cb\") " pod="openshift-marketplace/redhat-marketplace-szhm9"
Nov 27 09:40:35 crc kubenswrapper[4971]: I1127 09:40:35.403884 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0315a2a2-6b59-4db6-95e6-0fb58ca084cb-utilities\") pod \"redhat-marketplace-szhm9\" (UID: \"0315a2a2-6b59-4db6-95e6-0fb58ca084cb\") " pod="openshift-marketplace/redhat-marketplace-szhm9"
Nov 27 09:40:35 crc kubenswrapper[4971]: I1127 09:40:35.403891 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0315a2a2-6b59-4db6-95e6-0fb58ca084cb-catalog-content\") pod \"redhat-marketplace-szhm9\" (UID: \"0315a2a2-6b59-4db6-95e6-0fb58ca084cb\") " pod="openshift-marketplace/redhat-marketplace-szhm9"
Nov 27 09:40:35 crc kubenswrapper[4971]: I1127 09:40:35.436649 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6srvg\" (UniqueName: \"kubernetes.io/projected/0315a2a2-6b59-4db6-95e6-0fb58ca084cb-kube-api-access-6srvg\") pod \"redhat-marketplace-szhm9\" (UID: \"0315a2a2-6b59-4db6-95e6-0fb58ca084cb\") " pod="openshift-marketplace/redhat-marketplace-szhm9"
Nov 27 09:40:35 crc kubenswrapper[4971]: I1127 09:40:35.497995 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-szhm9"
Nov 27 09:40:36 crc kubenswrapper[4971]: I1127 09:40:36.114083 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-szhm9"]
Nov 27 09:40:36 crc kubenswrapper[4971]: I1127 09:40:36.630256 4971 generic.go:334] "Generic (PLEG): container finished" podID="0315a2a2-6b59-4db6-95e6-0fb58ca084cb" containerID="c18dd50f6d1d7d5b9db4f0af5085fe22ccd8c75c9426b071841b1c3487da69ea" exitCode=0
Nov 27 09:40:36 crc kubenswrapper[4971]: I1127 09:40:36.630365 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-szhm9" event={"ID":"0315a2a2-6b59-4db6-95e6-0fb58ca084cb","Type":"ContainerDied","Data":"c18dd50f6d1d7d5b9db4f0af5085fe22ccd8c75c9426b071841b1c3487da69ea"}
Nov 27 09:40:36 crc kubenswrapper[4971]: I1127 09:40:36.630682 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-szhm9" event={"ID":"0315a2a2-6b59-4db6-95e6-0fb58ca084cb","Type":"ContainerStarted","Data":"8ff6c81fe11c6340ff70da2634bbb508ba3888b39d3044c1958d7d0b32ed6ca0"}
Nov 27 09:40:36 crc kubenswrapper[4971]: I1127 09:40:36.633511 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 27 09:40:38 crc kubenswrapper[4971]: I1127 09:40:38.656394 4971 generic.go:334] "Generic (PLEG): container finished" podID="0315a2a2-6b59-4db6-95e6-0fb58ca084cb" containerID="cca0f324a643360c7244cc49c4aeb4c4e665dd9917b498ec08106fe6d71c024b" exitCode=0
Nov 27 09:40:38 crc kubenswrapper[4971]: I1127 09:40:38.656462 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-szhm9" event={"ID":"0315a2a2-6b59-4db6-95e6-0fb58ca084cb","Type":"ContainerDied","Data":"cca0f324a643360c7244cc49c4aeb4c4e665dd9917b498ec08106fe6d71c024b"}
Nov 27 09:40:39 crc kubenswrapper[4971]: I1127 09:40:39.671282 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-szhm9" event={"ID":"0315a2a2-6b59-4db6-95e6-0fb58ca084cb","Type":"ContainerStarted","Data":"ca5cefc9beb56b4540629e95e8861b36dce9352226417afb8d59976c45f7c18a"}
Nov 27 09:40:39 crc kubenswrapper[4971]: I1127 09:40:39.706309 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-szhm9" podStartSLOduration=2.207981866 podStartE2EDuration="4.706277917s" podCreationTimestamp="2025-11-27 09:40:35 +0000 UTC" firstStartedPulling="2025-11-27 09:40:36.633211877 +0000 UTC m=+10074.825255795" lastFinishedPulling="2025-11-27 09:40:39.131507928 +0000 UTC m=+10077.323551846" observedRunningTime="2025-11-27 09:40:39.693670947 +0000 UTC m=+10077.885714875" watchObservedRunningTime="2025-11-27 09:40:39.706277917 +0000 UTC m=+10077.898321835"
Nov 27 09:40:43 crc kubenswrapper[4971]: I1127 09:40:43.550609 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73"
Nov 27 09:40:43 crc kubenswrapper[4971]: E1127 09:40:43.551639 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:40:45 crc kubenswrapper[4971]: I1127 09:40:45.498435 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-szhm9"
Nov 27 09:40:45 crc kubenswrapper[4971]: I1127 09:40:45.498947 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-szhm9"
Nov 27 09:40:45 crc kubenswrapper[4971]: I1127 09:40:45.560130 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-szhm9"
Nov 27 09:40:45 crc kubenswrapper[4971]: I1127 09:40:45.807952 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-szhm9"
Nov 27 09:40:46 crc kubenswrapper[4971]: I1127 09:40:46.138353 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-szhm9"]
Nov 27 09:40:47 crc kubenswrapper[4971]: I1127 09:40:47.780317 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-szhm9" podUID="0315a2a2-6b59-4db6-95e6-0fb58ca084cb" containerName="registry-server" containerID="cri-o://ca5cefc9beb56b4540629e95e8861b36dce9352226417afb8d59976c45f7c18a" gracePeriod=2
Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.340443 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-szhm9"
Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.469490 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6srvg\" (UniqueName: \"kubernetes.io/projected/0315a2a2-6b59-4db6-95e6-0fb58ca084cb-kube-api-access-6srvg\") pod \"0315a2a2-6b59-4db6-95e6-0fb58ca084cb\" (UID: \"0315a2a2-6b59-4db6-95e6-0fb58ca084cb\") "
Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.469633 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0315a2a2-6b59-4db6-95e6-0fb58ca084cb-utilities\") pod \"0315a2a2-6b59-4db6-95e6-0fb58ca084cb\" (UID: \"0315a2a2-6b59-4db6-95e6-0fb58ca084cb\") "
Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.469737 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0315a2a2-6b59-4db6-95e6-0fb58ca084cb-catalog-content\") pod \"0315a2a2-6b59-4db6-95e6-0fb58ca084cb\" (UID: \"0315a2a2-6b59-4db6-95e6-0fb58ca084cb\") "
Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.471002 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0315a2a2-6b59-4db6-95e6-0fb58ca084cb-utilities" (OuterVolumeSpecName: "utilities") pod "0315a2a2-6b59-4db6-95e6-0fb58ca084cb" (UID: "0315a2a2-6b59-4db6-95e6-0fb58ca084cb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.479646 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0315a2a2-6b59-4db6-95e6-0fb58ca084cb-kube-api-access-6srvg" (OuterVolumeSpecName: "kube-api-access-6srvg") pod "0315a2a2-6b59-4db6-95e6-0fb58ca084cb" (UID: "0315a2a2-6b59-4db6-95e6-0fb58ca084cb"). InnerVolumeSpecName "kube-api-access-6srvg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.497126 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0315a2a2-6b59-4db6-95e6-0fb58ca084cb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0315a2a2-6b59-4db6-95e6-0fb58ca084cb" (UID: "0315a2a2-6b59-4db6-95e6-0fb58ca084cb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.573476 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6srvg\" (UniqueName: \"kubernetes.io/projected/0315a2a2-6b59-4db6-95e6-0fb58ca084cb-kube-api-access-6srvg\") on node \"crc\" DevicePath \"\""
Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.573824 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0315a2a2-6b59-4db6-95e6-0fb58ca084cb-utilities\") on node \"crc\" DevicePath \"\""
Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.573882 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0315a2a2-6b59-4db6-95e6-0fb58ca084cb-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.797348 4971 generic.go:334] "Generic (PLEG): container finished" podID="0315a2a2-6b59-4db6-95e6-0fb58ca084cb" containerID="ca5cefc9beb56b4540629e95e8861b36dce9352226417afb8d59976c45f7c18a" exitCode=0
Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.797430 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-szhm9" event={"ID":"0315a2a2-6b59-4db6-95e6-0fb58ca084cb","Type":"ContainerDied","Data":"ca5cefc9beb56b4540629e95e8861b36dce9352226417afb8d59976c45f7c18a"}
Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.797475 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-szhm9" event={"ID":"0315a2a2-6b59-4db6-95e6-0fb58ca084cb","Type":"ContainerDied","Data":"8ff6c81fe11c6340ff70da2634bbb508ba3888b39d3044c1958d7d0b32ed6ca0"}
Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.797515 4971 scope.go:117] "RemoveContainer" containerID="ca5cefc9beb56b4540629e95e8861b36dce9352226417afb8d59976c45f7c18a"
Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.797829 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-szhm9" Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.827420 4971 scope.go:117] "RemoveContainer" containerID="cca0f324a643360c7244cc49c4aeb4c4e665dd9917b498ec08106fe6d71c024b" Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.836794 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-szhm9"] Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.850184 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-szhm9"] Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.853319 4971 scope.go:117] "RemoveContainer" containerID="c18dd50f6d1d7d5b9db4f0af5085fe22ccd8c75c9426b071841b1c3487da69ea" Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.911030 4971 scope.go:117] "RemoveContainer" containerID="ca5cefc9beb56b4540629e95e8861b36dce9352226417afb8d59976c45f7c18a" Nov 27 09:40:48 crc kubenswrapper[4971]: E1127 09:40:48.911788 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca5cefc9beb56b4540629e95e8861b36dce9352226417afb8d59976c45f7c18a\": container with ID starting with ca5cefc9beb56b4540629e95e8861b36dce9352226417afb8d59976c45f7c18a not found: ID does not exist" containerID="ca5cefc9beb56b4540629e95e8861b36dce9352226417afb8d59976c45f7c18a" Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.911823 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca5cefc9beb56b4540629e95e8861b36dce9352226417afb8d59976c45f7c18a"} err="failed to get container status \"ca5cefc9beb56b4540629e95e8861b36dce9352226417afb8d59976c45f7c18a\": rpc error: code = NotFound desc = could not find container \"ca5cefc9beb56b4540629e95e8861b36dce9352226417afb8d59976c45f7c18a\": container with ID starting with ca5cefc9beb56b4540629e95e8861b36dce9352226417afb8d59976c45f7c18a not found: ID does not exist" Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.911865 4971 scope.go:117] "RemoveContainer" containerID="cca0f324a643360c7244cc49c4aeb4c4e665dd9917b498ec08106fe6d71c024b" Nov 27 09:40:48 crc kubenswrapper[4971]: E1127 09:40:48.912245 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cca0f324a643360c7244cc49c4aeb4c4e665dd9917b498ec08106fe6d71c024b\": container with ID starting with cca0f324a643360c7244cc49c4aeb4c4e665dd9917b498ec08106fe6d71c024b not found: ID does not exist" containerID="cca0f324a643360c7244cc49c4aeb4c4e665dd9917b498ec08106fe6d71c024b" Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.912305 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cca0f324a643360c7244cc49c4aeb4c4e665dd9917b498ec08106fe6d71c024b"} err="failed to get container status \"cca0f324a643360c7244cc49c4aeb4c4e665dd9917b498ec08106fe6d71c024b\": rpc error: code = NotFound desc = could not find container \"cca0f324a643360c7244cc49c4aeb4c4e665dd9917b498ec08106fe6d71c024b\": container with ID starting with cca0f324a643360c7244cc49c4aeb4c4e665dd9917b498ec08106fe6d71c024b not found: ID does not exist" Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.912328 4971 scope.go:117] "RemoveContainer" containerID="c18dd50f6d1d7d5b9db4f0af5085fe22ccd8c75c9426b071841b1c3487da69ea" Nov 27 09:40:48 crc kubenswrapper[4971]: E1127 09:40:48.912865 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c18dd50f6d1d7d5b9db4f0af5085fe22ccd8c75c9426b071841b1c3487da69ea\": container with ID starting with c18dd50f6d1d7d5b9db4f0af5085fe22ccd8c75c9426b071841b1c3487da69ea not found: ID does not exist" containerID="c18dd50f6d1d7d5b9db4f0af5085fe22ccd8c75c9426b071841b1c3487da69ea" Nov 27 09:40:48 crc kubenswrapper[4971]: I1127 09:40:48.912910 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c18dd50f6d1d7d5b9db4f0af5085fe22ccd8c75c9426b071841b1c3487da69ea"} err="failed to get container status \"c18dd50f6d1d7d5b9db4f0af5085fe22ccd8c75c9426b071841b1c3487da69ea\": rpc error: code = NotFound desc = could not find container \"c18dd50f6d1d7d5b9db4f0af5085fe22ccd8c75c9426b071841b1c3487da69ea\": container with ID starting with c18dd50f6d1d7d5b9db4f0af5085fe22ccd8c75c9426b071841b1c3487da69ea not found: ID does not exist" Nov 27 09:40:50 crc kubenswrapper[4971]: I1127 09:40:50.575484 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0315a2a2-6b59-4db6-95e6-0fb58ca084cb" path="/var/lib/kubelet/pods/0315a2a2-6b59-4db6-95e6-0fb58ca084cb/volumes" Nov 27 09:40:57 crc kubenswrapper[4971]: I1127 09:40:57.553098 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73" Nov 27 09:40:57 crc kubenswrapper[4971]: E1127 09:40:57.555521 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:41:12 crc kubenswrapper[4971]: I1127 09:41:12.558050 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73" Nov 27 09:41:12 crc kubenswrapper[4971]: E1127 09:41:12.559100 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:41:25 crc kubenswrapper[4971]: I1127 09:41:25.550556 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73" Nov 27 09:41:25 crc kubenswrapper[4971]: E1127 09:41:25.551500 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:41:36 crc kubenswrapper[4971]: I1127 09:41:36.550816 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73" Nov 27 09:41:37 crc kubenswrapper[4971]: I1127 09:41:37.653766 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"31b85eb2444b7a4c49f418f3febd731ee00e77aa6a823a8826a6dfb293a04ccf"} Nov 27 09:41:40 crc kubenswrapper[4971]: I1127 09:41:40.703075 4971 generic.go:334] "Generic (PLEG): container finished" podID="35017a9d-382f-4e37-9652-77e0a5aec05e" containerID="972fcaa0f61242c3ff448f5586646da5890fba5bf99c47a6caf83e3e9e0f8f4f" exitCode=0 Nov 27 09:41:40 crc kubenswrapper[4971]: I1127 09:41:40.703180 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-xbpj6" event={"ID":"35017a9d-382f-4e37-9652-77e0a5aec05e","Type":"ContainerDied","Data":"972fcaa0f61242c3ff448f5586646da5890fba5bf99c47a6caf83e3e9e0f8f4f"} Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.235211 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-xbpj6" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.334366 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-telemetry-combined-ca-bundle\") pod \"35017a9d-382f-4e37-9652-77e0a5aec05e\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.334450 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rhm6\" (UniqueName: \"kubernetes.io/projected/35017a9d-382f-4e37-9652-77e0a5aec05e-kube-api-access-8rhm6\") pod \"35017a9d-382f-4e37-9652-77e0a5aec05e\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.334672 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceilometer-compute-config-data-0\") pod \"35017a9d-382f-4e37-9652-77e0a5aec05e\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.334776 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceilometer-compute-config-data-2\") pod \"35017a9d-382f-4e37-9652-77e0a5aec05e\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.334904 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceilometer-compute-config-data-1\") pod \"35017a9d-382f-4e37-9652-77e0a5aec05e\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.336034 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ssh-key\") pod \"35017a9d-382f-4e37-9652-77e0a5aec05e\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.336111 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceph\") pod \"35017a9d-382f-4e37-9652-77e0a5aec05e\" (UID: 
\"35017a9d-382f-4e37-9652-77e0a5aec05e\") " Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.336142 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-inventory\") pod \"35017a9d-382f-4e37-9652-77e0a5aec05e\" (UID: \"35017a9d-382f-4e37-9652-77e0a5aec05e\") " Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.342617 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "35017a9d-382f-4e37-9652-77e0a5aec05e" (UID: "35017a9d-382f-4e37-9652-77e0a5aec05e"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.343824 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35017a9d-382f-4e37-9652-77e0a5aec05e-kube-api-access-8rhm6" (OuterVolumeSpecName: "kube-api-access-8rhm6") pod "35017a9d-382f-4e37-9652-77e0a5aec05e" (UID: "35017a9d-382f-4e37-9652-77e0a5aec05e"). InnerVolumeSpecName "kube-api-access-8rhm6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.355192 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceph" (OuterVolumeSpecName: "ceph") pod "35017a9d-382f-4e37-9652-77e0a5aec05e" (UID: "35017a9d-382f-4e37-9652-77e0a5aec05e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.381033 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "35017a9d-382f-4e37-9652-77e0a5aec05e" (UID: "35017a9d-382f-4e37-9652-77e0a5aec05e"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.395918 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-inventory" (OuterVolumeSpecName: "inventory") pod "35017a9d-382f-4e37-9652-77e0a5aec05e" (UID: "35017a9d-382f-4e37-9652-77e0a5aec05e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.404221 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "35017a9d-382f-4e37-9652-77e0a5aec05e" (UID: "35017a9d-382f-4e37-9652-77e0a5aec05e"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.406976 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "35017a9d-382f-4e37-9652-77e0a5aec05e" (UID: "35017a9d-382f-4e37-9652-77e0a5aec05e"). 
InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.420979 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "35017a9d-382f-4e37-9652-77e0a5aec05e" (UID: "35017a9d-382f-4e37-9652-77e0a5aec05e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.440131 4971 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.440174 4971 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.440370 4971 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.440386 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.440398 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-ceph\") on node \"crc\" DevicePath \"\"" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.440407 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.440419 4971 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35017a9d-382f-4e37-9652-77e0a5aec05e-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.440430 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rhm6\" (UniqueName: \"kubernetes.io/projected/35017a9d-382f-4e37-9652-77e0a5aec05e-kube-api-access-8rhm6\") on node \"crc\" DevicePath \"\"" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.733932 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-xbpj6" event={"ID":"35017a9d-382f-4e37-9652-77e0a5aec05e","Type":"ContainerDied","Data":"83431857a82ac6403e69e9148a0f1517f6a8bf543b182bc8f49b8af8d21cfba1"} Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.734381 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83431857a82ac6403e69e9148a0f1517f6a8bf543b182bc8f49b8af8d21cfba1" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.733973 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-xbpj6" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.905402 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-ztg25"] Nov 27 09:41:42 crc kubenswrapper[4971]: E1127 09:41:42.906178 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0315a2a2-6b59-4db6-95e6-0fb58ca084cb" containerName="extract-utilities" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.906214 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0315a2a2-6b59-4db6-95e6-0fb58ca084cb" containerName="extract-utilities" Nov 27 09:41:42 crc kubenswrapper[4971]: E1127 09:41:42.906244 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0315a2a2-6b59-4db6-95e6-0fb58ca084cb" containerName="extract-content" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.906254 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0315a2a2-6b59-4db6-95e6-0fb58ca084cb" containerName="extract-content" Nov 27 09:41:42 crc kubenswrapper[4971]: E1127 09:41:42.906306 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0315a2a2-6b59-4db6-95e6-0fb58ca084cb" containerName="registry-server" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.906315 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0315a2a2-6b59-4db6-95e6-0fb58ca084cb" containerName="registry-server" Nov 27 09:41:42 crc kubenswrapper[4971]: E1127 09:41:42.906337 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35017a9d-382f-4e37-9652-77e0a5aec05e" containerName="telemetry-openstack-openstack-cell1" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.906347 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="35017a9d-382f-4e37-9652-77e0a5aec05e" containerName="telemetry-openstack-openstack-cell1" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.906643 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0315a2a2-6b59-4db6-95e6-0fb58ca084cb" containerName="registry-server" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.906671 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="35017a9d-382f-4e37-9652-77e0a5aec05e" containerName="telemetry-openstack-openstack-cell1" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.907743 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.910171 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.910290 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-sriov-agent-neutron-config" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.910585 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.911308 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.912820 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-jnkbm" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.920187 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-ztg25"] Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.953138 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-ztg25\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.953329 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljwcz\" (UniqueName: \"kubernetes.io/projected/3e206d9e-acba-4218-9b29-370e5c829461-kube-api-access-ljwcz\") pod \"neutron-sriov-openstack-openstack-cell1-ztg25\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.953361 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-ztg25\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.953395 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-ztg25\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.953435 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-ztg25\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:42 crc kubenswrapper[4971]: I1127 09:41:42.953460 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-agent-neutron-config-0\" 
(UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-ztg25\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:43 crc kubenswrapper[4971]: I1127 09:41:43.056537 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljwcz\" (UniqueName: \"kubernetes.io/projected/3e206d9e-acba-4218-9b29-370e5c829461-kube-api-access-ljwcz\") pod \"neutron-sriov-openstack-openstack-cell1-ztg25\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:43 crc kubenswrapper[4971]: I1127 09:41:43.056647 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-ztg25\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:43 crc kubenswrapper[4971]: I1127 09:41:43.056694 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-ztg25\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:43 crc kubenswrapper[4971]: I1127 09:41:43.056735 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-ztg25\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:43 crc kubenswrapper[4971]: I1127 09:41:43.056770 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-ztg25\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:43 crc kubenswrapper[4971]: I1127 09:41:43.056838 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-ztg25\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:43 crc kubenswrapper[4971]: I1127 09:41:43.062309 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-ztg25\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:43 crc kubenswrapper[4971]: I1127 09:41:43.062988 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-neutron-sriov-agent-neutron-config-0\") pod 
\"neutron-sriov-openstack-openstack-cell1-ztg25\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:43 crc kubenswrapper[4971]: I1127 09:41:43.064375 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-ztg25\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:43 crc kubenswrapper[4971]: I1127 09:41:43.064485 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-ztg25\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:43 crc kubenswrapper[4971]: I1127 09:41:43.066311 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-ztg25\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:43 crc kubenswrapper[4971]: I1127 09:41:43.073378 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljwcz\" (UniqueName: \"kubernetes.io/projected/3e206d9e-acba-4218-9b29-370e5c829461-kube-api-access-ljwcz\") pod \"neutron-sriov-openstack-openstack-cell1-ztg25\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:43 crc kubenswrapper[4971]: I1127 09:41:43.234877 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:41:43 crc kubenswrapper[4971]: E1127 09:41:43.781190 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35017a9d_382f_4e37_9652_77e0a5aec05e.slice/crio-83431857a82ac6403e69e9148a0f1517f6a8bf543b182bc8f49b8af8d21cfba1\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35017a9d_382f_4e37_9652_77e0a5aec05e.slice\": RecentStats: unable to find data in memory cache]" Nov 27 09:41:43 crc kubenswrapper[4971]: I1127 09:41:43.856337 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-ztg25"] Nov 27 09:41:43 crc kubenswrapper[4971]: W1127 09:41:43.860633 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e206d9e_acba_4218_9b29_370e5c829461.slice/crio-c5443030f4a9c11a0649ffabee629d92f5b1fd9a9cecceee70e17e91f202a663 WatchSource:0}: Error finding container c5443030f4a9c11a0649ffabee629d92f5b1fd9a9cecceee70e17e91f202a663: Status 404 returned error can't find the container with id c5443030f4a9c11a0649ffabee629d92f5b1fd9a9cecceee70e17e91f202a663 Nov 27 09:41:44 crc kubenswrapper[4971]: I1127 09:41:44.767921 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" event={"ID":"3e206d9e-acba-4218-9b29-370e5c829461","Type":"ContainerStarted","Data":"f31b0b84a4da209c9f5a6f2b9c4bfa820bbc7bb595d21e923c1a19e1b01f422c"} Nov 27 09:41:44 crc kubenswrapper[4971]: I1127 09:41:44.768428 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" event={"ID":"3e206d9e-acba-4218-9b29-370e5c829461","Type":"ContainerStarted","Data":"c5443030f4a9c11a0649ffabee629d92f5b1fd9a9cecceee70e17e91f202a663"} Nov 27 09:41:44 crc kubenswrapper[4971]: I1127 09:41:44.790606 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" podStartSLOduration=2.248012933 podStartE2EDuration="2.790575542s" podCreationTimestamp="2025-11-27 09:41:42 +0000 UTC" firstStartedPulling="2025-11-27 09:41:43.863698729 +0000 UTC m=+10142.055742657" lastFinishedPulling="2025-11-27 09:41:44.406261308 +0000 UTC m=+10142.598305266" observedRunningTime="2025-11-27 09:41:44.786705112 +0000 UTC m=+10142.978749040" watchObservedRunningTime="2025-11-27 09:41:44.790575542 +0000 UTC m=+10142.982619460" Nov 27 09:41:54 crc kubenswrapper[4971]: E1127 09:41:54.096869 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35017a9d_382f_4e37_9652_77e0a5aec05e.slice/crio-83431857a82ac6403e69e9148a0f1517f6a8bf543b182bc8f49b8af8d21cfba1\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35017a9d_382f_4e37_9652_77e0a5aec05e.slice\": RecentStats: unable to find data in memory cache]" Nov 27 09:42:04 crc kubenswrapper[4971]: E1127 09:42:04.398584 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35017a9d_382f_4e37_9652_77e0a5aec05e.slice/crio-83431857a82ac6403e69e9148a0f1517f6a8bf543b182bc8f49b8af8d21cfba1\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35017a9d_382f_4e37_9652_77e0a5aec05e.slice\": RecentStats: unable to find data in memory cache]" Nov 27 09:42:14 crc kubenswrapper[4971]: E1127 09:42:14.694429 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35017a9d_382f_4e37_9652_77e0a5aec05e.slice/crio-83431857a82ac6403e69e9148a0f1517f6a8bf543b182bc8f49b8af8d21cfba1\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35017a9d_382f_4e37_9652_77e0a5aec05e.slice\": RecentStats: unable to find data in memory cache]" Nov 27 09:42:24 crc kubenswrapper[4971]: E1127 09:42:24.955846 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35017a9d_382f_4e37_9652_77e0a5aec05e.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35017a9d_382f_4e37_9652_77e0a5aec05e.slice/crio-83431857a82ac6403e69e9148a0f1517f6a8bf543b182bc8f49b8af8d21cfba1\": RecentStats: unable to find data in memory cache]" Nov 27 09:42:29 crc kubenswrapper[4971]: I1127 09:42:29.432153 4971 generic.go:334] "Generic (PLEG): container finished" podID="3e206d9e-acba-4218-9b29-370e5c829461" containerID="f31b0b84a4da209c9f5a6f2b9c4bfa820bbc7bb595d21e923c1a19e1b01f422c" exitCode=0 Nov 27 09:42:29 crc kubenswrapper[4971]: I1127 09:42:29.432286 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" event={"ID":"3e206d9e-acba-4218-9b29-370e5c829461","Type":"ContainerDied","Data":"f31b0b84a4da209c9f5a6f2b9c4bfa820bbc7bb595d21e923c1a19e1b01f422c"} Nov 27 09:42:30 crc kubenswrapper[4971]: I1127 09:42:30.980717 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.047752 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-ssh-key\") pod \"3e206d9e-acba-4218-9b29-370e5c829461\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.047882 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-neutron-sriov-combined-ca-bundle\") pod \"3e206d9e-acba-4218-9b29-370e5c829461\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.047981 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-neutron-sriov-agent-neutron-config-0\") pod \"3e206d9e-acba-4218-9b29-370e5c829461\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.048135 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-inventory\") pod \"3e206d9e-acba-4218-9b29-370e5c829461\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.048206 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-ceph\") pod \"3e206d9e-acba-4218-9b29-370e5c829461\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.048264 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljwcz\" (UniqueName: \"kubernetes.io/projected/3e206d9e-acba-4218-9b29-370e5c829461-kube-api-access-ljwcz\") pod \"3e206d9e-acba-4218-9b29-370e5c829461\" (UID: \"3e206d9e-acba-4218-9b29-370e5c829461\") " Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.064628 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "3e206d9e-acba-4218-9b29-370e5c829461" (UID: "3e206d9e-acba-4218-9b29-370e5c829461"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.066925 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e206d9e-acba-4218-9b29-370e5c829461-kube-api-access-ljwcz" (OuterVolumeSpecName: "kube-api-access-ljwcz") pod "3e206d9e-acba-4218-9b29-370e5c829461" (UID: "3e206d9e-acba-4218-9b29-370e5c829461"). InnerVolumeSpecName "kube-api-access-ljwcz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.074402 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-ceph" (OuterVolumeSpecName: "ceph") pod "3e206d9e-acba-4218-9b29-370e5c829461" (UID: "3e206d9e-acba-4218-9b29-370e5c829461"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.080960 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-inventory" (OuterVolumeSpecName: "inventory") pod "3e206d9e-acba-4218-9b29-370e5c829461" (UID: "3e206d9e-acba-4218-9b29-370e5c829461"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.091577 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3e206d9e-acba-4218-9b29-370e5c829461" (UID: "3e206d9e-acba-4218-9b29-370e5c829461"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.112881 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-neutron-sriov-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-sriov-agent-neutron-config-0") pod "3e206d9e-acba-4218-9b29-370e5c829461" (UID: "3e206d9e-acba-4218-9b29-370e5c829461"). InnerVolumeSpecName "neutron-sriov-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.150718 4971 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-neutron-sriov-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.150754 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.150768 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-ceph\") on node \"crc\" DevicePath \"\"" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.150777 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljwcz\" (UniqueName: \"kubernetes.io/projected/3e206d9e-acba-4218-9b29-370e5c829461-kube-api-access-ljwcz\") on node \"crc\" DevicePath \"\"" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.150787 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.150796 4971 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e206d9e-acba-4218-9b29-370e5c829461-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.480087 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" event={"ID":"3e206d9e-acba-4218-9b29-370e5c829461","Type":"ContainerDied","Data":"c5443030f4a9c11a0649ffabee629d92f5b1fd9a9cecceee70e17e91f202a663"} Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.480147 4971 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="c5443030f4a9c11a0649ffabee629d92f5b1fd9a9cecceee70e17e91f202a663" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.480603 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-ztg25" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.599602 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8"] Nov 27 09:42:31 crc kubenswrapper[4971]: E1127 09:42:31.600240 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e206d9e-acba-4218-9b29-370e5c829461" containerName="neutron-sriov-openstack-openstack-cell1" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.600259 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e206d9e-acba-4218-9b29-370e5c829461" containerName="neutron-sriov-openstack-openstack-cell1" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.600523 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e206d9e-acba-4218-9b29-370e5c829461" containerName="neutron-sriov-openstack-openstack-cell1" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.601497 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.603679 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-dhcp-agent-neutron-config" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.603837 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.604351 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.604383 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.604480 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-jnkbm" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.614170 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8"] Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.669586 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-hw9g8\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.669999 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-hw9g8\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.670115 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2psd\" (UniqueName: 
\"kubernetes.io/projected/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-kube-api-access-q2psd\") pod \"neutron-dhcp-openstack-openstack-cell1-hw9g8\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.670245 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-hw9g8\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.670386 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-hw9g8\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.670485 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-hw9g8\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.773083 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-hw9g8\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.773151 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-hw9g8\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.773300 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-hw9g8\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.773362 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-hw9g8\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.773402 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2psd\" (UniqueName: \"kubernetes.io/projected/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-kube-api-access-q2psd\") pod 
\"neutron-dhcp-openstack-openstack-cell1-hw9g8\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.773439 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-hw9g8\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.778268 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-hw9g8\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.779613 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-hw9g8\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.780565 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-hw9g8\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.781236 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-hw9g8\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.782114 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-hw9g8\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.805901 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2psd\" (UniqueName: \"kubernetes.io/projected/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-kube-api-access-q2psd\") pod \"neutron-dhcp-openstack-openstack-cell1-hw9g8\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:31 crc kubenswrapper[4971]: I1127 09:42:31.923279 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:42:32 crc kubenswrapper[4971]: I1127 09:42:32.583930 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8"] Nov 27 09:42:33 crc kubenswrapper[4971]: I1127 09:42:33.511331 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" event={"ID":"34e270a8-c0d0-4ace-a1f8-b7ba36869a70","Type":"ContainerStarted","Data":"bc04a4731f040db525c7d38566fa83ba40731cfc24c242faa833338d219d6617"} Nov 27 09:42:33 crc kubenswrapper[4971]: I1127 09:42:33.511868 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" event={"ID":"34e270a8-c0d0-4ace-a1f8-b7ba36869a70","Type":"ContainerStarted","Data":"0d20827280ad09b07e643f3ccd4a2fa555b1b9c6da6552e888351e2b36bb606e"} Nov 27 09:42:33 crc kubenswrapper[4971]: I1127 09:42:33.545000 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" podStartSLOduration=2.068541999 podStartE2EDuration="2.544975987s" podCreationTimestamp="2025-11-27 09:42:31 +0000 UTC" firstStartedPulling="2025-11-27 09:42:32.586298175 +0000 UTC m=+10190.778342093" lastFinishedPulling="2025-11-27 09:42:33.062732163 +0000 UTC m=+10191.254776081" observedRunningTime="2025-11-27 09:42:33.538279236 +0000 UTC m=+10191.730323174" watchObservedRunningTime="2025-11-27 09:42:33.544975987 +0000 UTC m=+10191.737019905" Nov 27 09:42:35 crc kubenswrapper[4971]: E1127 09:42:35.331638 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35017a9d_382f_4e37_9652_77e0a5aec05e.slice/crio-83431857a82ac6403e69e9148a0f1517f6a8bf543b182bc8f49b8af8d21cfba1\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35017a9d_382f_4e37_9652_77e0a5aec05e.slice\": RecentStats: unable to find data in memory cache]" Nov 27 09:43:39 crc kubenswrapper[4971]: I1127 09:43:39.389944 4971 generic.go:334] "Generic (PLEG): container finished" podID="34e270a8-c0d0-4ace-a1f8-b7ba36869a70" containerID="bc04a4731f040db525c7d38566fa83ba40731cfc24c242faa833338d219d6617" exitCode=0 Nov 27 09:43:39 crc kubenswrapper[4971]: I1127 09:43:39.390051 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" event={"ID":"34e270a8-c0d0-4ace-a1f8-b7ba36869a70","Type":"ContainerDied","Data":"bc04a4731f040db525c7d38566fa83ba40731cfc24c242faa833338d219d6617"} Nov 27 09:43:40 crc kubenswrapper[4971]: I1127 09:43:40.977850 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.050282 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-ssh-key\") pod \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.050354 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-ceph\") pod \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.065937 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-ceph" (OuterVolumeSpecName: "ceph") pod "34e270a8-c0d0-4ace-a1f8-b7ba36869a70" (UID: "34e270a8-c0d0-4ace-a1f8-b7ba36869a70"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.083385 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "34e270a8-c0d0-4ace-a1f8-b7ba36869a70" (UID: "34e270a8-c0d0-4ace-a1f8-b7ba36869a70"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.153062 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2psd\" (UniqueName: \"kubernetes.io/projected/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-kube-api-access-q2psd\") pod \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.153344 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-neutron-dhcp-agent-neutron-config-0\") pod \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.153383 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-neutron-dhcp-combined-ca-bundle\") pod \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.153503 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-inventory\") pod \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\" (UID: \"34e270a8-c0d0-4ace-a1f8-b7ba36869a70\") " Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.154125 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-ceph\") on node \"crc\" DevicePath \"\"" Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.154146 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-ssh-key\") on node \"crc\" DevicePath 
\"\"" Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.160809 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "34e270a8-c0d0-4ace-a1f8-b7ba36869a70" (UID: "34e270a8-c0d0-4ace-a1f8-b7ba36869a70"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.160853 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-kube-api-access-q2psd" (OuterVolumeSpecName: "kube-api-access-q2psd") pod "34e270a8-c0d0-4ace-a1f8-b7ba36869a70" (UID: "34e270a8-c0d0-4ace-a1f8-b7ba36869a70"). InnerVolumeSpecName "kube-api-access-q2psd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.182805 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-neutron-dhcp-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-dhcp-agent-neutron-config-0") pod "34e270a8-c0d0-4ace-a1f8-b7ba36869a70" (UID: "34e270a8-c0d0-4ace-a1f8-b7ba36869a70"). InnerVolumeSpecName "neutron-dhcp-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.195775 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-inventory" (OuterVolumeSpecName: "inventory") pod "34e270a8-c0d0-4ace-a1f8-b7ba36869a70" (UID: "34e270a8-c0d0-4ace-a1f8-b7ba36869a70"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.256101 4971 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-neutron-dhcp-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.256163 4971 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.256180 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-inventory\") on node \"crc\" DevicePath \"\"" Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.256194 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2psd\" (UniqueName: \"kubernetes.io/projected/34e270a8-c0d0-4ace-a1f8-b7ba36869a70-kube-api-access-q2psd\") on node \"crc\" DevicePath \"\"" Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.421483 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" event={"ID":"34e270a8-c0d0-4ace-a1f8-b7ba36869a70","Type":"ContainerDied","Data":"0d20827280ad09b07e643f3ccd4a2fa555b1b9c6da6552e888351e2b36bb606e"} Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.421573 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d20827280ad09b07e643f3ccd4a2fa555b1b9c6da6552e888351e2b36bb606e" Nov 27 09:43:41 crc kubenswrapper[4971]: I1127 09:43:41.421629 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-hw9g8" Nov 27 09:43:55 crc kubenswrapper[4971]: I1127 09:43:55.931186 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 27 09:43:55 crc kubenswrapper[4971]: I1127 09:43:55.932250 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f" containerName="nova-cell0-conductor-conductor" containerID="cri-o://ad34b8b5148d73ba7a1d4d233cd94e40511831af3da591fa83dd75b18a287a2b" gracePeriod=30 Nov 27 09:43:55 crc kubenswrapper[4971]: I1127 09:43:55.974701 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 27 09:43:55 crc kubenswrapper[4971]: I1127 09:43:55.975057 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="6d59e37d-03b0-47d2-9e61-7976f397d2a8" containerName="nova-cell1-conductor-conductor" containerID="cri-o://b9495126122a5659954daa77d5bb7bfa0a719ee215ba76a44ce1102e2ca3d70c" gracePeriod=30 Nov 27 09:43:56 crc kubenswrapper[4971]: E1127 09:43:56.018934 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9495126122a5659954daa77d5bb7bfa0a719ee215ba76a44ce1102e2ca3d70c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 27 09:43:56 crc kubenswrapper[4971]: E1127 09:43:56.020523 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9495126122a5659954daa77d5bb7bfa0a719ee215ba76a44ce1102e2ca3d70c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 27 09:43:56 crc kubenswrapper[4971]: E1127 09:43:56.023594 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9495126122a5659954daa77d5bb7bfa0a719ee215ba76a44ce1102e2ca3d70c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 27 09:43:56 crc kubenswrapper[4971]: E1127 09:43:56.023643 4971 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="6d59e37d-03b0-47d2-9e61-7976f397d2a8" containerName="nova-cell1-conductor-conductor" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.412904 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.413448 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.689847 4971 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"] Nov 27 09:43:56 crc kubenswrapper[4971]: E1127 09:43:56.690494 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34e270a8-c0d0-4ace-a1f8-b7ba36869a70" containerName="neutron-dhcp-openstack-openstack-cell1" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.690517 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="34e270a8-c0d0-4ace-a1f8-b7ba36869a70" containerName="neutron-dhcp-openstack-openstack-cell1" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.690829 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="34e270a8-c0d0-4ace-a1f8-b7ba36869a70" containerName="neutron-dhcp-openstack-openstack-cell1" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.691955 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.703308 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.703683 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.703845 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.704527 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.704712 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.704738 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.716208 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-jnkbm" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.741663 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"] Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.806895 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.806971 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.807013 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.807087 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.807132 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tksh2\" (UniqueName: \"kubernetes.io/projected/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-kube-api-access-tksh2\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.807163 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.807211 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.807242 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.807309 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.807352 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cell1-compute-config-1\") pod 
\"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.807457 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.912274 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tksh2\" (UniqueName: \"kubernetes.io/projected/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-kube-api-access-tksh2\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.912345 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.912404 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.912438 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.912512 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.912569 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 
Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.912708 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"
Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.912737 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"
Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.912769 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"
Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.912833 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"
Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.915507 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"
Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.920214 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"
Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.931812 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"
Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.937201 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"
Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.937643 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"
Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.938076 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"
Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.938466 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"
Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.940514 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"
Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.950160 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"
Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.954401 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tksh2\" (UniqueName: \"kubernetes.io/projected/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-kube-api-access-tksh2\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"
Nov 27 09:43:56 crc kubenswrapper[4971]: I1127 09:43:56.957898 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"
(UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:57 crc kubenswrapper[4971]: I1127 09:43:57.102061 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" Nov 27 09:43:57 crc kubenswrapper[4971]: I1127 09:43:57.490855 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 09:43:57 crc kubenswrapper[4971]: I1127 09:43:57.491698 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="36b2d44b-b297-4a2a-b166-841b28acf914" containerName="nova-scheduler-scheduler" containerID="cri-o://016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253" gracePeriod=30 Nov 27 09:43:57 crc kubenswrapper[4971]: I1127 09:43:57.506889 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 27 09:43:57 crc kubenswrapper[4971]: I1127 09:43:57.507303 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="5c011972-ac6d-497d-9fd5-b5eed11ef507" containerName="nova-api-log" containerID="cri-o://cda5e5686740453b3094623d07ae2dcf3a09ceba6d1b35aa43b64e96455b225e" gracePeriod=30 Nov 27 09:43:57 crc kubenswrapper[4971]: I1127 09:43:57.508192 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="5c011972-ac6d-497d-9fd5-b5eed11ef507" containerName="nova-api-api" containerID="cri-o://5049d8a6c9ddf9e828beac44153eb7f5791e0a7cb1ebf7c2a3377062621684d3" gracePeriod=30 Nov 27 09:43:57 crc kubenswrapper[4971]: I1127 09:43:57.540865 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 09:43:57 crc kubenswrapper[4971]: I1127 09:43:57.541594 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="55562639-e816-4faf-8c2b-7124cd156a85" containerName="nova-metadata-log" containerID="cri-o://1df28c45fcfa92c627f267b10956aa0fae302e93e9a77d39a043fc9873783e9d" gracePeriod=30 Nov 27 09:43:57 crc kubenswrapper[4971]: I1127 09:43:57.541877 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="55562639-e816-4faf-8c2b-7124cd156a85" containerName="nova-metadata-metadata" containerID="cri-o://68315fba2e7808c887d15af24f2991b56487f29af6f65f6f9debb64df02b0d37" gracePeriod=30 Nov 27 09:43:57 crc kubenswrapper[4971]: I1127 09:43:57.662514 4971 generic.go:334] "Generic (PLEG): container finished" podID="5c011972-ac6d-497d-9fd5-b5eed11ef507" containerID="cda5e5686740453b3094623d07ae2dcf3a09ceba6d1b35aa43b64e96455b225e" exitCode=143 Nov 27 09:43:57 crc kubenswrapper[4971]: I1127 09:43:57.662585 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5c011972-ac6d-497d-9fd5-b5eed11ef507","Type":"ContainerDied","Data":"cda5e5686740453b3094623d07ae2dcf3a09ceba6d1b35aa43b64e96455b225e"} Nov 27 09:43:57 crc kubenswrapper[4971]: I1127 09:43:57.794087 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"] Nov 27 09:43:58 crc kubenswrapper[4971]: I1127 09:43:58.681109 4971 
generic.go:334] "Generic (PLEG): container finished" podID="55562639-e816-4faf-8c2b-7124cd156a85" containerID="1df28c45fcfa92c627f267b10956aa0fae302e93e9a77d39a043fc9873783e9d" exitCode=143 Nov 27 09:43:58 crc kubenswrapper[4971]: I1127 09:43:58.682603 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"55562639-e816-4faf-8c2b-7124cd156a85","Type":"ContainerDied","Data":"1df28c45fcfa92c627f267b10956aa0fae302e93e9a77d39a043fc9873783e9d"} Nov 27 09:43:58 crc kubenswrapper[4971]: I1127 09:43:58.688068 4971 generic.go:334] "Generic (PLEG): container finished" podID="6d59e37d-03b0-47d2-9e61-7976f397d2a8" containerID="b9495126122a5659954daa77d5bb7bfa0a719ee215ba76a44ce1102e2ca3d70c" exitCode=0 Nov 27 09:43:58 crc kubenswrapper[4971]: I1127 09:43:58.688217 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6d59e37d-03b0-47d2-9e61-7976f397d2a8","Type":"ContainerDied","Data":"b9495126122a5659954daa77d5bb7bfa0a719ee215ba76a44ce1102e2ca3d70c"} Nov 27 09:43:58 crc kubenswrapper[4971]: I1127 09:43:58.688258 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6d59e37d-03b0-47d2-9e61-7976f397d2a8","Type":"ContainerDied","Data":"f7d585826069730538858b97d3e99c8f7b20d29bb40b2e72efc2964f1182a53a"} Nov 27 09:43:58 crc kubenswrapper[4971]: I1127 09:43:58.688273 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7d585826069730538858b97d3e99c8f7b20d29bb40b2e72efc2964f1182a53a" Nov 27 09:43:58 crc kubenswrapper[4971]: I1127 09:43:58.694294 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" event={"ID":"9cbe8f92-e455-4fc6-b875-1895f7dee6fd","Type":"ContainerStarted","Data":"78f2d9bcfd3508489e6023db9031d7908fa74448050fd66296bea9b050ec3c26"} Nov 27 09:43:58 crc kubenswrapper[4971]: I1127 09:43:58.694488 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 27 09:43:58 crc kubenswrapper[4971]: I1127 09:43:58.771962 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlhdr\" (UniqueName: \"kubernetes.io/projected/6d59e37d-03b0-47d2-9e61-7976f397d2a8-kube-api-access-dlhdr\") pod \"6d59e37d-03b0-47d2-9e61-7976f397d2a8\" (UID: \"6d59e37d-03b0-47d2-9e61-7976f397d2a8\") " Nov 27 09:43:58 crc kubenswrapper[4971]: I1127 09:43:58.772089 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d59e37d-03b0-47d2-9e61-7976f397d2a8-config-data\") pod \"6d59e37d-03b0-47d2-9e61-7976f397d2a8\" (UID: \"6d59e37d-03b0-47d2-9e61-7976f397d2a8\") " Nov 27 09:43:58 crc kubenswrapper[4971]: I1127 09:43:58.772140 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d59e37d-03b0-47d2-9e61-7976f397d2a8-combined-ca-bundle\") pod \"6d59e37d-03b0-47d2-9e61-7976f397d2a8\" (UID: \"6d59e37d-03b0-47d2-9e61-7976f397d2a8\") " Nov 27 09:43:58 crc kubenswrapper[4971]: I1127 09:43:58.777360 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d59e37d-03b0-47d2-9e61-7976f397d2a8-kube-api-access-dlhdr" (OuterVolumeSpecName: "kube-api-access-dlhdr") pod "6d59e37d-03b0-47d2-9e61-7976f397d2a8" (UID: "6d59e37d-03b0-47d2-9e61-7976f397d2a8"). 
InnerVolumeSpecName "kube-api-access-dlhdr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:43:58 crc kubenswrapper[4971]: I1127 09:43:58.810415 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d59e37d-03b0-47d2-9e61-7976f397d2a8-config-data" (OuterVolumeSpecName: "config-data") pod "6d59e37d-03b0-47d2-9e61-7976f397d2a8" (UID: "6d59e37d-03b0-47d2-9e61-7976f397d2a8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:43:58 crc kubenswrapper[4971]: I1127 09:43:58.817070 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d59e37d-03b0-47d2-9e61-7976f397d2a8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6d59e37d-03b0-47d2-9e61-7976f397d2a8" (UID: "6d59e37d-03b0-47d2-9e61-7976f397d2a8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:43:58 crc kubenswrapper[4971]: I1127 09:43:58.876816 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlhdr\" (UniqueName: \"kubernetes.io/projected/6d59e37d-03b0-47d2-9e61-7976f397d2a8-kube-api-access-dlhdr\") on node \"crc\" DevicePath \"\"" Nov 27 09:43:58 crc kubenswrapper[4971]: I1127 09:43:58.876873 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d59e37d-03b0-47d2-9e61-7976f397d2a8-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 09:43:58 crc kubenswrapper[4971]: I1127 09:43:58.876889 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d59e37d-03b0-47d2-9e61-7976f397d2a8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:43:59 crc kubenswrapper[4971]: I1127 09:43:59.713809 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 27 09:43:59 crc kubenswrapper[4971]: I1127 09:43:59.714048 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" event={"ID":"9cbe8f92-e455-4fc6-b875-1895f7dee6fd","Type":"ContainerStarted","Data":"229ca6642f016bc52f7150262c3f40164b2e4bbeac97023cdf91be06cb600a6a"} Nov 27 09:43:59 crc kubenswrapper[4971]: I1127 09:43:59.759490 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" podStartSLOduration=3.113722057 podStartE2EDuration="3.759469545s" podCreationTimestamp="2025-11-27 09:43:56 +0000 UTC" firstStartedPulling="2025-11-27 09:43:57.814580102 +0000 UTC m=+10276.006624020" lastFinishedPulling="2025-11-27 09:43:58.46032759 +0000 UTC m=+10276.652371508" observedRunningTime="2025-11-27 09:43:59.744900908 +0000 UTC m=+10277.936944856" watchObservedRunningTime="2025-11-27 09:43:59.759469545 +0000 UTC m=+10277.951513453" Nov 27 09:43:59 crc kubenswrapper[4971]: I1127 09:43:59.789120 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 27 09:43:59 crc kubenswrapper[4971]: I1127 09:43:59.808393 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 27 09:43:59 crc kubenswrapper[4971]: I1127 09:43:59.868121 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 27 09:43:59 crc kubenswrapper[4971]: E1127 09:43:59.868885 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d59e37d-03b0-47d2-9e61-7976f397d2a8" containerName="nova-cell1-conductor-conductor" Nov 27 09:43:59 crc kubenswrapper[4971]: I1127 09:43:59.868912 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d59e37d-03b0-47d2-9e61-7976f397d2a8" containerName="nova-cell1-conductor-conductor" Nov 27 09:43:59 crc kubenswrapper[4971]: I1127 09:43:59.869243 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d59e37d-03b0-47d2-9e61-7976f397d2a8" containerName="nova-cell1-conductor-conductor" Nov 27 09:43:59 crc kubenswrapper[4971]: I1127 09:43:59.870789 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 27 09:43:59 crc kubenswrapper[4971]: I1127 09:43:59.875814 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 27 09:43:59 crc kubenswrapper[4971]: I1127 09:43:59.886627 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 27 09:43:59 crc kubenswrapper[4971]: I1127 09:43:59.904069 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vwtk\" (UniqueName: \"kubernetes.io/projected/6d70faa6-5591-4e41-9020-2d81ece79575-kube-api-access-9vwtk\") pod \"nova-cell1-conductor-0\" (UID: \"6d70faa6-5591-4e41-9020-2d81ece79575\") " pod="openstack/nova-cell1-conductor-0" Nov 27 09:43:59 crc kubenswrapper[4971]: I1127 09:43:59.904144 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d70faa6-5591-4e41-9020-2d81ece79575-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6d70faa6-5591-4e41-9020-2d81ece79575\") " pod="openstack/nova-cell1-conductor-0" Nov 27 09:43:59 crc kubenswrapper[4971]: I1127 09:43:59.904221 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d70faa6-5591-4e41-9020-2d81ece79575-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6d70faa6-5591-4e41-9020-2d81ece79575\") " pod="openstack/nova-cell1-conductor-0" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.006721 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vwtk\" (UniqueName: \"kubernetes.io/projected/6d70faa6-5591-4e41-9020-2d81ece79575-kube-api-access-9vwtk\") pod \"nova-cell1-conductor-0\" (UID: \"6d70faa6-5591-4e41-9020-2d81ece79575\") " pod="openstack/nova-cell1-conductor-0" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.006782 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d70faa6-5591-4e41-9020-2d81ece79575-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6d70faa6-5591-4e41-9020-2d81ece79575\") " pod="openstack/nova-cell1-conductor-0" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.006837 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d70faa6-5591-4e41-9020-2d81ece79575-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6d70faa6-5591-4e41-9020-2d81ece79575\") " pod="openstack/nova-cell1-conductor-0" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.022368 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d70faa6-5591-4e41-9020-2d81ece79575-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6d70faa6-5591-4e41-9020-2d81ece79575\") " pod="openstack/nova-cell1-conductor-0" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.022442 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d70faa6-5591-4e41-9020-2d81ece79575-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6d70faa6-5591-4e41-9020-2d81ece79575\") " pod="openstack/nova-cell1-conductor-0" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.026435 4971 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vwtk\" (UniqueName: \"kubernetes.io/projected/6d70faa6-5591-4e41-9020-2d81ece79575-kube-api-access-9vwtk\") pod \"nova-cell1-conductor-0\" (UID: \"6d70faa6-5591-4e41-9020-2d81ece79575\") " pod="openstack/nova-cell1-conductor-0" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.198246 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 27 09:44:00 crc kubenswrapper[4971]: E1127 09:44:00.227860 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253 is running failed: container process not found" containerID="016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 27 09:44:00 crc kubenswrapper[4971]: E1127 09:44:00.228300 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253 is running failed: container process not found" containerID="016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 27 09:44:00 crc kubenswrapper[4971]: E1127 09:44:00.228495 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253 is running failed: container process not found" containerID="016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 27 09:44:00 crc kubenswrapper[4971]: E1127 09:44:00.228520 4971 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="36b2d44b-b297-4a2a-b166-841b28acf914" containerName="nova-scheduler-scheduler" Nov 27 09:44:00 crc kubenswrapper[4971]: E1127 09:44:00.260559 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ad34b8b5148d73ba7a1d4d233cd94e40511831af3da591fa83dd75b18a287a2b is running failed: container process not found" containerID="ad34b8b5148d73ba7a1d4d233cd94e40511831af3da591fa83dd75b18a287a2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 27 09:44:00 crc kubenswrapper[4971]: E1127 09:44:00.261279 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ad34b8b5148d73ba7a1d4d233cd94e40511831af3da591fa83dd75b18a287a2b is running failed: container process not found" containerID="ad34b8b5148d73ba7a1d4d233cd94e40511831af3da591fa83dd75b18a287a2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 27 09:44:00 crc kubenswrapper[4971]: E1127 09:44:00.261835 4971 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ad34b8b5148d73ba7a1d4d233cd94e40511831af3da591fa83dd75b18a287a2b is running failed: 
container process not found" containerID="ad34b8b5148d73ba7a1d4d233cd94e40511831af3da591fa83dd75b18a287a2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 27 09:44:00 crc kubenswrapper[4971]: E1127 09:44:00.261874 4971 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ad34b8b5148d73ba7a1d4d233cd94e40511831af3da591fa83dd75b18a287a2b is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f" containerName="nova-cell0-conductor-conductor" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.364888 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.371032 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.415088 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36b2d44b-b297-4a2a-b166-841b28acf914-combined-ca-bundle\") pod \"36b2d44b-b297-4a2a-b166-841b28acf914\" (UID: \"36b2d44b-b297-4a2a-b166-841b28acf914\") " Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.415232 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f-config-data\") pod \"9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f\" (UID: \"9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f\") " Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.415561 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmqxr\" (UniqueName: \"kubernetes.io/projected/9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f-kube-api-access-xmqxr\") pod \"9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f\" (UID: \"9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f\") " Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.415676 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36b2d44b-b297-4a2a-b166-841b28acf914-config-data\") pod \"36b2d44b-b297-4a2a-b166-841b28acf914\" (UID: \"36b2d44b-b297-4a2a-b166-841b28acf914\") " Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.418819 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f-combined-ca-bundle\") pod \"9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f\" (UID: \"9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f\") " Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.418907 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxznk\" (UniqueName: \"kubernetes.io/projected/36b2d44b-b297-4a2a-b166-841b28acf914-kube-api-access-kxznk\") pod \"36b2d44b-b297-4a2a-b166-841b28acf914\" (UID: \"36b2d44b-b297-4a2a-b166-841b28acf914\") " Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.426709 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36b2d44b-b297-4a2a-b166-841b28acf914-kube-api-access-kxznk" (OuterVolumeSpecName: "kube-api-access-kxznk") pod "36b2d44b-b297-4a2a-b166-841b28acf914" (UID: "36b2d44b-b297-4a2a-b166-841b28acf914"). InnerVolumeSpecName "kube-api-access-kxznk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.434340 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f-kube-api-access-xmqxr" (OuterVolumeSpecName: "kube-api-access-xmqxr") pod "9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f" (UID: "9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f"). InnerVolumeSpecName "kube-api-access-xmqxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.461465 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f-config-data" (OuterVolumeSpecName: "config-data") pod "9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f" (UID: "9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.477255 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f" (UID: "9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.480306 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36b2d44b-b297-4a2a-b166-841b28acf914-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36b2d44b-b297-4a2a-b166-841b28acf914" (UID: "36b2d44b-b297-4a2a-b166-841b28acf914"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.488843 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36b2d44b-b297-4a2a-b166-841b28acf914-config-data" (OuterVolumeSpecName: "config-data") pod "36b2d44b-b297-4a2a-b166-841b28acf914" (UID: "36b2d44b-b297-4a2a-b166-841b28acf914"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.525812 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.525861 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmqxr\" (UniqueName: \"kubernetes.io/projected/9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f-kube-api-access-xmqxr\") on node \"crc\" DevicePath \"\"" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.525879 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36b2d44b-b297-4a2a-b166-841b28acf914-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.525891 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.525910 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxznk\" (UniqueName: \"kubernetes.io/projected/36b2d44b-b297-4a2a-b166-841b28acf914-kube-api-access-kxznk\") on node \"crc\" DevicePath \"\"" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.525923 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36b2d44b-b297-4a2a-b166-841b28acf914-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.561593 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d59e37d-03b0-47d2-9e61-7976f397d2a8" path="/var/lib/kubelet/pods/6d59e37d-03b0-47d2-9e61-7976f397d2a8/volumes" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.608456 4971 scope.go:117] "RemoveContainer" containerID="b9495126122a5659954daa77d5bb7bfa0a719ee215ba76a44ce1102e2ca3d70c" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.640979 4971 scope.go:117] "RemoveContainer" containerID="ad34b8b5148d73ba7a1d4d233cd94e40511831af3da591fa83dd75b18a287a2b" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.697509 4971 scope.go:117] "RemoveContainer" containerID="016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.747181 4971 generic.go:334] "Generic (PLEG): container finished" podID="36b2d44b-b297-4a2a-b166-841b28acf914" containerID="016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253" exitCode=0 Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.747251 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"36b2d44b-b297-4a2a-b166-841b28acf914","Type":"ContainerDied","Data":"016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253"} Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.747288 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"36b2d44b-b297-4a2a-b166-841b28acf914","Type":"ContainerDied","Data":"0b759b6c67f492898f95dce6ce923c589a6f49add133afe599b81ff10fa29a90"} Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.747312 4971 scope.go:117] "RemoveContainer" containerID="016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253" Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.754304 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="55562639-e816-4faf-8c2b-7124cd156a85" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.94:8775/\": read tcp 10.217.0.2:42812->10.217.1.94:8775: read: connection reset by peer"
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.754704 4971 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="55562639-e816-4faf-8c2b-7124cd156a85" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.94:8775/\": read tcp 10.217.0.2:42802->10.217.1.94:8775: read: connection reset by peer"
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.762833 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f","Type":"ContainerDied","Data":"ad34b8b5148d73ba7a1d4d233cd94e40511831af3da591fa83dd75b18a287a2b"}
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.762900 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f","Type":"ContainerDied","Data":"27427448fa0d84dc131238024ee11c290a58d391f4c017bfd9effe5fcbc03ccc"}
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.762988 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.811599 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Nov 27 09:44:00 crc kubenswrapper[4971]: E1127 09:44:00.819941 4971 log.go:32] "RemoveContainer from runtime service failed" err="rpc error: code = Unknown desc = failed to delete container k8s_nova-scheduler-scheduler_nova-scheduler-0_openstack_36b2d44b-b297-4a2a-b166-841b28acf914_0 in pod sandbox 0b759b6c67f492898f95dce6ce923c589a6f49add133afe599b81ff10fa29a90 from index: no such id: '016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253'" containerID="016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253"
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.820057 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253"} err="rpc error: code = Unknown desc = failed to delete container k8s_nova-scheduler-scheduler_nova-scheduler-0_openstack_36b2d44b-b297-4a2a-b166-841b28acf914_0 in pod sandbox 0b759b6c67f492898f95dce6ce923c589a6f49add133afe599b81ff10fa29a90 from index: no such id: '016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253'"
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.820105 4971 scope.go:117] "RemoveContainer" containerID="016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253"
Nov 27 09:44:00 crc kubenswrapper[4971]: E1127 09:44:00.831901 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253\": container with ID starting with 016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253 not found: ID does not exist" containerID="016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253"
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.831974 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253"} err="failed to get container status \"016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253\": rpc error: code = NotFound desc = could not find container \"016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253\": container with ID starting with 016d6d9d10b45a1274f9cc99c96b25c645d557075858776c04cdf40371a30253 not found: ID does not exist"
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.849587 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.881607 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.907681 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.934511 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Nov 27 09:44:00 crc kubenswrapper[4971]: E1127 09:44:00.935151 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f" containerName="nova-cell0-conductor-conductor"
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.935172 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f" containerName="nova-cell0-conductor-conductor"
Nov 27 09:44:00 crc kubenswrapper[4971]: E1127 09:44:00.935194 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36b2d44b-b297-4a2a-b166-841b28acf914" containerName="nova-scheduler-scheduler"
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.935201 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="36b2d44b-b297-4a2a-b166-841b28acf914" containerName="nova-scheduler-scheduler"
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.935432 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f" containerName="nova-cell0-conductor-conductor"
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.935469 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="36b2d44b-b297-4a2a-b166-841b28acf914" containerName="nova-scheduler-scheduler"
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.937313 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.941585 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.947210 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Nov 27 09:44:00 crc kubenswrapper[4971]: I1127 09:44:00.974593 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.083995 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lx7r8\" (UniqueName: \"kubernetes.io/projected/c7f9a94d-de60-4ba6-b2b4-7d7de72108ac-kube-api-access-lx7r8\") pod \"nova-scheduler-0\" (UID: \"c7f9a94d-de60-4ba6-b2b4-7d7de72108ac\") " pod="openstack/nova-scheduler-0"
Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.084248 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7f9a94d-de60-4ba6-b2b4-7d7de72108ac-config-data\") pod \"nova-scheduler-0\" (UID: \"c7f9a94d-de60-4ba6-b2b4-7d7de72108ac\") " pod="openstack/nova-scheduler-0"
Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.084338 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7f9a94d-de60-4ba6-b2b4-7d7de72108ac-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c7f9a94d-de60-4ba6-b2b4-7d7de72108ac\") " pod="openstack/nova-scheduler-0"
Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.091642 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.110485 4971 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.125409 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.188255 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7f9a94d-de60-4ba6-b2b4-7d7de72108ac-config-data\") pod \"nova-scheduler-0\" (UID: \"c7f9a94d-de60-4ba6-b2b4-7d7de72108ac\") " pod="openstack/nova-scheduler-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.189589 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7f9a94d-de60-4ba6-b2b4-7d7de72108ac-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c7f9a94d-de60-4ba6-b2b4-7d7de72108ac\") " pod="openstack/nova-scheduler-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.190025 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lx7r8\" (UniqueName: \"kubernetes.io/projected/c7f9a94d-de60-4ba6-b2b4-7d7de72108ac-kube-api-access-lx7r8\") pod \"nova-scheduler-0\" (UID: \"c7f9a94d-de60-4ba6-b2b4-7d7de72108ac\") " pod="openstack/nova-scheduler-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.200575 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7f9a94d-de60-4ba6-b2b4-7d7de72108ac-config-data\") pod \"nova-scheduler-0\" (UID: \"c7f9a94d-de60-4ba6-b2b4-7d7de72108ac\") " pod="openstack/nova-scheduler-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.211202 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lx7r8\" (UniqueName: \"kubernetes.io/projected/c7f9a94d-de60-4ba6-b2b4-7d7de72108ac-kube-api-access-lx7r8\") pod \"nova-scheduler-0\" (UID: \"c7f9a94d-de60-4ba6-b2b4-7d7de72108ac\") " pod="openstack/nova-scheduler-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.211597 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7f9a94d-de60-4ba6-b2b4-7d7de72108ac-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c7f9a94d-de60-4ba6-b2b4-7d7de72108ac\") " pod="openstack/nova-scheduler-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.214360 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.294938 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59bn4\" (UniqueName: \"kubernetes.io/projected/30642a33-6ef3-4ef7-9598-06fad84e16b1-kube-api-access-59bn4\") pod \"nova-cell0-conductor-0\" (UID: \"30642a33-6ef3-4ef7-9598-06fad84e16b1\") " pod="openstack/nova-cell0-conductor-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.295445 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30642a33-6ef3-4ef7-9598-06fad84e16b1-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"30642a33-6ef3-4ef7-9598-06fad84e16b1\") " pod="openstack/nova-cell0-conductor-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.295546 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30642a33-6ef3-4ef7-9598-06fad84e16b1-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"30642a33-6ef3-4ef7-9598-06fad84e16b1\") " pod="openstack/nova-cell0-conductor-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.397923 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30642a33-6ef3-4ef7-9598-06fad84e16b1-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"30642a33-6ef3-4ef7-9598-06fad84e16b1\") " pod="openstack/nova-cell0-conductor-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.398114 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59bn4\" (UniqueName: \"kubernetes.io/projected/30642a33-6ef3-4ef7-9598-06fad84e16b1-kube-api-access-59bn4\") pod \"nova-cell0-conductor-0\" (UID: \"30642a33-6ef3-4ef7-9598-06fad84e16b1\") " pod="openstack/nova-cell0-conductor-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.398162 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30642a33-6ef3-4ef7-9598-06fad84e16b1-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"30642a33-6ef3-4ef7-9598-06fad84e16b1\") " pod="openstack/nova-cell0-conductor-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.402242 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30642a33-6ef3-4ef7-9598-06fad84e16b1-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"30642a33-6ef3-4ef7-9598-06fad84e16b1\") " pod="openstack/nova-cell0-conductor-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.406913 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30642a33-6ef3-4ef7-9598-06fad84e16b1-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"30642a33-6ef3-4ef7-9598-06fad84e16b1\") " pod="openstack/nova-cell0-conductor-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.417501 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59bn4\" (UniqueName: \"kubernetes.io/projected/30642a33-6ef3-4ef7-9598-06fad84e16b1-kube-api-access-59bn4\") pod \"nova-cell0-conductor-0\" (UID: \"30642a33-6ef3-4ef7-9598-06fad84e16b1\") " pod="openstack/nova-cell0-conductor-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.420681 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.478514 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.492285 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.603143 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55562639-e816-4faf-8c2b-7124cd156a85-logs\") pod \"55562639-e816-4faf-8c2b-7124cd156a85\" (UID: \"55562639-e816-4faf-8c2b-7124cd156a85\") " Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.603442 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqfsn\" (UniqueName: \"kubernetes.io/projected/55562639-e816-4faf-8c2b-7124cd156a85-kube-api-access-tqfsn\") pod \"55562639-e816-4faf-8c2b-7124cd156a85\" (UID: \"55562639-e816-4faf-8c2b-7124cd156a85\") " Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.603471 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55562639-e816-4faf-8c2b-7124cd156a85-config-data\") pod \"55562639-e816-4faf-8c2b-7124cd156a85\" (UID: \"55562639-e816-4faf-8c2b-7124cd156a85\") " Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.603570 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55562639-e816-4faf-8c2b-7124cd156a85-combined-ca-bundle\") pod \"55562639-e816-4faf-8c2b-7124cd156a85\" (UID: \"55562639-e816-4faf-8c2b-7124cd156a85\") " Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.611117 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55562639-e816-4faf-8c2b-7124cd156a85-logs" (OuterVolumeSpecName: "logs") pod "55562639-e816-4faf-8c2b-7124cd156a85" (UID: "55562639-e816-4faf-8c2b-7124cd156a85"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.627803 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55562639-e816-4faf-8c2b-7124cd156a85-kube-api-access-tqfsn" (OuterVolumeSpecName: "kube-api-access-tqfsn") pod "55562639-e816-4faf-8c2b-7124cd156a85" (UID: "55562639-e816-4faf-8c2b-7124cd156a85"). InnerVolumeSpecName "kube-api-access-tqfsn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.652682 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55562639-e816-4faf-8c2b-7124cd156a85-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "55562639-e816-4faf-8c2b-7124cd156a85" (UID: "55562639-e816-4faf-8c2b-7124cd156a85"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.670681 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55562639-e816-4faf-8c2b-7124cd156a85-config-data" (OuterVolumeSpecName: "config-data") pod "55562639-e816-4faf-8c2b-7124cd156a85" (UID: "55562639-e816-4faf-8c2b-7124cd156a85"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.701965 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.708350 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55562639-e816-4faf-8c2b-7124cd156a85-logs\") on node \"crc\" DevicePath \"\"" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.708393 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqfsn\" (UniqueName: \"kubernetes.io/projected/55562639-e816-4faf-8c2b-7124cd156a85-kube-api-access-tqfsn\") on node \"crc\" DevicePath \"\"" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.708403 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55562639-e816-4faf-8c2b-7124cd156a85-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.708412 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55562639-e816-4faf-8c2b-7124cd156a85-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.812912 4971 generic.go:334] "Generic (PLEG): container finished" podID="5c011972-ac6d-497d-9fd5-b5eed11ef507" containerID="5049d8a6c9ddf9e828beac44153eb7f5791e0a7cb1ebf7c2a3377062621684d3" exitCode=0 Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.812984 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c011972-ac6d-497d-9fd5-b5eed11ef507-combined-ca-bundle\") pod \"5c011972-ac6d-497d-9fd5-b5eed11ef507\" (UID: \"5c011972-ac6d-497d-9fd5-b5eed11ef507\") " Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.813036 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5c011972-ac6d-497d-9fd5-b5eed11ef507","Type":"ContainerDied","Data":"5049d8a6c9ddf9e828beac44153eb7f5791e0a7cb1ebf7c2a3377062621684d3"} Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.813074 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5c011972-ac6d-497d-9fd5-b5eed11ef507","Type":"ContainerDied","Data":"bde6d0a98a06bd0e77828dade5f46fd57295440557006a26c3b1d9005be9ab07"} Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.813103 4971 scope.go:117] "RemoveContainer" containerID="5049d8a6c9ddf9e828beac44153eb7f5791e0a7cb1ebf7c2a3377062621684d3" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.813147 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c011972-ac6d-497d-9fd5-b5eed11ef507-logs\") pod \"5c011972-ac6d-497d-9fd5-b5eed11ef507\" (UID: \"5c011972-ac6d-497d-9fd5-b5eed11ef507\") " Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.813303 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.813379 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c011972-ac6d-497d-9fd5-b5eed11ef507-config-data\") pod \"5c011972-ac6d-497d-9fd5-b5eed11ef507\" (UID: \"5c011972-ac6d-497d-9fd5-b5eed11ef507\") " Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.813547 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvkj7\" (UniqueName: \"kubernetes.io/projected/5c011972-ac6d-497d-9fd5-b5eed11ef507-kube-api-access-rvkj7\") pod \"5c011972-ac6d-497d-9fd5-b5eed11ef507\" (UID: \"5c011972-ac6d-497d-9fd5-b5eed11ef507\") " Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.815815 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c011972-ac6d-497d-9fd5-b5eed11ef507-logs" (OuterVolumeSpecName: "logs") pod "5c011972-ac6d-497d-9fd5-b5eed11ef507" (UID: "5c011972-ac6d-497d-9fd5-b5eed11ef507"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.828000 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c011972-ac6d-497d-9fd5-b5eed11ef507-kube-api-access-rvkj7" (OuterVolumeSpecName: "kube-api-access-rvkj7") pod "5c011972-ac6d-497d-9fd5-b5eed11ef507" (UID: "5c011972-ac6d-497d-9fd5-b5eed11ef507"). InnerVolumeSpecName "kube-api-access-rvkj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.838715 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6d70faa6-5591-4e41-9020-2d81ece79575","Type":"ContainerStarted","Data":"295adedc4519d08feef81f40df98dcf78d39983e725ddce360b7b3702e1a3319"} Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.838779 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6d70faa6-5591-4e41-9020-2d81ece79575","Type":"ContainerStarted","Data":"16571fa1d0c7c8851ff8ea882f0be22415f19f7ec9741e28327f74096b5152d9"} Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.838999 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.856215 4971 generic.go:334] "Generic (PLEG): container finished" podID="55562639-e816-4faf-8c2b-7124cd156a85" containerID="68315fba2e7808c887d15af24f2991b56487f29af6f65f6f9debb64df02b0d37" exitCode=0 Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.856282 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"55562639-e816-4faf-8c2b-7124cd156a85","Type":"ContainerDied","Data":"68315fba2e7808c887d15af24f2991b56487f29af6f65f6f9debb64df02b0d37"} Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.856333 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"55562639-e816-4faf-8c2b-7124cd156a85","Type":"ContainerDied","Data":"910f58a10b6ff346cf763441827931d43bfe0a5f1973cf8bc559ff97e96a0d6e"} Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.856423 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.891991 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.891960229 podStartE2EDuration="2.891960229s" podCreationTimestamp="2025-11-27 09:43:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 09:44:01.873246644 +0000 UTC m=+10280.065290562" watchObservedRunningTime="2025-11-27 09:44:01.891960229 +0000 UTC m=+10280.084004167" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.898931 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c011972-ac6d-497d-9fd5-b5eed11ef507-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5c011972-ac6d-497d-9fd5-b5eed11ef507" (UID: "5c011972-ac6d-497d-9fd5-b5eed11ef507"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.902123 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c011972-ac6d-497d-9fd5-b5eed11ef507-config-data" (OuterVolumeSpecName: "config-data") pod "5c011972-ac6d-497d-9fd5-b5eed11ef507" (UID: "5c011972-ac6d-497d-9fd5-b5eed11ef507"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.909712 4971 scope.go:117] "RemoveContainer" containerID="cda5e5686740453b3094623d07ae2dcf3a09ceba6d1b35aa43b64e96455b225e" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.917473 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c011972-ac6d-497d-9fd5-b5eed11ef507-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.917502 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvkj7\" (UniqueName: \"kubernetes.io/projected/5c011972-ac6d-497d-9fd5-b5eed11ef507-kube-api-access-rvkj7\") on node \"crc\" DevicePath \"\"" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.917516 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c011972-ac6d-497d-9fd5-b5eed11ef507-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.917547 4971 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c011972-ac6d-497d-9fd5-b5eed11ef507-logs\") on node \"crc\" DevicePath \"\"" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.924584 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.951911 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.960487 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 27 09:44:01 crc kubenswrapper[4971]: E1127 09:44:01.961233 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c011972-ac6d-497d-9fd5-b5eed11ef507" containerName="nova-api-log" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.961254 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c011972-ac6d-497d-9fd5-b5eed11ef507" 
containerName="nova-api-log" Nov 27 09:44:01 crc kubenswrapper[4971]: E1127 09:44:01.961280 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c011972-ac6d-497d-9fd5-b5eed11ef507" containerName="nova-api-api" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.961288 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c011972-ac6d-497d-9fd5-b5eed11ef507" containerName="nova-api-api" Nov 27 09:44:01 crc kubenswrapper[4971]: E1127 09:44:01.961341 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55562639-e816-4faf-8c2b-7124cd156a85" containerName="nova-metadata-metadata" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.961352 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="55562639-e816-4faf-8c2b-7124cd156a85" containerName="nova-metadata-metadata" Nov 27 09:44:01 crc kubenswrapper[4971]: E1127 09:44:01.961377 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55562639-e816-4faf-8c2b-7124cd156a85" containerName="nova-metadata-log" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.961384 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="55562639-e816-4faf-8c2b-7124cd156a85" containerName="nova-metadata-log" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.961718 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="55562639-e816-4faf-8c2b-7124cd156a85" containerName="nova-metadata-metadata" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.961739 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="55562639-e816-4faf-8c2b-7124cd156a85" containerName="nova-metadata-log" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.961750 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c011972-ac6d-497d-9fd5-b5eed11ef507" containerName="nova-api-log" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.961772 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c011972-ac6d-497d-9fd5-b5eed11ef507" containerName="nova-api-api" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.963696 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.968162 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.980192 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.980735 4971 scope.go:117] "RemoveContainer" containerID="5049d8a6c9ddf9e828beac44153eb7f5791e0a7cb1ebf7c2a3377062621684d3" Nov 27 09:44:01 crc kubenswrapper[4971]: E1127 09:44:01.983261 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5049d8a6c9ddf9e828beac44153eb7f5791e0a7cb1ebf7c2a3377062621684d3\": container with ID starting with 5049d8a6c9ddf9e828beac44153eb7f5791e0a7cb1ebf7c2a3377062621684d3 not found: ID does not exist" containerID="5049d8a6c9ddf9e828beac44153eb7f5791e0a7cb1ebf7c2a3377062621684d3" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.983303 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5049d8a6c9ddf9e828beac44153eb7f5791e0a7cb1ebf7c2a3377062621684d3"} err="failed to get container status \"5049d8a6c9ddf9e828beac44153eb7f5791e0a7cb1ebf7c2a3377062621684d3\": rpc error: code = NotFound desc = could not find container \"5049d8a6c9ddf9e828beac44153eb7f5791e0a7cb1ebf7c2a3377062621684d3\": container with ID starting with 5049d8a6c9ddf9e828beac44153eb7f5791e0a7cb1ebf7c2a3377062621684d3 not found: ID does not exist" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.983336 4971 scope.go:117] "RemoveContainer" containerID="cda5e5686740453b3094623d07ae2dcf3a09ceba6d1b35aa43b64e96455b225e" Nov 27 09:44:01 crc kubenswrapper[4971]: E1127 09:44:01.983772 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cda5e5686740453b3094623d07ae2dcf3a09ceba6d1b35aa43b64e96455b225e\": container with ID starting with cda5e5686740453b3094623d07ae2dcf3a09ceba6d1b35aa43b64e96455b225e not found: ID does not exist" containerID="cda5e5686740453b3094623d07ae2dcf3a09ceba6d1b35aa43b64e96455b225e" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.983800 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cda5e5686740453b3094623d07ae2dcf3a09ceba6d1b35aa43b64e96455b225e"} err="failed to get container status \"cda5e5686740453b3094623d07ae2dcf3a09ceba6d1b35aa43b64e96455b225e\": rpc error: code = NotFound desc = could not find container \"cda5e5686740453b3094623d07ae2dcf3a09ceba6d1b35aa43b64e96455b225e\": container with ID starting with cda5e5686740453b3094623d07ae2dcf3a09ceba6d1b35aa43b64e96455b225e not found: ID does not exist" Nov 27 09:44:01 crc kubenswrapper[4971]: I1127 09:44:01.983816 4971 scope.go:117] "RemoveContainer" containerID="68315fba2e7808c887d15af24f2991b56487f29af6f65f6f9debb64df02b0d37" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.078594 4971 scope.go:117] "RemoveContainer" containerID="1df28c45fcfa92c627f267b10956aa0fae302e93e9a77d39a043fc9873783e9d" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.106148 4971 scope.go:117] "RemoveContainer" containerID="68315fba2e7808c887d15af24f2991b56487f29af6f65f6f9debb64df02b0d37" Nov 27 09:44:02 crc kubenswrapper[4971]: E1127 09:44:02.106797 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"68315fba2e7808c887d15af24f2991b56487f29af6f65f6f9debb64df02b0d37\": container with ID starting with 68315fba2e7808c887d15af24f2991b56487f29af6f65f6f9debb64df02b0d37 not found: ID does not exist" containerID="68315fba2e7808c887d15af24f2991b56487f29af6f65f6f9debb64df02b0d37" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.106859 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68315fba2e7808c887d15af24f2991b56487f29af6f65f6f9debb64df02b0d37"} err="failed to get container status \"68315fba2e7808c887d15af24f2991b56487f29af6f65f6f9debb64df02b0d37\": rpc error: code = NotFound desc = could not find container \"68315fba2e7808c887d15af24f2991b56487f29af6f65f6f9debb64df02b0d37\": container with ID starting with 68315fba2e7808c887d15af24f2991b56487f29af6f65f6f9debb64df02b0d37 not found: ID does not exist" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.106894 4971 scope.go:117] "RemoveContainer" containerID="1df28c45fcfa92c627f267b10956aa0fae302e93e9a77d39a043fc9873783e9d" Nov 27 09:44:02 crc kubenswrapper[4971]: E1127 09:44:02.107603 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1df28c45fcfa92c627f267b10956aa0fae302e93e9a77d39a043fc9873783e9d\": container with ID starting with 1df28c45fcfa92c627f267b10956aa0fae302e93e9a77d39a043fc9873783e9d not found: ID does not exist" containerID="1df28c45fcfa92c627f267b10956aa0fae302e93e9a77d39a043fc9873783e9d" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.107651 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1df28c45fcfa92c627f267b10956aa0fae302e93e9a77d39a043fc9873783e9d"} err="failed to get container status \"1df28c45fcfa92c627f267b10956aa0fae302e93e9a77d39a043fc9873783e9d\": rpc error: code = NotFound desc = could not find container \"1df28c45fcfa92c627f267b10956aa0fae302e93e9a77d39a043fc9873783e9d\": container with ID starting with 1df28c45fcfa92c627f267b10956aa0fae302e93e9a77d39a043fc9873783e9d not found: ID does not exist" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.122484 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ba702d1-dbdd-4396-9105-0f258581ec30-logs\") pod \"nova-metadata-0\" (UID: \"2ba702d1-dbdd-4396-9105-0f258581ec30\") " pod="openstack/nova-metadata-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.122619 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwp42\" (UniqueName: \"kubernetes.io/projected/2ba702d1-dbdd-4396-9105-0f258581ec30-kube-api-access-vwp42\") pod \"nova-metadata-0\" (UID: \"2ba702d1-dbdd-4396-9105-0f258581ec30\") " pod="openstack/nova-metadata-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.122744 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ba702d1-dbdd-4396-9105-0f258581ec30-config-data\") pod \"nova-metadata-0\" (UID: \"2ba702d1-dbdd-4396-9105-0f258581ec30\") " pod="openstack/nova-metadata-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.122801 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ba702d1-dbdd-4396-9105-0f258581ec30-combined-ca-bundle\") pod 
\"nova-metadata-0\" (UID: \"2ba702d1-dbdd-4396-9105-0f258581ec30\") " pod="openstack/nova-metadata-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.178809 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.202211 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.217826 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.221152 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.223515 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.225603 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ba702d1-dbdd-4396-9105-0f258581ec30-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2ba702d1-dbdd-4396-9105-0f258581ec30\") " pod="openstack/nova-metadata-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.225730 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ba702d1-dbdd-4396-9105-0f258581ec30-logs\") pod \"nova-metadata-0\" (UID: \"2ba702d1-dbdd-4396-9105-0f258581ec30\") " pod="openstack/nova-metadata-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.225966 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwp42\" (UniqueName: \"kubernetes.io/projected/2ba702d1-dbdd-4396-9105-0f258581ec30-kube-api-access-vwp42\") pod \"nova-metadata-0\" (UID: \"2ba702d1-dbdd-4396-9105-0f258581ec30\") " pod="openstack/nova-metadata-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.226068 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ba702d1-dbdd-4396-9105-0f258581ec30-config-data\") pod \"nova-metadata-0\" (UID: \"2ba702d1-dbdd-4396-9105-0f258581ec30\") " pod="openstack/nova-metadata-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.227428 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ba702d1-dbdd-4396-9105-0f258581ec30-logs\") pod \"nova-metadata-0\" (UID: \"2ba702d1-dbdd-4396-9105-0f258581ec30\") " pod="openstack/nova-metadata-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.232621 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.254756 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ba702d1-dbdd-4396-9105-0f258581ec30-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2ba702d1-dbdd-4396-9105-0f258581ec30\") " pod="openstack/nova-metadata-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.255725 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ba702d1-dbdd-4396-9105-0f258581ec30-config-data\") pod \"nova-metadata-0\" (UID: \"2ba702d1-dbdd-4396-9105-0f258581ec30\") " pod="openstack/nova-metadata-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 
09:44:02.258244 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwp42\" (UniqueName: \"kubernetes.io/projected/2ba702d1-dbdd-4396-9105-0f258581ec30-kube-api-access-vwp42\") pod \"nova-metadata-0\" (UID: \"2ba702d1-dbdd-4396-9105-0f258581ec30\") " pod="openstack/nova-metadata-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.326530 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.328233 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84\") " pod="openstack/nova-api-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.328463 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84-logs\") pod \"nova-api-0\" (UID: \"edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84\") " pod="openstack/nova-api-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.328584 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdc5l\" (UniqueName: \"kubernetes.io/projected/edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84-kube-api-access-mdc5l\") pod \"nova-api-0\" (UID: \"edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84\") " pod="openstack/nova-api-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.328690 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84-config-data\") pod \"nova-api-0\" (UID: \"edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84\") " pod="openstack/nova-api-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.352499 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.364680 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.434908 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84-logs\") pod \"nova-api-0\" (UID: \"edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84\") " pod="openstack/nova-api-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.435323 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdc5l\" (UniqueName: \"kubernetes.io/projected/edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84-kube-api-access-mdc5l\") pod \"nova-api-0\" (UID: \"edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84\") " pod="openstack/nova-api-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.435415 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84-config-data\") pod \"nova-api-0\" (UID: \"edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84\") " pod="openstack/nova-api-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.435497 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84\") " pod="openstack/nova-api-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.435497 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84-logs\") pod \"nova-api-0\" (UID: \"edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84\") " pod="openstack/nova-api-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.440853 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84\") " pod="openstack/nova-api-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.441055 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84-config-data\") pod \"nova-api-0\" (UID: \"edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84\") " pod="openstack/nova-api-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.460208 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdc5l\" (UniqueName: \"kubernetes.io/projected/edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84-kube-api-access-mdc5l\") pod \"nova-api-0\" (UID: \"edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84\") " pod="openstack/nova-api-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.469401 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.574867 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36b2d44b-b297-4a2a-b166-841b28acf914" path="/var/lib/kubelet/pods/36b2d44b-b297-4a2a-b166-841b28acf914/volumes" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.575501 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55562639-e816-4faf-8c2b-7124cd156a85" path="/var/lib/kubelet/pods/55562639-e816-4faf-8c2b-7124cd156a85/volumes" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.582429 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c011972-ac6d-497d-9fd5-b5eed11ef507" path="/var/lib/kubelet/pods/5c011972-ac6d-497d-9fd5-b5eed11ef507/volumes" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.583675 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f" path="/var/lib/kubelet/pods/9b8e735a-dd6c-4623-bb44-79dc8f4bcb4f/volumes" Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.871669 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c7f9a94d-de60-4ba6-b2b4-7d7de72108ac","Type":"ContainerStarted","Data":"07837a42e0bb429493887f61226c9c6d463e9656b667ddcb898e52d26cb3b79d"} Nov 27 09:44:02 crc kubenswrapper[4971]: I1127 09:44:02.875523 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"30642a33-6ef3-4ef7-9598-06fad84e16b1","Type":"ContainerStarted","Data":"f9f38d6659a61e1fd70364e8574747717cf7fd7dc97dc7f548ae2b7e4063c4a3"} Nov 27 09:44:03 crc kubenswrapper[4971]: I1127 09:44:03.099081 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 27 09:44:03 crc kubenswrapper[4971]: W1127 09:44:03.099879 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ba702d1_dbdd_4396_9105_0f258581ec30.slice/crio-491fad87825deca484c2095fe9589d7c9a711b08a2a4ac6011e54966877ef6c5 WatchSource:0}: Error finding container 491fad87825deca484c2095fe9589d7c9a711b08a2a4ac6011e54966877ef6c5: Status 404 returned error can't find the container with id 491fad87825deca484c2095fe9589d7c9a711b08a2a4ac6011e54966877ef6c5 Nov 27 09:44:03 crc kubenswrapper[4971]: W1127 09:44:03.219961 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podedd3cc22_05c2_4f9d_a2ef_df5f9fcdac84.slice/crio-a1e722669f695282b807e42a634fe19e03159847693f53444a64c8ed28cbba93 WatchSource:0}: Error finding container a1e722669f695282b807e42a634fe19e03159847693f53444a64c8ed28cbba93: Status 404 returned error can't find the container with id a1e722669f695282b807e42a634fe19e03159847693f53444a64c8ed28cbba93 Nov 27 09:44:03 crc kubenswrapper[4971]: I1127 09:44:03.244901 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 27 09:44:03 crc kubenswrapper[4971]: I1127 09:44:03.893441 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c7f9a94d-de60-4ba6-b2b4-7d7de72108ac","Type":"ContainerStarted","Data":"3e9e340737e1233aa90cfc9b3552d000738f56882a7cd5fdc14270aabbebe130"} Nov 27 09:44:03 crc kubenswrapper[4971]: I1127 09:44:03.896756 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84","Type":"ContainerStarted","Data":"b174bbf6156c43c8f3c58a41a444e1f1a7bee22314bb5911210d6a532afe285b"} Nov 27 09:44:03 crc kubenswrapper[4971]: I1127 09:44:03.896820 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84","Type":"ContainerStarted","Data":"3899edd744643da29b16261323ca1b467e0ed98907ed9c69f34edcf668bc2b8e"} Nov 27 09:44:03 crc kubenswrapper[4971]: I1127 09:44:03.896837 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84","Type":"ContainerStarted","Data":"a1e722669f695282b807e42a634fe19e03159847693f53444a64c8ed28cbba93"} Nov 27 09:44:03 crc kubenswrapper[4971]: I1127 09:44:03.904403 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"30642a33-6ef3-4ef7-9598-06fad84e16b1","Type":"ContainerStarted","Data":"caac6e862319f8b38c54fefa62c81899b0904f23523dc9cde0df871069e1e32a"} Nov 27 09:44:03 crc kubenswrapper[4971]: I1127 09:44:03.904690 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Nov 27 09:44:03 crc kubenswrapper[4971]: I1127 09:44:03.909441 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2ba702d1-dbdd-4396-9105-0f258581ec30","Type":"ContainerStarted","Data":"8de4c9644fd236981f51278f05371c8629b01b2d2913efeadc631c9d1eaae3fd"} Nov 27 09:44:03 crc kubenswrapper[4971]: I1127 09:44:03.909500 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2ba702d1-dbdd-4396-9105-0f258581ec30","Type":"ContainerStarted","Data":"c3de885a1b42bacb8bf4f2ef36899b6f5fa651e1d5d603ef4fa2612d7b3f51c8"} Nov 27 09:44:03 crc kubenswrapper[4971]: I1127 09:44:03.909517 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2ba702d1-dbdd-4396-9105-0f258581ec30","Type":"ContainerStarted","Data":"491fad87825deca484c2095fe9589d7c9a711b08a2a4ac6011e54966877ef6c5"} Nov 27 09:44:03 crc kubenswrapper[4971]: I1127 09:44:03.928452 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.92842217 podStartE2EDuration="3.92842217s" podCreationTimestamp="2025-11-27 09:44:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 09:44:03.910897889 +0000 UTC m=+10282.102941817" watchObservedRunningTime="2025-11-27 09:44:03.92842217 +0000 UTC m=+10282.120466098" Nov 27 09:44:03 crc kubenswrapper[4971]: I1127 09:44:03.949950 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=3.949921314 podStartE2EDuration="3.949921314s" podCreationTimestamp="2025-11-27 09:44:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 09:44:03.937289183 +0000 UTC m=+10282.129333121" watchObservedRunningTime="2025-11-27 09:44:03.949921314 +0000 UTC m=+10282.141965252" Nov 27 09:44:03 crc kubenswrapper[4971]: I1127 09:44:03.982378 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.982346091 podStartE2EDuration="2.982346091s" podCreationTimestamp="2025-11-27 09:44:01 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 09:44:03.961615748 +0000 UTC m=+10282.153659686" watchObservedRunningTime="2025-11-27 09:44:03.982346091 +0000 UTC m=+10282.174390029" Nov 27 09:44:04 crc kubenswrapper[4971]: I1127 09:44:04.008664 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.008636092 podStartE2EDuration="2.008636092s" podCreationTimestamp="2025-11-27 09:44:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 09:44:03.992324176 +0000 UTC m=+10282.184368114" watchObservedRunningTime="2025-11-27 09:44:04.008636092 +0000 UTC m=+10282.200680020" Nov 27 09:44:06 crc kubenswrapper[4971]: I1127 09:44:06.479324 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 27 09:44:07 crc kubenswrapper[4971]: I1127 09:44:07.365965 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 27 09:44:07 crc kubenswrapper[4971]: I1127 09:44:07.366120 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 27 09:44:10 crc kubenswrapper[4971]: I1127 09:44:10.234202 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Nov 27 09:44:11 crc kubenswrapper[4971]: I1127 09:44:11.479738 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 27 09:44:11 crc kubenswrapper[4971]: I1127 09:44:11.507622 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 27 09:44:11 crc kubenswrapper[4971]: I1127 09:44:11.532892 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Nov 27 09:44:12 crc kubenswrapper[4971]: I1127 09:44:12.077680 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 27 09:44:12 crc kubenswrapper[4971]: I1127 09:44:12.366233 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 27 09:44:12 crc kubenswrapper[4971]: I1127 09:44:12.366325 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 27 09:44:12 crc kubenswrapper[4971]: I1127 09:44:12.470043 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 27 09:44:12 crc kubenswrapper[4971]: I1127 09:44:12.470135 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 27 09:44:13 crc kubenswrapper[4971]: I1127 09:44:13.447823 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="2ba702d1-dbdd-4396-9105-0f258581ec30" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.204:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 27 09:44:13 crc kubenswrapper[4971]: I1127 09:44:13.447763 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="2ba702d1-dbdd-4396-9105-0f258581ec30" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.204:8775/\": context deadline exceeded 
(Client.Timeout exceeded while awaiting headers)"
Nov 27 09:44:13 crc kubenswrapper[4971]: I1127 09:44:13.552996 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.205:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Nov 27 09:44:13 crc kubenswrapper[4971]: I1127 09:44:13.553598 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.205:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Nov 27 09:44:22 crc kubenswrapper[4971]: I1127 09:44:22.383656 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Nov 27 09:44:22 crc kubenswrapper[4971]: I1127 09:44:22.384612 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Nov 27 09:44:22 crc kubenswrapper[4971]: I1127 09:44:22.387716 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Nov 27 09:44:22 crc kubenswrapper[4971]: I1127 09:44:22.388939 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Nov 27 09:44:22 crc kubenswrapper[4971]: I1127 09:44:22.476194 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Nov 27 09:44:22 crc kubenswrapper[4971]: I1127 09:44:22.477491 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Nov 27 09:44:22 crc kubenswrapper[4971]: I1127 09:44:22.478625 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Nov 27 09:44:22 crc kubenswrapper[4971]: I1127 09:44:22.492407 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Nov 27 09:44:23 crc kubenswrapper[4971]: I1127 09:44:23.169501 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Nov 27 09:44:23 crc kubenswrapper[4971]: I1127 09:44:23.174346 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Nov 27 09:44:26 crc kubenswrapper[4971]: I1127 09:44:26.413222 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 09:44:26 crc kubenswrapper[4971]: I1127 09:44:26.414949 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 09:44:56 crc kubenswrapper[4971]: I1127 09:44:56.413940 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 09:44:56 crc kubenswrapper[4971]: I1127 09:44:56.414734 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 09:44:56 crc kubenswrapper[4971]: I1127 09:44:56.414847 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h"
Nov 27 09:44:56 crc kubenswrapper[4971]: I1127 09:44:56.421729 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"31b85eb2444b7a4c49f418f3febd731ee00e77aa6a823a8826a6dfb293a04ccf"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 27 09:44:56 crc kubenswrapper[4971]: I1127 09:44:56.421916 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://31b85eb2444b7a4c49f418f3febd731ee00e77aa6a823a8826a6dfb293a04ccf" gracePeriod=600
Nov 27 09:44:56 crc kubenswrapper[4971]: I1127 09:44:56.747170 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="31b85eb2444b7a4c49f418f3febd731ee00e77aa6a823a8826a6dfb293a04ccf" exitCode=0
Nov 27 09:44:56 crc kubenswrapper[4971]: I1127 09:44:56.747509 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"31b85eb2444b7a4c49f418f3febd731ee00e77aa6a823a8826a6dfb293a04ccf"}
Nov 27 09:44:56 crc kubenswrapper[4971]: I1127 09:44:56.747576 4971 scope.go:117] "RemoveContainer" containerID="5fce088764c403e267f5aed900d4424b9d5037db83a894ca8739f5f5af0bea73"
Nov 27 09:44:57 crc kubenswrapper[4971]: I1127 09:44:57.768771 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6"}
Nov 27 09:45:00 crc kubenswrapper[4971]: I1127 09:45:00.183494 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv"]
Nov 27 09:45:00 crc kubenswrapper[4971]: I1127 09:45:00.210559 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv"
Nov 27 09:45:00 crc kubenswrapper[4971]: I1127 09:45:00.216744 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Nov 27 09:45:00 crc kubenswrapper[4971]: I1127 09:45:00.217378 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Nov 27 09:45:00 crc kubenswrapper[4971]: I1127 09:45:00.257112 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv"]
Nov 27 09:45:00 crc kubenswrapper[4971]: I1127 09:45:00.262685 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c37fb49d-e681-42e1-a2c6-295824b2223c-config-volume\") pod \"collect-profiles-29403945-skfvv\" (UID: \"c37fb49d-e681-42e1-a2c6-295824b2223c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv"
Nov 27 09:45:00 crc kubenswrapper[4971]: I1127 09:45:00.262745 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c37fb49d-e681-42e1-a2c6-295824b2223c-secret-volume\") pod \"collect-profiles-29403945-skfvv\" (UID: \"c37fb49d-e681-42e1-a2c6-295824b2223c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv"
Nov 27 09:45:00 crc kubenswrapper[4971]: I1127 09:45:00.262806 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tm4zr\" (UniqueName: \"kubernetes.io/projected/c37fb49d-e681-42e1-a2c6-295824b2223c-kube-api-access-tm4zr\") pod \"collect-profiles-29403945-skfvv\" (UID: \"c37fb49d-e681-42e1-a2c6-295824b2223c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv"
Nov 27 09:45:00 crc kubenswrapper[4971]: I1127 09:45:00.364661 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c37fb49d-e681-42e1-a2c6-295824b2223c-config-volume\") pod \"collect-profiles-29403945-skfvv\" (UID: \"c37fb49d-e681-42e1-a2c6-295824b2223c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv"
Nov 27 09:45:00 crc kubenswrapper[4971]: I1127 09:45:00.365837 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c37fb49d-e681-42e1-a2c6-295824b2223c-config-volume\") pod \"collect-profiles-29403945-skfvv\" (UID: \"c37fb49d-e681-42e1-a2c6-295824b2223c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv"
Nov 27 09:45:00 crc kubenswrapper[4971]: I1127 09:45:00.365875 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c37fb49d-e681-42e1-a2c6-295824b2223c-secret-volume\") pod \"collect-profiles-29403945-skfvv\" (UID: \"c37fb49d-e681-42e1-a2c6-295824b2223c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv"
Nov 27 09:45:00 crc kubenswrapper[4971]: I1127 09:45:00.366073 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tm4zr\" (UniqueName: \"kubernetes.io/projected/c37fb49d-e681-42e1-a2c6-295824b2223c-kube-api-access-tm4zr\") pod \"collect-profiles-29403945-skfvv\" (UID: \"c37fb49d-e681-42e1-a2c6-295824b2223c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv"
Nov 27 09:45:00 crc kubenswrapper[4971]: I1127 09:45:00.374053 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c37fb49d-e681-42e1-a2c6-295824b2223c-secret-volume\") pod \"collect-profiles-29403945-skfvv\" (UID: \"c37fb49d-e681-42e1-a2c6-295824b2223c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv"
Nov 27 09:45:00 crc kubenswrapper[4971]: I1127 09:45:00.384148 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tm4zr\" (UniqueName: \"kubernetes.io/projected/c37fb49d-e681-42e1-a2c6-295824b2223c-kube-api-access-tm4zr\") pod \"collect-profiles-29403945-skfvv\" (UID: \"c37fb49d-e681-42e1-a2c6-295824b2223c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv"
Nov 27 09:45:00 crc kubenswrapper[4971]: I1127 09:45:00.550733 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv"
Nov 27 09:45:01 crc kubenswrapper[4971]: I1127 09:45:01.028817 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv"]
Nov 27 09:45:01 crc kubenswrapper[4971]: W1127 09:45:01.036348 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc37fb49d_e681_42e1_a2c6_295824b2223c.slice/crio-448fd07371cbb2f3410672e646a022ba8d77fba76d4be50f2716a818a987b1e5 WatchSource:0}: Error finding container 448fd07371cbb2f3410672e646a022ba8d77fba76d4be50f2716a818a987b1e5: Status 404 returned error can't find the container with id 448fd07371cbb2f3410672e646a022ba8d77fba76d4be50f2716a818a987b1e5
Nov 27 09:45:01 crc kubenswrapper[4971]: I1127 09:45:01.822921 4971 generic.go:334] "Generic (PLEG): container finished" podID="c37fb49d-e681-42e1-a2c6-295824b2223c" containerID="152b4dfeb501488a392f27a0ca5eefc764ad5dd668bbc8b62e6df992b8f3ccd5" exitCode=0
Nov 27 09:45:01 crc kubenswrapper[4971]: I1127 09:45:01.823041 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv" event={"ID":"c37fb49d-e681-42e1-a2c6-295824b2223c","Type":"ContainerDied","Data":"152b4dfeb501488a392f27a0ca5eefc764ad5dd668bbc8b62e6df992b8f3ccd5"}
Nov 27 09:45:01 crc kubenswrapper[4971]: I1127 09:45:01.823380 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv" event={"ID":"c37fb49d-e681-42e1-a2c6-295824b2223c","Type":"ContainerStarted","Data":"448fd07371cbb2f3410672e646a022ba8d77fba76d4be50f2716a818a987b1e5"}
Nov 27 09:45:03 crc kubenswrapper[4971]: I1127 09:45:03.316282 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv"
Nov 27 09:45:03 crc kubenswrapper[4971]: I1127 09:45:03.432286 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tm4zr\" (UniqueName: \"kubernetes.io/projected/c37fb49d-e681-42e1-a2c6-295824b2223c-kube-api-access-tm4zr\") pod \"c37fb49d-e681-42e1-a2c6-295824b2223c\" (UID: \"c37fb49d-e681-42e1-a2c6-295824b2223c\") "
Nov 27 09:45:03 crc kubenswrapper[4971]: I1127 09:45:03.432757 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c37fb49d-e681-42e1-a2c6-295824b2223c-config-volume\") pod \"c37fb49d-e681-42e1-a2c6-295824b2223c\" (UID: \"c37fb49d-e681-42e1-a2c6-295824b2223c\") "
Nov 27 09:45:03 crc kubenswrapper[4971]: I1127 09:45:03.432905 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c37fb49d-e681-42e1-a2c6-295824b2223c-secret-volume\") pod \"c37fb49d-e681-42e1-a2c6-295824b2223c\" (UID: \"c37fb49d-e681-42e1-a2c6-295824b2223c\") "
Nov 27 09:45:03 crc kubenswrapper[4971]: I1127 09:45:03.433741 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c37fb49d-e681-42e1-a2c6-295824b2223c-config-volume" (OuterVolumeSpecName: "config-volume") pod "c37fb49d-e681-42e1-a2c6-295824b2223c" (UID: "c37fb49d-e681-42e1-a2c6-295824b2223c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 09:45:03 crc kubenswrapper[4971]: I1127 09:45:03.443704 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c37fb49d-e681-42e1-a2c6-295824b2223c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c37fb49d-e681-42e1-a2c6-295824b2223c" (UID: "c37fb49d-e681-42e1-a2c6-295824b2223c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:45:03 crc kubenswrapper[4971]: I1127 09:45:03.443956 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c37fb49d-e681-42e1-a2c6-295824b2223c-kube-api-access-tm4zr" (OuterVolumeSpecName: "kube-api-access-tm4zr") pod "c37fb49d-e681-42e1-a2c6-295824b2223c" (UID: "c37fb49d-e681-42e1-a2c6-295824b2223c"). InnerVolumeSpecName "kube-api-access-tm4zr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:45:03 crc kubenswrapper[4971]: I1127 09:45:03.535938 4971 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c37fb49d-e681-42e1-a2c6-295824b2223c-config-volume\") on node \"crc\" DevicePath \"\""
Nov 27 09:45:03 crc kubenswrapper[4971]: I1127 09:45:03.535979 4971 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c37fb49d-e681-42e1-a2c6-295824b2223c-secret-volume\") on node \"crc\" DevicePath \"\""
Nov 27 09:45:03 crc kubenswrapper[4971]: I1127 09:45:03.535991 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tm4zr\" (UniqueName: \"kubernetes.io/projected/c37fb49d-e681-42e1-a2c6-295824b2223c-kube-api-access-tm4zr\") on node \"crc\" DevicePath \"\""
Nov 27 09:45:03 crc kubenswrapper[4971]: I1127 09:45:03.852860 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv" event={"ID":"c37fb49d-e681-42e1-a2c6-295824b2223c","Type":"ContainerDied","Data":"448fd07371cbb2f3410672e646a022ba8d77fba76d4be50f2716a818a987b1e5"}
Nov 27 09:45:03 crc kubenswrapper[4971]: I1127 09:45:03.853042 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="448fd07371cbb2f3410672e646a022ba8d77fba76d4be50f2716a818a987b1e5"
Nov 27 09:45:03 crc kubenswrapper[4971]: I1127 09:45:03.853008 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv"
Nov 27 09:45:04 crc kubenswrapper[4971]: I1127 09:45:04.415382 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b"]
Nov 27 09:45:04 crc kubenswrapper[4971]: I1127 09:45:04.426476 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403900-m8c8b"]
Nov 27 09:45:04 crc kubenswrapper[4971]: I1127 09:45:04.566126 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59eb86a0-923c-4cc0-bd18-74fdcc5d129b" path="/var/lib/kubelet/pods/59eb86a0-923c-4cc0-bd18-74fdcc5d129b/volumes"
Nov 27 09:46:01 crc kubenswrapper[4971]: I1127 09:46:01.238001 4971 scope.go:117] "RemoveContainer" containerID="bf2326358d4a5bbd000535154a56d2c0bec8f0bf5e381043fed2e43204616727"
Nov 27 09:46:31 crc kubenswrapper[4971]: I1127 09:46:31.088941 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kgmmr"]
Nov 27 09:46:31 crc kubenswrapper[4971]: E1127 09:46:31.090215 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c37fb49d-e681-42e1-a2c6-295824b2223c" containerName="collect-profiles"
Nov 27 09:46:31 crc kubenswrapper[4971]: I1127 09:46:31.090235 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="c37fb49d-e681-42e1-a2c6-295824b2223c" containerName="collect-profiles"
Nov 27 09:46:31 crc kubenswrapper[4971]: I1127 09:46:31.090511 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="c37fb49d-e681-42e1-a2c6-295824b2223c" containerName="collect-profiles"
Nov 27 09:46:31 crc kubenswrapper[4971]: I1127 09:46:31.092605 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kgmmr"
Nov 27 09:46:31 crc kubenswrapper[4971]: I1127 09:46:31.105872 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kgmmr"]
Nov 27 09:46:31 crc kubenswrapper[4971]: I1127 09:46:31.183908 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef1dae40-706a-4301-aae1-8abcf66b6d99-catalog-content\") pod \"community-operators-kgmmr\" (UID: \"ef1dae40-706a-4301-aae1-8abcf66b6d99\") " pod="openshift-marketplace/community-operators-kgmmr"
Nov 27 09:46:31 crc kubenswrapper[4971]: I1127 09:46:31.183970 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef1dae40-706a-4301-aae1-8abcf66b6d99-utilities\") pod \"community-operators-kgmmr\" (UID: \"ef1dae40-706a-4301-aae1-8abcf66b6d99\") " pod="openshift-marketplace/community-operators-kgmmr"
Nov 27 09:46:31 crc kubenswrapper[4971]: I1127 09:46:31.184265 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck529\" (UniqueName: \"kubernetes.io/projected/ef1dae40-706a-4301-aae1-8abcf66b6d99-kube-api-access-ck529\") pod \"community-operators-kgmmr\" (UID: \"ef1dae40-706a-4301-aae1-8abcf66b6d99\") " pod="openshift-marketplace/community-operators-kgmmr"
Nov 27 09:46:31 crc kubenswrapper[4971]: I1127 09:46:31.287425 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck529\" (UniqueName: \"kubernetes.io/projected/ef1dae40-706a-4301-aae1-8abcf66b6d99-kube-api-access-ck529\") pod \"community-operators-kgmmr\" (UID: \"ef1dae40-706a-4301-aae1-8abcf66b6d99\") " pod="openshift-marketplace/community-operators-kgmmr"
Nov 27 09:46:31 crc kubenswrapper[4971]: I1127 09:46:31.287572 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef1dae40-706a-4301-aae1-8abcf66b6d99-catalog-content\") pod \"community-operators-kgmmr\" (UID: \"ef1dae40-706a-4301-aae1-8abcf66b6d99\") " pod="openshift-marketplace/community-operators-kgmmr"
Nov 27 09:46:31 crc kubenswrapper[4971]: I1127 09:46:31.287609 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef1dae40-706a-4301-aae1-8abcf66b6d99-utilities\") pod \"community-operators-kgmmr\" (UID: \"ef1dae40-706a-4301-aae1-8abcf66b6d99\") " pod="openshift-marketplace/community-operators-kgmmr"
Nov 27 09:46:31 crc kubenswrapper[4971]: I1127 09:46:31.288259 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef1dae40-706a-4301-aae1-8abcf66b6d99-catalog-content\") pod \"community-operators-kgmmr\" (UID: \"ef1dae40-706a-4301-aae1-8abcf66b6d99\") " pod="openshift-marketplace/community-operators-kgmmr"
Nov 27 09:46:31 crc kubenswrapper[4971]: I1127 09:46:31.288307 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef1dae40-706a-4301-aae1-8abcf66b6d99-utilities\") pod \"community-operators-kgmmr\" (UID: \"ef1dae40-706a-4301-aae1-8abcf66b6d99\") " pod="openshift-marketplace/community-operators-kgmmr"
Nov 27 09:46:31 crc kubenswrapper[4971]: I1127 09:46:31.317816 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck529\" (UniqueName: \"kubernetes.io/projected/ef1dae40-706a-4301-aae1-8abcf66b6d99-kube-api-access-ck529\") pod \"community-operators-kgmmr\" (UID: \"ef1dae40-706a-4301-aae1-8abcf66b6d99\") " pod="openshift-marketplace/community-operators-kgmmr"
Nov 27 09:46:31 crc kubenswrapper[4971]: I1127 09:46:31.470320 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kgmmr"
Nov 27 09:46:32 crc kubenswrapper[4971]: I1127 09:46:32.095889 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kgmmr"]
Nov 27 09:46:33 crc kubenswrapper[4971]: I1127 09:46:33.007004 4971 generic.go:334] "Generic (PLEG): container finished" podID="ef1dae40-706a-4301-aae1-8abcf66b6d99" containerID="7701c77838cff37ec1b10ef9bc246cc975413c28686751a5d6faae6a0807e2c9" exitCode=0
Nov 27 09:46:33 crc kubenswrapper[4971]: I1127 09:46:33.007100 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kgmmr" event={"ID":"ef1dae40-706a-4301-aae1-8abcf66b6d99","Type":"ContainerDied","Data":"7701c77838cff37ec1b10ef9bc246cc975413c28686751a5d6faae6a0807e2c9"}
Nov 27 09:46:33 crc kubenswrapper[4971]: I1127 09:46:33.009654 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kgmmr" event={"ID":"ef1dae40-706a-4301-aae1-8abcf66b6d99","Type":"ContainerStarted","Data":"3c7c9603f1f56b9f6ecd0d51c1b43d9450c55af6b487892c1ddd942467c0ac33"}
Nov 27 09:46:33 crc kubenswrapper[4971]: I1127 09:46:33.010295 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 27 09:46:34 crc kubenswrapper[4971]: I1127 09:46:34.020236 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kgmmr" event={"ID":"ef1dae40-706a-4301-aae1-8abcf66b6d99","Type":"ContainerStarted","Data":"0a70a86dc009726bd63cd30b3e2770208a232d8962379e8230bfe3be6883417c"}
Nov 27 09:46:36 crc kubenswrapper[4971]: I1127 09:46:36.049830 4971 generic.go:334] "Generic (PLEG): container finished" podID="ef1dae40-706a-4301-aae1-8abcf66b6d99" containerID="0a70a86dc009726bd63cd30b3e2770208a232d8962379e8230bfe3be6883417c" exitCode=0
Nov 27 09:46:36 crc kubenswrapper[4971]: I1127 09:46:36.049942 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kgmmr" event={"ID":"ef1dae40-706a-4301-aae1-8abcf66b6d99","Type":"ContainerDied","Data":"0a70a86dc009726bd63cd30b3e2770208a232d8962379e8230bfe3be6883417c"}
Nov 27 09:46:37 crc kubenswrapper[4971]: I1127 09:46:37.064186 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kgmmr" event={"ID":"ef1dae40-706a-4301-aae1-8abcf66b6d99","Type":"ContainerStarted","Data":"42c86896a24013d2899945fa58f8c9758cb99f892267c77e9c3c95abb2a15461"}
Nov 27 09:46:37 crc kubenswrapper[4971]: I1127 09:46:37.091356 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kgmmr" podStartSLOduration=2.428108175 podStartE2EDuration="6.091330193s" podCreationTimestamp="2025-11-27 09:46:31 +0000 UTC" firstStartedPulling="2025-11-27 09:46:33.009901551 +0000 UTC m=+10431.201945479" lastFinishedPulling="2025-11-27 09:46:36.673123579 +0000 UTC m=+10434.865167497" observedRunningTime="2025-11-27 09:46:37.091181459 +0000 UTC m=+10435.283225407" watchObservedRunningTime="2025-11-27 09:46:37.091330193 +0000 UTC m=+10435.283374131"
Nov 27 09:46:41 crc kubenswrapper[4971]: I1127 09:46:41.472050 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kgmmr"
Nov 27 09:46:41 crc kubenswrapper[4971]: I1127 09:46:41.474717 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kgmmr"
Nov 27 09:46:41 crc kubenswrapper[4971]: I1127 09:46:41.548641 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kgmmr"
Nov 27 09:46:42 crc kubenswrapper[4971]: I1127 09:46:42.192290 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kgmmr"
Nov 27 09:46:42 crc kubenswrapper[4971]: I1127 09:46:42.250671 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kgmmr"]
Nov 27 09:46:44 crc kubenswrapper[4971]: I1127 09:46:44.151650 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kgmmr" podUID="ef1dae40-706a-4301-aae1-8abcf66b6d99" containerName="registry-server" containerID="cri-o://42c86896a24013d2899945fa58f8c9758cb99f892267c77e9c3c95abb2a15461" gracePeriod=2
Nov 27 09:46:44 crc kubenswrapper[4971]: I1127 09:46:44.734885 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kgmmr"
Nov 27 09:46:44 crc kubenswrapper[4971]: I1127 09:46:44.863610 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef1dae40-706a-4301-aae1-8abcf66b6d99-utilities\") pod \"ef1dae40-706a-4301-aae1-8abcf66b6d99\" (UID: \"ef1dae40-706a-4301-aae1-8abcf66b6d99\") "
Nov 27 09:46:44 crc kubenswrapper[4971]: I1127 09:46:44.863812 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef1dae40-706a-4301-aae1-8abcf66b6d99-catalog-content\") pod \"ef1dae40-706a-4301-aae1-8abcf66b6d99\" (UID: \"ef1dae40-706a-4301-aae1-8abcf66b6d99\") "
Nov 27 09:46:44 crc kubenswrapper[4971]: I1127 09:46:44.863873 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ck529\" (UniqueName: \"kubernetes.io/projected/ef1dae40-706a-4301-aae1-8abcf66b6d99-kube-api-access-ck529\") pod \"ef1dae40-706a-4301-aae1-8abcf66b6d99\" (UID: \"ef1dae40-706a-4301-aae1-8abcf66b6d99\") "
Nov 27 09:46:44 crc kubenswrapper[4971]: I1127 09:46:44.865871 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef1dae40-706a-4301-aae1-8abcf66b6d99-utilities" (OuterVolumeSpecName: "utilities") pod "ef1dae40-706a-4301-aae1-8abcf66b6d99" (UID: "ef1dae40-706a-4301-aae1-8abcf66b6d99"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:46:44 crc kubenswrapper[4971]: I1127 09:46:44.866687 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef1dae40-706a-4301-aae1-8abcf66b6d99-utilities\") on node \"crc\" DevicePath \"\""
Nov 27 09:46:44 crc kubenswrapper[4971]: I1127 09:46:44.872302 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef1dae40-706a-4301-aae1-8abcf66b6d99-kube-api-access-ck529" (OuterVolumeSpecName: "kube-api-access-ck529") pod "ef1dae40-706a-4301-aae1-8abcf66b6d99" (UID: "ef1dae40-706a-4301-aae1-8abcf66b6d99"). InnerVolumeSpecName "kube-api-access-ck529". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:46:44 crc kubenswrapper[4971]: I1127 09:46:44.953995 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef1dae40-706a-4301-aae1-8abcf66b6d99-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ef1dae40-706a-4301-aae1-8abcf66b6d99" (UID: "ef1dae40-706a-4301-aae1-8abcf66b6d99"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:46:44 crc kubenswrapper[4971]: I1127 09:46:44.968944 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef1dae40-706a-4301-aae1-8abcf66b6d99-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 27 09:46:44 crc kubenswrapper[4971]: I1127 09:46:44.969213 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ck529\" (UniqueName: \"kubernetes.io/projected/ef1dae40-706a-4301-aae1-8abcf66b6d99-kube-api-access-ck529\") on node \"crc\" DevicePath \"\""
Nov 27 09:46:45 crc kubenswrapper[4971]: I1127 09:46:45.163968 4971 generic.go:334] "Generic (PLEG): container finished" podID="ef1dae40-706a-4301-aae1-8abcf66b6d99" containerID="42c86896a24013d2899945fa58f8c9758cb99f892267c77e9c3c95abb2a15461" exitCode=0
Nov 27 09:46:45 crc kubenswrapper[4971]: I1127 09:46:45.164022 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kgmmr" event={"ID":"ef1dae40-706a-4301-aae1-8abcf66b6d99","Type":"ContainerDied","Data":"42c86896a24013d2899945fa58f8c9758cb99f892267c77e9c3c95abb2a15461"}
Nov 27 09:46:45 crc kubenswrapper[4971]: I1127 09:46:45.164066 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kgmmr" event={"ID":"ef1dae40-706a-4301-aae1-8abcf66b6d99","Type":"ContainerDied","Data":"3c7c9603f1f56b9f6ecd0d51c1b43d9450c55af6b487892c1ddd942467c0ac33"}
Nov 27 09:46:45 crc kubenswrapper[4971]: I1127 09:46:45.164091 4971 scope.go:117] "RemoveContainer" containerID="42c86896a24013d2899945fa58f8c9758cb99f892267c77e9c3c95abb2a15461"
Nov 27 09:46:45 crc kubenswrapper[4971]: I1127 09:46:45.164245 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kgmmr"
Nov 27 09:46:45 crc kubenswrapper[4971]: I1127 09:46:45.203779 4971 scope.go:117] "RemoveContainer" containerID="0a70a86dc009726bd63cd30b3e2770208a232d8962379e8230bfe3be6883417c"
Nov 27 09:46:45 crc kubenswrapper[4971]: I1127 09:46:45.222853 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kgmmr"]
Nov 27 09:46:45 crc kubenswrapper[4971]: I1127 09:46:45.235178 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kgmmr"]
Nov 27 09:46:45 crc kubenswrapper[4971]: I1127 09:46:45.239360 4971 scope.go:117] "RemoveContainer" containerID="7701c77838cff37ec1b10ef9bc246cc975413c28686751a5d6faae6a0807e2c9"
Nov 27 09:46:45 crc kubenswrapper[4971]: I1127 09:46:45.271756 4971 scope.go:117] "RemoveContainer" containerID="42c86896a24013d2899945fa58f8c9758cb99f892267c77e9c3c95abb2a15461"
Nov 27 09:46:45 crc kubenswrapper[4971]: E1127 09:46:45.272165 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42c86896a24013d2899945fa58f8c9758cb99f892267c77e9c3c95abb2a15461\": container with ID starting with 42c86896a24013d2899945fa58f8c9758cb99f892267c77e9c3c95abb2a15461 not found: ID does not exist" containerID="42c86896a24013d2899945fa58f8c9758cb99f892267c77e9c3c95abb2a15461"
Nov 27 09:46:45 crc kubenswrapper[4971]: I1127 09:46:45.272209 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42c86896a24013d2899945fa58f8c9758cb99f892267c77e9c3c95abb2a15461"} err="failed to get container status \"42c86896a24013d2899945fa58f8c9758cb99f892267c77e9c3c95abb2a15461\": rpc error: code = NotFound desc = could not find container \"42c86896a24013d2899945fa58f8c9758cb99f892267c77e9c3c95abb2a15461\": container with ID starting with 42c86896a24013d2899945fa58f8c9758cb99f892267c77e9c3c95abb2a15461 not found: ID does not exist"
Nov 27 09:46:45 crc kubenswrapper[4971]: I1127 09:46:45.272232 4971 scope.go:117] "RemoveContainer" containerID="0a70a86dc009726bd63cd30b3e2770208a232d8962379e8230bfe3be6883417c"
Nov 27 09:46:45 crc kubenswrapper[4971]: E1127 09:46:45.272468 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a70a86dc009726bd63cd30b3e2770208a232d8962379e8230bfe3be6883417c\": container with ID starting with 0a70a86dc009726bd63cd30b3e2770208a232d8962379e8230bfe3be6883417c not found: ID does not exist" containerID="0a70a86dc009726bd63cd30b3e2770208a232d8962379e8230bfe3be6883417c"
Nov 27 09:46:45 crc kubenswrapper[4971]: I1127 09:46:45.272492 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a70a86dc009726bd63cd30b3e2770208a232d8962379e8230bfe3be6883417c"} err="failed to get container status \"0a70a86dc009726bd63cd30b3e2770208a232d8962379e8230bfe3be6883417c\": rpc error: code = NotFound desc = could not find container \"0a70a86dc009726bd63cd30b3e2770208a232d8962379e8230bfe3be6883417c\": container with ID starting with 0a70a86dc009726bd63cd30b3e2770208a232d8962379e8230bfe3be6883417c not found: ID does not exist"
Nov 27 09:46:45 crc kubenswrapper[4971]: I1127 09:46:45.272504 4971 scope.go:117] "RemoveContainer" containerID="7701c77838cff37ec1b10ef9bc246cc975413c28686751a5d6faae6a0807e2c9"
Nov 27 09:46:45 crc kubenswrapper[4971]: E1127 09:46:45.272767 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7701c77838cff37ec1b10ef9bc246cc975413c28686751a5d6faae6a0807e2c9\": container with ID starting with 7701c77838cff37ec1b10ef9bc246cc975413c28686751a5d6faae6a0807e2c9 not found: ID does not exist" containerID="7701c77838cff37ec1b10ef9bc246cc975413c28686751a5d6faae6a0807e2c9"
Nov 27 09:46:45 crc kubenswrapper[4971]: I1127 09:46:45.272789 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7701c77838cff37ec1b10ef9bc246cc975413c28686751a5d6faae6a0807e2c9"} err="failed to get container status \"7701c77838cff37ec1b10ef9bc246cc975413c28686751a5d6faae6a0807e2c9\": rpc error: code = NotFound desc = could not find container \"7701c77838cff37ec1b10ef9bc246cc975413c28686751a5d6faae6a0807e2c9\": container with ID starting with 7701c77838cff37ec1b10ef9bc246cc975413c28686751a5d6faae6a0807e2c9 not found: ID does not exist"
Nov 27 09:46:46 crc kubenswrapper[4971]: I1127 09:46:46.570259 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef1dae40-706a-4301-aae1-8abcf66b6d99" path="/var/lib/kubelet/pods/ef1dae40-706a-4301-aae1-8abcf66b6d99/volumes"
Nov 27 09:46:56 crc kubenswrapper[4971]: I1127 09:46:56.413203 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 09:46:56 crc kubenswrapper[4971]: I1127 09:46:56.413978 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 09:47:06 crc kubenswrapper[4971]: I1127 09:47:06.444014 4971 generic.go:334] "Generic (PLEG): container finished" podID="9cbe8f92-e455-4fc6-b875-1895f7dee6fd" containerID="229ca6642f016bc52f7150262c3f40164b2e4bbeac97023cdf91be06cb600a6a" exitCode=0
Nov 27 09:47:06 crc kubenswrapper[4971]: I1127 09:47:06.444135 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" event={"ID":"9cbe8f92-e455-4fc6-b875-1895f7dee6fd","Type":"ContainerDied","Data":"229ca6642f016bc52f7150262c3f40164b2e4bbeac97023cdf91be06cb600a6a"}
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.009064 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.177276 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cell1-combined-ca-bundle\") pod \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") "
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.177376 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-ssh-key\") pod \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") "
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.177474 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-inventory\") pod \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") "
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.177525 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cells-global-config-0\") pod \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") "
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.177576 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tksh2\" (UniqueName: \"kubernetes.io/projected/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-kube-api-access-tksh2\") pod \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") "
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.177613 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-ceph\") pod \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") "
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.177714 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-migration-ssh-key-1\") pod \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") "
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.177751 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cell1-compute-config-1\") pod \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") "
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.177798 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cells-global-config-1\") pod \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") "
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.177827 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cell1-compute-config-0\") pod \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") "
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.177902 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-migration-ssh-key-0\") pod \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\" (UID: \"9cbe8f92-e455-4fc6-b875-1895f7dee6fd\") "
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.185006 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-ceph" (OuterVolumeSpecName: "ceph") pod "9cbe8f92-e455-4fc6-b875-1895f7dee6fd" (UID: "9cbe8f92-e455-4fc6-b875-1895f7dee6fd"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.188514 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-kube-api-access-tksh2" (OuterVolumeSpecName: "kube-api-access-tksh2") pod "9cbe8f92-e455-4fc6-b875-1895f7dee6fd" (UID: "9cbe8f92-e455-4fc6-b875-1895f7dee6fd"). InnerVolumeSpecName "kube-api-access-tksh2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.193347 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "9cbe8f92-e455-4fc6-b875-1895f7dee6fd" (UID: "9cbe8f92-e455-4fc6-b875-1895f7dee6fd"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.218633 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-inventory" (OuterVolumeSpecName: "inventory") pod "9cbe8f92-e455-4fc6-b875-1895f7dee6fd" (UID: "9cbe8f92-e455-4fc6-b875-1895f7dee6fd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.221226 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "9cbe8f92-e455-4fc6-b875-1895f7dee6fd" (UID: "9cbe8f92-e455-4fc6-b875-1895f7dee6fd"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.224212 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "9cbe8f92-e455-4fc6-b875-1895f7dee6fd" (UID: "9cbe8f92-e455-4fc6-b875-1895f7dee6fd"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.228067 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "9cbe8f92-e455-4fc6-b875-1895f7dee6fd" (UID: "9cbe8f92-e455-4fc6-b875-1895f7dee6fd"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.231871 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "9cbe8f92-e455-4fc6-b875-1895f7dee6fd" (UID: "9cbe8f92-e455-4fc6-b875-1895f7dee6fd"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.231909 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "9cbe8f92-e455-4fc6-b875-1895f7dee6fd" (UID: "9cbe8f92-e455-4fc6-b875-1895f7dee6fd"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.232680 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "9cbe8f92-e455-4fc6-b875-1895f7dee6fd" (UID: "9cbe8f92-e455-4fc6-b875-1895f7dee6fd"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.241846 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9cbe8f92-e455-4fc6-b875-1895f7dee6fd" (UID: "9cbe8f92-e455-4fc6-b875-1895f7dee6fd"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.281737 4971 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.281786 4971 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.281798 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-ssh-key\") on node \"crc\" DevicePath \"\""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.281808 4971 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-inventory\") on node \"crc\" DevicePath \"\""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.281818 4971 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.281827 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tksh2\" (UniqueName: \"kubernetes.io/projected/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-kube-api-access-tksh2\") on node \"crc\" DevicePath \"\""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.281836 4971 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-ceph\") on node \"crc\" DevicePath \"\""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.281848 4971 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.281860 4971 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.281870 4971 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.281880 4971 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/9cbe8f92-e455-4fc6-b875-1895f7dee6fd-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\""
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.479907 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c" event={"ID":"9cbe8f92-e455-4fc6-b875-1895f7dee6fd","Type":"ContainerDied","Data":"78f2d9bcfd3508489e6023db9031d7908fa74448050fd66296bea9b050ec3c26"}
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.479983 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78f2d9bcfd3508489e6023db9031d7908fa74448050fd66296bea9b050ec3c26"
Nov 27 09:47:08 crc kubenswrapper[4971]: I1127 09:47:08.480458 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c"
Nov 27 09:47:26 crc kubenswrapper[4971]: I1127 09:47:26.413905 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 09:47:26 crc kubenswrapper[4971]: I1127 09:47:26.414691 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 09:47:56 crc kubenswrapper[4971]: I1127 09:47:56.413288 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 09:47:56 crc kubenswrapper[4971]: I1127 09:47:56.414269 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 09:47:56 crc kubenswrapper[4971]: I1127 09:47:56.414366 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h"
Nov 27 09:47:56 crc kubenswrapper[4971]: I1127 09:47:56.415915 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 27 09:47:56 crc kubenswrapper[4971]: I1127 09:47:56.416012 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6" gracePeriod=600
Nov 27 09:47:56 crc kubenswrapper[4971]: E1127 09:47:56.808733 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:47:57 crc kubenswrapper[4971]: I1127 09:47:57.169026 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6" exitCode=0
Nov 27 09:47:57 crc kubenswrapper[4971]: I1127 09:47:57.169093 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6"}
Nov 27 09:47:57 crc kubenswrapper[4971]: I1127 09:47:57.169149 4971 scope.go:117] "RemoveContainer" containerID="31b85eb2444b7a4c49f418f3febd731ee00e77aa6a823a8826a6dfb293a04ccf"
Nov 27 09:47:57 crc kubenswrapper[4971]: I1127 09:47:57.170244 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6"
Nov 27 09:47:57 crc kubenswrapper[4971]: E1127 09:47:57.170742 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:48:09 crc kubenswrapper[4971]: I1127 09:48:09.551358 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6"
Nov 27 09:48:09 crc kubenswrapper[4971]: E1127 09:48:09.552302 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:48:13 crc kubenswrapper[4971]: E1127 09:48:13.854845 4971 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.50:38006->38.102.83.50:35357: write tcp 38.102.83.50:38006->38.102.83.50:35357: write: broken pipe
Nov 27 09:48:23 crc kubenswrapper[4971]: I1127 09:48:23.552936 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6"
Nov 27 09:48:23 crc kubenswrapper[4971]: E1127 09:48:23.557840 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:48:34 crc kubenswrapper[4971]: I1127 09:48:34.554076 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6"
Nov 27 09:48:34 crc kubenswrapper[4971]: E1127 09:48:34.555023 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:48:48 crc kubenswrapper[4971]: I1127 09:48:48.021135 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"]
Nov 27 09:48:48 crc kubenswrapper[4971]: I1127 09:48:48.023306 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/mariadb-copy-data" podUID="6a46d8be-7005-4ab5-8a1f-0b158e60d38b" containerName="adoption" containerID="cri-o://cd9e88392578f007fa3f57e174e953c084f6e5cb42af83d8f3ec1fd23f1ff888" gracePeriod=30
Nov 27 09:48:49 crc kubenswrapper[4971]: I1127 09:48:49.550606 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6"
Nov 27 09:48:49 crc kubenswrapper[4971]: E1127 09:48:49.550950 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:49:02 crc kubenswrapper[4971]: I1127 09:49:02.571502 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6"
Nov 27 09:49:02 crc kubenswrapper[4971]: E1127 09:49:02.572745 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:49:16 crc kubenswrapper[4971]: I1127 09:49:16.551282 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6"
Nov 27 09:49:16 crc kubenswrapper[4971]: E1127 09:49:16.552336 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:49:18 crc kubenswrapper[4971]: I1127 09:49:18.282303 4971 generic.go:334] "Generic (PLEG): container finished" podID="6a46d8be-7005-4ab5-8a1f-0b158e60d38b" containerID="cd9e88392578f007fa3f57e174e953c084f6e5cb42af83d8f3ec1fd23f1ff888" exitCode=137
Nov 27 09:49:18 crc kubenswrapper[4971]: I1127 09:49:18.282399 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"6a46d8be-7005-4ab5-8a1f-0b158e60d38b","Type":"ContainerDied","Data":"cd9e88392578f007fa3f57e174e953c084f6e5cb42af83d8f3ec1fd23f1ff888"}
Nov 27 09:49:18 crc kubenswrapper[4971]: I1127 09:49:18.657940 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data"
Nov 27 09:49:18 crc kubenswrapper[4971]: I1127 09:49:18.741888 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mariadb-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6afd7684-5df4-45a0-931c-e3913e0a3c7b\") pod \"6a46d8be-7005-4ab5-8a1f-0b158e60d38b\" (UID: \"6a46d8be-7005-4ab5-8a1f-0b158e60d38b\") "
Nov 27 09:49:18 crc kubenswrapper[4971]: I1127 09:49:18.742007 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfsdr\" (UniqueName: \"kubernetes.io/projected/6a46d8be-7005-4ab5-8a1f-0b158e60d38b-kube-api-access-qfsdr\") pod \"6a46d8be-7005-4ab5-8a1f-0b158e60d38b\" (UID: \"6a46d8be-7005-4ab5-8a1f-0b158e60d38b\") "
Nov 27 09:49:18 crc kubenswrapper[4971]: I1127 09:49:18.752678 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a46d8be-7005-4ab5-8a1f-0b158e60d38b-kube-api-access-qfsdr" (OuterVolumeSpecName: "kube-api-access-qfsdr") pod "6a46d8be-7005-4ab5-8a1f-0b158e60d38b" (UID: "6a46d8be-7005-4ab5-8a1f-0b158e60d38b"). InnerVolumeSpecName "kube-api-access-qfsdr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:49:18 crc kubenswrapper[4971]: I1127 09:49:18.802858 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6afd7684-5df4-45a0-931c-e3913e0a3c7b" (OuterVolumeSpecName: "mariadb-data") pod "6a46d8be-7005-4ab5-8a1f-0b158e60d38b" (UID: "6a46d8be-7005-4ab5-8a1f-0b158e60d38b"). InnerVolumeSpecName "pvc-6afd7684-5df4-45a0-931c-e3913e0a3c7b". PluginName "kubernetes.io/csi", VolumeGidValue ""
Nov 27 09:49:18 crc kubenswrapper[4971]: I1127 09:49:18.845188 4971 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-6afd7684-5df4-45a0-931c-e3913e0a3c7b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6afd7684-5df4-45a0-931c-e3913e0a3c7b\") on node \"crc\" "
Nov 27 09:49:18 crc kubenswrapper[4971]: I1127 09:49:18.845228 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfsdr\" (UniqueName: \"kubernetes.io/projected/6a46d8be-7005-4ab5-8a1f-0b158e60d38b-kube-api-access-qfsdr\") on node \"crc\" DevicePath \"\""
Nov 27 09:49:18 crc kubenswrapper[4971]: I1127 09:49:18.876967 4971 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Nov 27 09:49:18 crc kubenswrapper[4971]: I1127 09:49:18.877242 4971 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-6afd7684-5df4-45a0-931c-e3913e0a3c7b" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6afd7684-5df4-45a0-931c-e3913e0a3c7b") on node "crc"
Nov 27 09:49:18 crc kubenswrapper[4971]: I1127 09:49:18.949142 4971 reconciler_common.go:293] "Volume detached for volume \"pvc-6afd7684-5df4-45a0-931c-e3913e0a3c7b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6afd7684-5df4-45a0-931c-e3913e0a3c7b\") on node \"crc\" DevicePath \"\""
Nov 27 09:49:19 crc kubenswrapper[4971]: I1127 09:49:19.300456 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"6a46d8be-7005-4ab5-8a1f-0b158e60d38b","Type":"ContainerDied","Data":"582a33e11053042cfd0f9e670d5b39aa775831c98df28cdcabd1362996f84306"}
Nov 27 09:49:19 crc kubenswrapper[4971]: I1127 09:49:19.300570 4971 scope.go:117] "RemoveContainer" containerID="cd9e88392578f007fa3f57e174e953c084f6e5cb42af83d8f3ec1fd23f1ff888"
Nov 27 09:49:19 crc kubenswrapper[4971]: I1127 09:49:19.300581 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data"
Nov 27 09:49:19 crc kubenswrapper[4971]: I1127 09:49:19.355727 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"]
Nov 27 09:49:19 crc kubenswrapper[4971]: I1127 09:49:19.367718 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-copy-data"]
Nov 27 09:49:19 crc kubenswrapper[4971]: I1127 09:49:19.970030 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"]
Nov 27 09:49:19 crc kubenswrapper[4971]: I1127 09:49:19.970785 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-copy-data" podUID="a2476780-5471-4c3b-a8e8-7537ea708699" containerName="adoption" containerID="cri-o://5cc65425c5a8a31100d7b91375dc9df42e60afcca47481890480d2ca7f5ab7b6" gracePeriod=30
Nov 27 09:49:20 crc kubenswrapper[4971]: I1127 09:49:20.566719 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a46d8be-7005-4ab5-8a1f-0b158e60d38b" path="/var/lib/kubelet/pods/6a46d8be-7005-4ab5-8a1f-0b158e60d38b/volumes"
Nov 27 09:49:27 crc kubenswrapper[4971]: I1127 09:49:27.551226 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6"
Nov 27 09:49:27 crc kubenswrapper[4971]: E1127 09:49:27.552654 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:49:42 crc kubenswrapper[4971]: I1127 09:49:42.563788 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6"
Nov 27 09:49:42 crc kubenswrapper[4971]: E1127 09:49:42.564902 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.045343 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lmrkm"]
Nov 27 09:49:50 crc kubenswrapper[4971]: E1127 09:49:50.046484 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef1dae40-706a-4301-aae1-8abcf66b6d99" containerName="extract-content"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.046500 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef1dae40-706a-4301-aae1-8abcf66b6d99" containerName="extract-content"
Nov 27 09:49:50 crc kubenswrapper[4971]: E1127 09:49:50.046546 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cbe8f92-e455-4fc6-b875-1895f7dee6fd" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.046570 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cbe8f92-e455-4fc6-b875-1895f7dee6fd" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1"
Nov 27 09:49:50 crc kubenswrapper[4971]: E1127 09:49:50.046581 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef1dae40-706a-4301-aae1-8abcf66b6d99" containerName="extract-utilities"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.046587 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef1dae40-706a-4301-aae1-8abcf66b6d99" containerName="extract-utilities"
Nov 27 09:49:50 crc kubenswrapper[4971]: E1127 09:49:50.046623 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef1dae40-706a-4301-aae1-8abcf66b6d99" containerName="registry-server"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.046630 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef1dae40-706a-4301-aae1-8abcf66b6d99" containerName="registry-server"
Nov 27 09:49:50 crc kubenswrapper[4971]: E1127 09:49:50.046648 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a46d8be-7005-4ab5-8a1f-0b158e60d38b" containerName="adoption"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.046656 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a46d8be-7005-4ab5-8a1f-0b158e60d38b" containerName="adoption"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.046886 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a46d8be-7005-4ab5-8a1f-0b158e60d38b" containerName="adoption"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.046904 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cbe8f92-e455-4fc6-b875-1895f7dee6fd" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.046916 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef1dae40-706a-4301-aae1-8abcf66b6d99" containerName="registry-server"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.048557 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lmrkm"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.068336 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lmrkm"]
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.128115 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dmmt\" (UniqueName: \"kubernetes.io/projected/151f0be0-5abe-4e97-8684-6623e4481c38-kube-api-access-2dmmt\") pod \"redhat-operators-lmrkm\" (UID: \"151f0be0-5abe-4e97-8684-6623e4481c38\") " pod="openshift-marketplace/redhat-operators-lmrkm"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.128217 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/151f0be0-5abe-4e97-8684-6623e4481c38-utilities\") pod \"redhat-operators-lmrkm\" (UID: \"151f0be0-5abe-4e97-8684-6623e4481c38\") " pod="openshift-marketplace/redhat-operators-lmrkm"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.128430 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/151f0be0-5abe-4e97-8684-6623e4481c38-catalog-content\") pod \"redhat-operators-lmrkm\" (UID: \"151f0be0-5abe-4e97-8684-6623e4481c38\") " pod="openshift-marketplace/redhat-operators-lmrkm"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.231387 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dmmt\" (UniqueName: \"kubernetes.io/projected/151f0be0-5abe-4e97-8684-6623e4481c38-kube-api-access-2dmmt\") pod \"redhat-operators-lmrkm\" (UID: \"151f0be0-5abe-4e97-8684-6623e4481c38\") " pod="openshift-marketplace/redhat-operators-lmrkm"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.231494 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/151f0be0-5abe-4e97-8684-6623e4481c38-utilities\") pod \"redhat-operators-lmrkm\" (UID: \"151f0be0-5abe-4e97-8684-6623e4481c38\") " pod="openshift-marketplace/redhat-operators-lmrkm"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.231600 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/151f0be0-5abe-4e97-8684-6623e4481c38-catalog-content\") pod \"redhat-operators-lmrkm\" (UID: \"151f0be0-5abe-4e97-8684-6623e4481c38\") " pod="openshift-marketplace/redhat-operators-lmrkm"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.232187 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/151f0be0-5abe-4e97-8684-6623e4481c38-utilities\") pod \"redhat-operators-lmrkm\" (UID: \"151f0be0-5abe-4e97-8684-6623e4481c38\") " pod="openshift-marketplace/redhat-operators-lmrkm"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.232252 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/151f0be0-5abe-4e97-8684-6623e4481c38-catalog-content\") pod \"redhat-operators-lmrkm\" (UID: \"151f0be0-5abe-4e97-8684-6623e4481c38\") " pod="openshift-marketplace/redhat-operators-lmrkm"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.657573 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dmmt\" (UniqueName: \"kubernetes.io/projected/151f0be0-5abe-4e97-8684-6623e4481c38-kube-api-access-2dmmt\") pod \"redhat-operators-lmrkm\" (UID: \"151f0be0-5abe-4e97-8684-6623e4481c38\") " pod="openshift-marketplace/redhat-operators-lmrkm"
Nov 27 09:49:50 crc kubenswrapper[4971]: I1127 09:49:50.677584 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lmrkm"
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.303732 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lmrkm"]
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.420915 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.569455 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/a2476780-5471-4c3b-a8e8-7537ea708699-ovn-data-cert\") pod \"a2476780-5471-4c3b-a8e8-7537ea708699\" (UID: \"a2476780-5471-4c3b-a8e8-7537ea708699\") "
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.569669 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2skb\" (UniqueName: \"kubernetes.io/projected/a2476780-5471-4c3b-a8e8-7537ea708699-kube-api-access-q2skb\") pod \"a2476780-5471-4c3b-a8e8-7537ea708699\" (UID: \"a2476780-5471-4c3b-a8e8-7537ea708699\") "
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.575692 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-29969a46-0e7a-4676-9e24-eedcda19284b\") pod \"a2476780-5471-4c3b-a8e8-7537ea708699\" (UID: \"a2476780-5471-4c3b-a8e8-7537ea708699\") "
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.586455 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2476780-5471-4c3b-a8e8-7537ea708699-ovn-data-cert" (OuterVolumeSpecName: "ovn-data-cert") pod "a2476780-5471-4c3b-a8e8-7537ea708699" (UID: "a2476780-5471-4c3b-a8e8-7537ea708699"). InnerVolumeSpecName "ovn-data-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.588273 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2476780-5471-4c3b-a8e8-7537ea708699-kube-api-access-q2skb" (OuterVolumeSpecName: "kube-api-access-q2skb") pod "a2476780-5471-4c3b-a8e8-7537ea708699" (UID: "a2476780-5471-4c3b-a8e8-7537ea708699"). InnerVolumeSpecName "kube-api-access-q2skb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.635872 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-29969a46-0e7a-4676-9e24-eedcda19284b" (OuterVolumeSpecName: "ovn-data") pod "a2476780-5471-4c3b-a8e8-7537ea708699" (UID: "a2476780-5471-4c3b-a8e8-7537ea708699"). InnerVolumeSpecName "pvc-29969a46-0e7a-4676-9e24-eedcda19284b". PluginName "kubernetes.io/csi", VolumeGidValue ""
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.679416 4971 reconciler_common.go:293] "Volume detached for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/a2476780-5471-4c3b-a8e8-7537ea708699-ovn-data-cert\") on node \"crc\" DevicePath \"\""
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.679797 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2skb\" (UniqueName: \"kubernetes.io/projected/a2476780-5471-4c3b-a8e8-7537ea708699-kube-api-access-q2skb\") on node \"crc\" DevicePath \"\""
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.679899 4971 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-29969a46-0e7a-4676-9e24-eedcda19284b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-29969a46-0e7a-4676-9e24-eedcda19284b\") on node \"crc\" "
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.739160 4971 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.739391 4971 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-29969a46-0e7a-4676-9e24-eedcda19284b" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-29969a46-0e7a-4676-9e24-eedcda19284b") on node "crc"
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.787918 4971 reconciler_common.go:293] "Volume detached for volume \"pvc-29969a46-0e7a-4676-9e24-eedcda19284b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-29969a46-0e7a-4676-9e24-eedcda19284b\") on node \"crc\" DevicePath \"\""
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.833551 4971 generic.go:334] "Generic (PLEG): container finished" podID="151f0be0-5abe-4e97-8684-6623e4481c38" containerID="5a03f10c0748cc7eb0bd5ec6e55db86b63a1ffb1f54cfe64bd83f61861163eb3" exitCode=0
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.833657 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lmrkm" event={"ID":"151f0be0-5abe-4e97-8684-6623e4481c38","Type":"ContainerDied","Data":"5a03f10c0748cc7eb0bd5ec6e55db86b63a1ffb1f54cfe64bd83f61861163eb3"}
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.833695 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lmrkm" event={"ID":"151f0be0-5abe-4e97-8684-6623e4481c38","Type":"ContainerStarted","Data":"6e96eb3f6fda62d7dbbba8a3c6d2c3f14d05ff9a6a7f6824266880eb5ff4b569"}
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.850398 4971 generic.go:334] "Generic (PLEG): container finished" podID="a2476780-5471-4c3b-a8e8-7537ea708699" containerID="5cc65425c5a8a31100d7b91375dc9df42e60afcca47481890480d2ca7f5ab7b6" exitCode=137
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.850459 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"a2476780-5471-4c3b-a8e8-7537ea708699","Type":"ContainerDied","Data":"5cc65425c5a8a31100d7b91375dc9df42e60afcca47481890480d2ca7f5ab7b6"}
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.850517 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"a2476780-5471-4c3b-a8e8-7537ea708699","Type":"ContainerDied","Data":"edf4422bc3cfd9129f064802c6e2d2dee2150820681b939c491071aacd5464ba"}
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.850608 4971 scope.go:117] "RemoveContainer" containerID="5cc65425c5a8a31100d7b91375dc9df42e60afcca47481890480d2ca7f5ab7b6"
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.851062 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.917558 4971 scope.go:117] "RemoveContainer" containerID="5cc65425c5a8a31100d7b91375dc9df42e60afcca47481890480d2ca7f5ab7b6"
Nov 27 09:49:51 crc kubenswrapper[4971]: E1127 09:49:51.918441 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cc65425c5a8a31100d7b91375dc9df42e60afcca47481890480d2ca7f5ab7b6\": container with ID starting with 5cc65425c5a8a31100d7b91375dc9df42e60afcca47481890480d2ca7f5ab7b6 not found: ID does not exist" containerID="5cc65425c5a8a31100d7b91375dc9df42e60afcca47481890480d2ca7f5ab7b6"
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.918545 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cc65425c5a8a31100d7b91375dc9df42e60afcca47481890480d2ca7f5ab7b6"} err="failed to get container status \"5cc65425c5a8a31100d7b91375dc9df42e60afcca47481890480d2ca7f5ab7b6\": rpc error: code = NotFound desc = could not find container \"5cc65425c5a8a31100d7b91375dc9df42e60afcca47481890480d2ca7f5ab7b6\": container with ID starting with 5cc65425c5a8a31100d7b91375dc9df42e60afcca47481890480d2ca7f5ab7b6 not found: ID does not exist"
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.930813 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"]
Nov 27 09:49:51 crc kubenswrapper[4971]: I1127 09:49:51.945694 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-copy-data"]
Nov 27 09:49:52 crc kubenswrapper[4971]: I1127 09:49:52.563966 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2476780-5471-4c3b-a8e8-7537ea708699" path="/var/lib/kubelet/pods/a2476780-5471-4c3b-a8e8-7537ea708699/volumes"
Nov 27 09:49:53 crc kubenswrapper[4971]: I1127 09:49:53.882264 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lmrkm" event={"ID":"151f0be0-5abe-4e97-8684-6623e4481c38","Type":"ContainerStarted","Data":"bb4fa6f6ed831a57798a0c5f46e416b5a97f9dfc7aecc4765ea1f902f9482e42"}
Nov 27 09:49:55 crc kubenswrapper[4971]: I1127 09:49:55.551006 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6"
Nov 27 09:49:55 crc kubenswrapper[4971]: E1127 09:49:55.551775 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:49:57 crc kubenswrapper[4971]: I1127 09:49:57.928955 4971 generic.go:334] "Generic (PLEG): container finished" podID="151f0be0-5abe-4e97-8684-6623e4481c38" containerID="bb4fa6f6ed831a57798a0c5f46e416b5a97f9dfc7aecc4765ea1f902f9482e42" exitCode=0
Nov 27 09:49:57 crc kubenswrapper[4971]: I1127 09:49:57.929001 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lmrkm" event={"ID":"151f0be0-5abe-4e97-8684-6623e4481c38","Type":"ContainerDied","Data":"bb4fa6f6ed831a57798a0c5f46e416b5a97f9dfc7aecc4765ea1f902f9482e42"}
Nov 27 09:49:58 crc kubenswrapper[4971]: I1127 09:49:58.945243 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lmrkm" event={"ID":"151f0be0-5abe-4e97-8684-6623e4481c38","Type":"ContainerStarted","Data":"e4ea80175c3148b08412bd8efdb026b6a0e0f4dd4111ed34019c1c2f3cdf3640"}
Nov 27 09:49:58 crc kubenswrapper[4971]: I1127 09:49:58.988867 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lmrkm" podStartSLOduration=2.339872785 podStartE2EDuration="8.988838148s" podCreationTimestamp="2025-11-27 09:49:50 +0000 UTC" firstStartedPulling="2025-11-27 09:49:51.83940084 +0000 UTC m=+10630.031444758" lastFinishedPulling="2025-11-27 09:49:58.488366163 +0000 UTC m=+10636.680410121" observedRunningTime="2025-11-27 09:49:58.964462911 +0000 UTC m=+10637.156506859" watchObservedRunningTime="2025-11-27 09:49:58.988838148 +0000 UTC m=+10637.180882066"
Nov 27 09:50:00 crc kubenswrapper[4971]: I1127 09:50:00.679493 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lmrkm"
Nov 27 09:50:00 crc kubenswrapper[4971]: I1127 09:50:00.681526 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lmrkm"
Nov 27 09:50:01 crc kubenswrapper[4971]: I1127 09:50:01.747861 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lmrkm" podUID="151f0be0-5abe-4e97-8684-6623e4481c38" containerName="registry-server" probeResult="failure" output=<
Nov 27 09:50:01 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s
Nov 27 09:50:01 crc kubenswrapper[4971]: >
Nov 27 09:50:10 crc kubenswrapper[4971]: I1127 09:50:10.550890 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6"
Nov 27 09:50:10 crc kubenswrapper[4971]: E1127 09:50:10.552345 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:50:10 crc kubenswrapper[4971]: I1127 09:50:10.746934 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lmrkm"
Nov 27 09:50:10 crc kubenswrapper[4971]: I1127 09:50:10.794084 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lmrkm"
Nov 27 09:50:11 crc kubenswrapper[4971]: I1127 09:50:11.006902 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lmrkm"]
Nov 27 09:50:12 crc kubenswrapper[4971]: I1127 09:50:12.126790 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lmrkm" podUID="151f0be0-5abe-4e97-8684-6623e4481c38" containerName="registry-server" containerID="cri-o://e4ea80175c3148b08412bd8efdb026b6a0e0f4dd4111ed34019c1c2f3cdf3640" gracePeriod=2
Nov 27 09:50:12 crc kubenswrapper[4971]: I1127 09:50:12.705750 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lmrkm"
Nov 27 09:50:12 crc kubenswrapper[4971]: I1127 09:50:12.869057 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dmmt\" (UniqueName: \"kubernetes.io/projected/151f0be0-5abe-4e97-8684-6623e4481c38-kube-api-access-2dmmt\") pod \"151f0be0-5abe-4e97-8684-6623e4481c38\" (UID: \"151f0be0-5abe-4e97-8684-6623e4481c38\") "
Nov 27 09:50:12 crc kubenswrapper[4971]: I1127 09:50:12.869165 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/151f0be0-5abe-4e97-8684-6623e4481c38-catalog-content\") pod \"151f0be0-5abe-4e97-8684-6623e4481c38\" (UID: \"151f0be0-5abe-4e97-8684-6623e4481c38\") "
Nov 27 09:50:12 crc kubenswrapper[4971]: I1127 09:50:12.869252 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/151f0be0-5abe-4e97-8684-6623e4481c38-utilities\") pod \"151f0be0-5abe-4e97-8684-6623e4481c38\" (UID: \"151f0be0-5abe-4e97-8684-6623e4481c38\") "
Nov 27 09:50:12 crc kubenswrapper[4971]: I1127 09:50:12.870599 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/151f0be0-5abe-4e97-8684-6623e4481c38-utilities" (OuterVolumeSpecName: "utilities") pod "151f0be0-5abe-4e97-8684-6623e4481c38" (UID: "151f0be0-5abe-4e97-8684-6623e4481c38"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:50:12 crc kubenswrapper[4971]: I1127 09:50:12.883823 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/151f0be0-5abe-4e97-8684-6623e4481c38-kube-api-access-2dmmt" (OuterVolumeSpecName: "kube-api-access-2dmmt") pod "151f0be0-5abe-4e97-8684-6623e4481c38" (UID: "151f0be0-5abe-4e97-8684-6623e4481c38"). InnerVolumeSpecName "kube-api-access-2dmmt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:50:12 crc kubenswrapper[4971]: I1127 09:50:12.973889 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dmmt\" (UniqueName: \"kubernetes.io/projected/151f0be0-5abe-4e97-8684-6623e4481c38-kube-api-access-2dmmt\") on node \"crc\" DevicePath \"\""
Nov 27 09:50:12 crc kubenswrapper[4971]: I1127 09:50:12.973969 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/151f0be0-5abe-4e97-8684-6623e4481c38-utilities\") on node \"crc\" DevicePath \"\""
Nov 27 09:50:13 crc kubenswrapper[4971]: I1127 09:50:13.031575 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/151f0be0-5abe-4e97-8684-6623e4481c38-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "151f0be0-5abe-4e97-8684-6623e4481c38" (UID: "151f0be0-5abe-4e97-8684-6623e4481c38"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:50:13 crc kubenswrapper[4971]: I1127 09:50:13.077030 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/151f0be0-5abe-4e97-8684-6623e4481c38-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 27 09:50:13 crc kubenswrapper[4971]: I1127 09:50:13.144127 4971 generic.go:334] "Generic (PLEG): container finished" podID="151f0be0-5abe-4e97-8684-6623e4481c38" containerID="e4ea80175c3148b08412bd8efdb026b6a0e0f4dd4111ed34019c1c2f3cdf3640" exitCode=0
Nov 27 09:50:13 crc kubenswrapper[4971]: I1127 09:50:13.144186 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lmrkm" event={"ID":"151f0be0-5abe-4e97-8684-6623e4481c38","Type":"ContainerDied","Data":"e4ea80175c3148b08412bd8efdb026b6a0e0f4dd4111ed34019c1c2f3cdf3640"}
Nov 27 09:50:13 crc kubenswrapper[4971]: I1127 09:50:13.144239 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lmrkm" event={"ID":"151f0be0-5abe-4e97-8684-6623e4481c38","Type":"ContainerDied","Data":"6e96eb3f6fda62d7dbbba8a3c6d2c3f14d05ff9a6a7f6824266880eb5ff4b569"}
Nov 27 09:50:13 crc kubenswrapper[4971]: I1127 09:50:13.144265 4971 scope.go:117] "RemoveContainer" containerID="e4ea80175c3148b08412bd8efdb026b6a0e0f4dd4111ed34019c1c2f3cdf3640"
Nov 27 09:50:13 crc kubenswrapper[4971]: I1127 09:50:13.144343 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lmrkm"
Nov 27 09:50:13 crc kubenswrapper[4971]: I1127 09:50:13.173924 4971 scope.go:117] "RemoveContainer" containerID="bb4fa6f6ed831a57798a0c5f46e416b5a97f9dfc7aecc4765ea1f902f9482e42"
Nov 27 09:50:13 crc kubenswrapper[4971]: I1127 09:50:13.210214 4971 scope.go:117] "RemoveContainer" containerID="5a03f10c0748cc7eb0bd5ec6e55db86b63a1ffb1f54cfe64bd83f61861163eb3"
Nov 27 09:50:13 crc kubenswrapper[4971]: I1127 09:50:13.214057 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lmrkm"]
Nov 27 09:50:13 crc kubenswrapper[4971]: I1127 09:50:13.226958 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lmrkm"]
Nov 27 09:50:13 crc kubenswrapper[4971]: I1127 09:50:13.271878 4971 scope.go:117] "RemoveContainer" containerID="e4ea80175c3148b08412bd8efdb026b6a0e0f4dd4111ed34019c1c2f3cdf3640"
Nov 27 09:50:13 crc kubenswrapper[4971]: E1127 09:50:13.272404 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4ea80175c3148b08412bd8efdb026b6a0e0f4dd4111ed34019c1c2f3cdf3640\": container with ID starting with e4ea80175c3148b08412bd8efdb026b6a0e0f4dd4111ed34019c1c2f3cdf3640 not found: ID does not exist" containerID="e4ea80175c3148b08412bd8efdb026b6a0e0f4dd4111ed34019c1c2f3cdf3640"
Nov 27 09:50:13 crc kubenswrapper[4971]: I1127 09:50:13.272461 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4ea80175c3148b08412bd8efdb026b6a0e0f4dd4111ed34019c1c2f3cdf3640"} err="failed to get container status \"e4ea80175c3148b08412bd8efdb026b6a0e0f4dd4111ed34019c1c2f3cdf3640\": rpc error: code = NotFound desc = could not find container \"e4ea80175c3148b08412bd8efdb026b6a0e0f4dd4111ed34019c1c2f3cdf3640\": container with ID starting with e4ea80175c3148b08412bd8efdb026b6a0e0f4dd4111ed34019c1c2f3cdf3640 not found: ID does not exist"
Nov 27 09:50:13 crc kubenswrapper[4971]: I1127 09:50:13.272497 4971 scope.go:117] "RemoveContainer" containerID="bb4fa6f6ed831a57798a0c5f46e416b5a97f9dfc7aecc4765ea1f902f9482e42"
Nov 27 09:50:13 crc kubenswrapper[4971]: E1127 09:50:13.272940 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb4fa6f6ed831a57798a0c5f46e416b5a97f9dfc7aecc4765ea1f902f9482e42\": container with ID starting with bb4fa6f6ed831a57798a0c5f46e416b5a97f9dfc7aecc4765ea1f902f9482e42 not found: ID does not exist" containerID="bb4fa6f6ed831a57798a0c5f46e416b5a97f9dfc7aecc4765ea1f902f9482e42"
Nov 27 09:50:13 crc kubenswrapper[4971]: I1127 09:50:13.272983 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb4fa6f6ed831a57798a0c5f46e416b5a97f9dfc7aecc4765ea1f902f9482e42"} err="failed to get container status \"bb4fa6f6ed831a57798a0c5f46e416b5a97f9dfc7aecc4765ea1f902f9482e42\": rpc error: code = NotFound desc = could not find container \"bb4fa6f6ed831a57798a0c5f46e416b5a97f9dfc7aecc4765ea1f902f9482e42\": container with ID starting with bb4fa6f6ed831a57798a0c5f46e416b5a97f9dfc7aecc4765ea1f902f9482e42 not found: ID does not exist"
Nov 27 09:50:13 crc kubenswrapper[4971]: I1127 09:50:13.273014 4971 scope.go:117] "RemoveContainer" containerID="5a03f10c0748cc7eb0bd5ec6e55db86b63a1ffb1f54cfe64bd83f61861163eb3"
Nov 27 09:50:13 crc kubenswrapper[4971]: E1127 09:50:13.273472 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a03f10c0748cc7eb0bd5ec6e55db86b63a1ffb1f54cfe64bd83f61861163eb3\": container with ID starting with 5a03f10c0748cc7eb0bd5ec6e55db86b63a1ffb1f54cfe64bd83f61861163eb3 not found: ID does not exist" containerID="5a03f10c0748cc7eb0bd5ec6e55db86b63a1ffb1f54cfe64bd83f61861163eb3"
Nov 27 09:50:13 crc kubenswrapper[4971]: I1127 09:50:13.273546 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a03f10c0748cc7eb0bd5ec6e55db86b63a1ffb1f54cfe64bd83f61861163eb3"} err="failed to get container status \"5a03f10c0748cc7eb0bd5ec6e55db86b63a1ffb1f54cfe64bd83f61861163eb3\": rpc error: code = NotFound desc = could not find container \"5a03f10c0748cc7eb0bd5ec6e55db86b63a1ffb1f54cfe64bd83f61861163eb3\": container with ID starting with 5a03f10c0748cc7eb0bd5ec6e55db86b63a1ffb1f54cfe64bd83f61861163eb3 not found: ID does not exist"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.001865 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7dxfj"]
Nov 27 09:50:14 crc kubenswrapper[4971]: E1127 09:50:14.002526 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2476780-5471-4c3b-a8e8-7537ea708699" containerName="adoption"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.002556 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2476780-5471-4c3b-a8e8-7537ea708699" containerName="adoption"
Nov 27 09:50:14 crc kubenswrapper[4971]: E1127 09:50:14.002568 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="151f0be0-5abe-4e97-8684-6623e4481c38" containerName="extract-content"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.002577 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="151f0be0-5abe-4e97-8684-6623e4481c38" containerName="extract-content"
Nov 27 09:50:14 crc kubenswrapper[4971]: E1127 09:50:14.002609 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="151f0be0-5abe-4e97-8684-6623e4481c38" containerName="registry-server"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.002616 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="151f0be0-5abe-4e97-8684-6623e4481c38" containerName="registry-server"
Nov 27 09:50:14 crc kubenswrapper[4971]: E1127 09:50:14.002625 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="151f0be0-5abe-4e97-8684-6623e4481c38" containerName="extract-utilities"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.002631 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="151f0be0-5abe-4e97-8684-6623e4481c38" containerName="extract-utilities"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.002853 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2476780-5471-4c3b-a8e8-7537ea708699" containerName="adoption"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.002871 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="151f0be0-5abe-4e97-8684-6623e4481c38" containerName="registry-server"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.004385 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7dxfj"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.025360 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7dxfj"]
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.100678 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e95a3b1-7d30-4b9c-9a21-615b8359fe17-utilities\") pod \"certified-operators-7dxfj\" (UID: \"0e95a3b1-7d30-4b9c-9a21-615b8359fe17\") " pod="openshift-marketplace/certified-operators-7dxfj"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.100816 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7r4mx\" (UniqueName: \"kubernetes.io/projected/0e95a3b1-7d30-4b9c-9a21-615b8359fe17-kube-api-access-7r4mx\") pod \"certified-operators-7dxfj\" (UID: \"0e95a3b1-7d30-4b9c-9a21-615b8359fe17\") " pod="openshift-marketplace/certified-operators-7dxfj"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.100846 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e95a3b1-7d30-4b9c-9a21-615b8359fe17-catalog-content\") pod \"certified-operators-7dxfj\" (UID: \"0e95a3b1-7d30-4b9c-9a21-615b8359fe17\") " pod="openshift-marketplace/certified-operators-7dxfj"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.203375 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7r4mx\" (UniqueName: \"kubernetes.io/projected/0e95a3b1-7d30-4b9c-9a21-615b8359fe17-kube-api-access-7r4mx\") pod \"certified-operators-7dxfj\" (UID: \"0e95a3b1-7d30-4b9c-9a21-615b8359fe17\") " pod="openshift-marketplace/certified-operators-7dxfj"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.203443 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e95a3b1-7d30-4b9c-9a21-615b8359fe17-catalog-content\") pod \"certified-operators-7dxfj\" (UID: \"0e95a3b1-7d30-4b9c-9a21-615b8359fe17\") " pod="openshift-marketplace/certified-operators-7dxfj"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.203709 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e95a3b1-7d30-4b9c-9a21-615b8359fe17-utilities\") pod \"certified-operators-7dxfj\" (UID: \"0e95a3b1-7d30-4b9c-9a21-615b8359fe17\") " pod="openshift-marketplace/certified-operators-7dxfj"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.204241 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e95a3b1-7d30-4b9c-9a21-615b8359fe17-utilities\") pod \"certified-operators-7dxfj\" (UID: \"0e95a3b1-7d30-4b9c-9a21-615b8359fe17\") " pod="openshift-marketplace/certified-operators-7dxfj"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.205000 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e95a3b1-7d30-4b9c-9a21-615b8359fe17-catalog-content\") pod \"certified-operators-7dxfj\" (UID: \"0e95a3b1-7d30-4b9c-9a21-615b8359fe17\") " pod="openshift-marketplace/certified-operators-7dxfj"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.224834 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7r4mx\" (UniqueName: \"kubernetes.io/projected/0e95a3b1-7d30-4b9c-9a21-615b8359fe17-kube-api-access-7r4mx\") pod \"certified-operators-7dxfj\" (UID: \"0e95a3b1-7d30-4b9c-9a21-615b8359fe17\") " pod="openshift-marketplace/certified-operators-7dxfj"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.321185 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7dxfj"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.567129 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="151f0be0-5abe-4e97-8684-6623e4481c38" path="/var/lib/kubelet/pods/151f0be0-5abe-4e97-8684-6623e4481c38/volumes"
Nov 27 09:50:14 crc kubenswrapper[4971]: I1127 09:50:14.848681 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7dxfj"]
Nov 27 09:50:15 crc kubenswrapper[4971]: I1127 09:50:15.170782 4971 generic.go:334] "Generic (PLEG): container finished" podID="0e95a3b1-7d30-4b9c-9a21-615b8359fe17" containerID="6283765f4d0edcd013c5228414e7e328a18238cf91e67360a12be59f92d96e02" exitCode=0
Nov 27 09:50:15 crc kubenswrapper[4971]: I1127 09:50:15.171006 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7dxfj" event={"ID":"0e95a3b1-7d30-4b9c-9a21-615b8359fe17","Type":"ContainerDied","Data":"6283765f4d0edcd013c5228414e7e328a18238cf91e67360a12be59f92d96e02"}
Nov 27 09:50:15 crc kubenswrapper[4971]: I1127 09:50:15.171178 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7dxfj" event={"ID":"0e95a3b1-7d30-4b9c-9a21-615b8359fe17","Type":"ContainerStarted","Data":"ac274f7af6555e8f5da37e017f12b00dc34420837c6cc92ea63e0b96444ccc0c"}
Nov 27 09:50:17 crc kubenswrapper[4971]: I1127 09:50:17.198415 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7dxfj" event={"ID":"0e95a3b1-7d30-4b9c-9a21-615b8359fe17","Type":"ContainerStarted","Data":"87b7bc9f75c624216c8af2992ee21fbeff82667e8b5adeaf01ee52d926ac4792"}
Nov 27 09:50:18 crc kubenswrapper[4971]: I1127 09:50:18.212976 4971 generic.go:334] "Generic (PLEG): container finished" podID="0e95a3b1-7d30-4b9c-9a21-615b8359fe17" containerID="87b7bc9f75c624216c8af2992ee21fbeff82667e8b5adeaf01ee52d926ac4792" exitCode=0
Nov 27 09:50:18 crc kubenswrapper[4971]: I1127 09:50:18.213042 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7dxfj" event={"ID":"0e95a3b1-7d30-4b9c-9a21-615b8359fe17","Type":"ContainerDied","Data":"87b7bc9f75c624216c8af2992ee21fbeff82667e8b5adeaf01ee52d926ac4792"}
Nov 27 09:50:20 crc kubenswrapper[4971]: I1127 09:50:20.235309 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7dxfj" event={"ID":"0e95a3b1-7d30-4b9c-9a21-615b8359fe17","Type":"ContainerStarted","Data":"dacc32fc448547c0735db738e001cabdb88c850d5490a1d0ec6b2c9f467841d0"}
Nov 27 09:50:20 crc kubenswrapper[4971]: I1127 09:50:20.273547 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7dxfj" podStartSLOduration=3.322928551 podStartE2EDuration="7.273506224s" podCreationTimestamp="2025-11-27 09:50:13 +0000 UTC" firstStartedPulling="2025-11-27 09:50:15.172343424 +0000 UTC m=+10653.364387352" lastFinishedPulling="2025-11-27 09:50:19.122921107 +0000 UTC m=+10657.314965025" observedRunningTime="2025-11-27 09:50:20.261515962 +0000 UTC m=+10658.453559920" watchObservedRunningTime="2025-11-27 09:50:20.273506224 +0000 UTC m=+10658.465550182"
Nov 27 09:50:24 crc kubenswrapper[4971]: I1127 09:50:24.322629 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7dxfj"
Nov 27 09:50:24 crc kubenswrapper[4971]: I1127 09:50:24.323073 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7dxfj"
Nov 27 09:50:24 crc kubenswrapper[4971]: I1127 09:50:24.400612 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7dxfj"
Nov 27 09:50:25 crc kubenswrapper[4971]: I1127 09:50:25.374995 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7dxfj"
Nov 27 09:50:25 crc kubenswrapper[4971]: I1127 09:50:25.516723 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7dxfj"]
Nov 27 09:50:25 crc kubenswrapper[4971]: I1127 09:50:25.551679 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6"
Nov 27 09:50:25 crc kubenswrapper[4971]: E1127 09:50:25.552139 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:50:27 crc kubenswrapper[4971]: I1127 09:50:27.322309 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7dxfj" podUID="0e95a3b1-7d30-4b9c-9a21-615b8359fe17" containerName="registry-server" containerID="cri-o://dacc32fc448547c0735db738e001cabdb88c850d5490a1d0ec6b2c9f467841d0" gracePeriod=2
Nov 27 09:50:28 crc kubenswrapper[4971]: I1127 09:50:28.339493 4971 generic.go:334] "Generic (PLEG): container finished" podID="0e95a3b1-7d30-4b9c-9a21-615b8359fe17" containerID="dacc32fc448547c0735db738e001cabdb88c850d5490a1d0ec6b2c9f467841d0" exitCode=0
Nov 27 09:50:28 crc kubenswrapper[4971]: I1127 09:50:28.339621 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7dxfj" event={"ID":"0e95a3b1-7d30-4b9c-9a21-615b8359fe17","Type":"ContainerDied","Data":"dacc32fc448547c0735db738e001cabdb88c850d5490a1d0ec6b2c9f467841d0"}
Nov 27 09:50:28 crc kubenswrapper[4971]: I1127 09:50:28.340006 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7dxfj" event={"ID":"0e95a3b1-7d30-4b9c-9a21-615b8359fe17","Type":"ContainerDied","Data":"ac274f7af6555e8f5da37e017f12b00dc34420837c6cc92ea63e0b96444ccc0c"}
Nov 27 09:50:28 crc kubenswrapper[4971]: I1127 09:50:28.340038 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ac274f7af6555e8f5da37e017f12b00dc34420837c6cc92ea63e0b96444ccc0c"
Nov 27 09:50:28 crc kubenswrapper[4971]: I1127 09:50:28.357787 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7dxfj"
Nov 27 09:50:28 crc kubenswrapper[4971]: I1127 09:50:28.456013 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7r4mx\" (UniqueName: \"kubernetes.io/projected/0e95a3b1-7d30-4b9c-9a21-615b8359fe17-kube-api-access-7r4mx\") pod \"0e95a3b1-7d30-4b9c-9a21-615b8359fe17\" (UID: \"0e95a3b1-7d30-4b9c-9a21-615b8359fe17\") "
Nov 27 09:50:28 crc kubenswrapper[4971]: I1127 09:50:28.456138 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e95a3b1-7d30-4b9c-9a21-615b8359fe17-utilities\") pod \"0e95a3b1-7d30-4b9c-9a21-615b8359fe17\" (UID: \"0e95a3b1-7d30-4b9c-9a21-615b8359fe17\") "
Nov 27 09:50:28 crc kubenswrapper[4971]: I1127 09:50:28.456259 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e95a3b1-7d30-4b9c-9a21-615b8359fe17-catalog-content\") pod \"0e95a3b1-7d30-4b9c-9a21-615b8359fe17\" (UID: \"0e95a3b1-7d30-4b9c-9a21-615b8359fe17\") "
Nov 27 09:50:28 crc kubenswrapper[4971]: I1127 09:50:28.457393 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e95a3b1-7d30-4b9c-9a21-615b8359fe17-utilities" (OuterVolumeSpecName: "utilities") pod "0e95a3b1-7d30-4b9c-9a21-615b8359fe17" (UID: "0e95a3b1-7d30-4b9c-9a21-615b8359fe17"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:50:28 crc kubenswrapper[4971]: I1127 09:50:28.465695 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e95a3b1-7d30-4b9c-9a21-615b8359fe17-kube-api-access-7r4mx" (OuterVolumeSpecName: "kube-api-access-7r4mx") pod "0e95a3b1-7d30-4b9c-9a21-615b8359fe17" (UID: "0e95a3b1-7d30-4b9c-9a21-615b8359fe17"). InnerVolumeSpecName "kube-api-access-7r4mx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 09:50:28 crc kubenswrapper[4971]: I1127 09:50:28.534505 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e95a3b1-7d30-4b9c-9a21-615b8359fe17-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0e95a3b1-7d30-4b9c-9a21-615b8359fe17" (UID: "0e95a3b1-7d30-4b9c-9a21-615b8359fe17"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 09:50:28 crc kubenswrapper[4971]: I1127 09:50:28.558663 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7r4mx\" (UniqueName: \"kubernetes.io/projected/0e95a3b1-7d30-4b9c-9a21-615b8359fe17-kube-api-access-7r4mx\") on node \"crc\" DevicePath \"\""
Nov 27 09:50:28 crc kubenswrapper[4971]: I1127 09:50:28.558708 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e95a3b1-7d30-4b9c-9a21-615b8359fe17-utilities\") on node \"crc\" DevicePath \"\""
Nov 27 09:50:28 crc kubenswrapper[4971]: I1127 09:50:28.558722 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e95a3b1-7d30-4b9c-9a21-615b8359fe17-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 27 09:50:29 crc kubenswrapper[4971]: I1127 09:50:29.351255 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7dxfj"
Nov 27 09:50:29 crc kubenswrapper[4971]: I1127 09:50:29.394965 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7dxfj"]
Nov 27 09:50:29 crc kubenswrapper[4971]: I1127 09:50:29.413450 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7dxfj"]
Nov 27 09:50:30 crc kubenswrapper[4971]: I1127 09:50:30.561724 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e95a3b1-7d30-4b9c-9a21-615b8359fe17" path="/var/lib/kubelet/pods/0e95a3b1-7d30-4b9c-9a21-615b8359fe17/volumes"
Nov 27 09:50:37 crc kubenswrapper[4971]: I1127 09:50:37.550597 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6"
Nov 27 09:50:37 crc kubenswrapper[4971]: E1127 09:50:37.551471 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:50:50 crc kubenswrapper[4971]: I1127 09:50:50.550524 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6"
Nov 27 09:50:50 crc kubenswrapper[4971]: E1127 09:50:50.552876 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:51:04 crc kubenswrapper[4971]: I1127 09:51:04.551282 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6"
Nov 27 09:51:04 crc kubenswrapper[4971]: E1127 09:51:04.552755 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:51:14 crc kubenswrapper[4971]: I1127 09:51:14.697861 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-cbzd2"]
Nov 27 09:51:14 crc kubenswrapper[4971]: E1127 09:51:14.699049 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e95a3b1-7d30-4b9c-9a21-615b8359fe17" containerName="extract-utilities"
Nov 27 09:51:14 crc kubenswrapper[4971]: I1127 09:51:14.699069 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e95a3b1-7d30-4b9c-9a21-615b8359fe17" containerName="extract-utilities"
Nov 27 09:51:14 crc kubenswrapper[4971]: E1127 09:51:14.699095 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e95a3b1-7d30-4b9c-9a21-615b8359fe17" containerName="registry-server"
Nov 27 09:51:14 crc kubenswrapper[4971]: I1127 09:51:14.699102 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e95a3b1-7d30-4b9c-9a21-615b8359fe17" containerName="registry-server"
Nov 27 09:51:14 crc kubenswrapper[4971]: E1127 09:51:14.699152 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e95a3b1-7d30-4b9c-9a21-615b8359fe17" containerName="extract-content"
Nov 27 09:51:14 crc kubenswrapper[4971]: I1127 09:51:14.699160 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e95a3b1-7d30-4b9c-9a21-615b8359fe17" containerName="extract-content"
Nov 27 09:51:14 crc kubenswrapper[4971]: I1127 09:51:14.699363 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e95a3b1-7d30-4b9c-9a21-615b8359fe17" containerName="registry-server"
Nov 27 09:51:14 crc kubenswrapper[4971]: I1127 09:51:14.701076 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cbzd2"
Nov 27 09:51:14 crc kubenswrapper[4971]: I1127 09:51:14.720479 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cbzd2"]
Nov 27 09:51:14 crc kubenswrapper[4971]: I1127 09:51:14.813125 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39254c20-5238-4edc-8523-4bf3b2fd85a2-catalog-content\") pod \"redhat-marketplace-cbzd2\" (UID: \"39254c20-5238-4edc-8523-4bf3b2fd85a2\") " pod="openshift-marketplace/redhat-marketplace-cbzd2"
Nov 27 09:51:14 crc kubenswrapper[4971]: I1127 09:51:14.813220 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39254c20-5238-4edc-8523-4bf3b2fd85a2-utilities\") pod \"redhat-marketplace-cbzd2\" (UID: \"39254c20-5238-4edc-8523-4bf3b2fd85a2\") " pod="openshift-marketplace/redhat-marketplace-cbzd2"
Nov 27 09:51:14 crc kubenswrapper[4971]: I1127 09:51:14.813262 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6bsr\" (UniqueName: \"kubernetes.io/projected/39254c20-5238-4edc-8523-4bf3b2fd85a2-kube-api-access-w6bsr\") pod \"redhat-marketplace-cbzd2\" (UID: \"39254c20-5238-4edc-8523-4bf3b2fd85a2\") " pod="openshift-marketplace/redhat-marketplace-cbzd2"
Nov 27 09:51:14 crc kubenswrapper[4971]: I1127 09:51:14.915592 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39254c20-5238-4edc-8523-4bf3b2fd85a2-utilities\") pod \"redhat-marketplace-cbzd2\" (UID: \"39254c20-5238-4edc-8523-4bf3b2fd85a2\") " pod="openshift-marketplace/redhat-marketplace-cbzd2"
Nov 27 09:51:14 crc kubenswrapper[4971]: I1127 09:51:14.915889 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6bsr\" (UniqueName: \"kubernetes.io/projected/39254c20-5238-4edc-8523-4bf3b2fd85a2-kube-api-access-w6bsr\") pod \"redhat-marketplace-cbzd2\" (UID: \"39254c20-5238-4edc-8523-4bf3b2fd85a2\") " pod="openshift-marketplace/redhat-marketplace-cbzd2"
Nov 27 09:51:14 crc kubenswrapper[4971]: I1127 09:51:14.916020 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39254c20-5238-4edc-8523-4bf3b2fd85a2-catalog-content\") pod \"redhat-marketplace-cbzd2\" (UID: \"39254c20-5238-4edc-8523-4bf3b2fd85a2\") " pod="openshift-marketplace/redhat-marketplace-cbzd2"
Nov 27 09:51:14 crc kubenswrapper[4971]: I1127 09:51:14.916162 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39254c20-5238-4edc-8523-4bf3b2fd85a2-utilities\") pod \"redhat-marketplace-cbzd2\" (UID: \"39254c20-5238-4edc-8523-4bf3b2fd85a2\") " pod="openshift-marketplace/redhat-marketplace-cbzd2"
Nov 27 09:51:14 crc kubenswrapper[4971]: I1127 09:51:14.916470 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39254c20-5238-4edc-8523-4bf3b2fd85a2-catalog-content\") pod \"redhat-marketplace-cbzd2\" (UID: \"39254c20-5238-4edc-8523-4bf3b2fd85a2\") " pod="openshift-marketplace/redhat-marketplace-cbzd2"
Nov 27 09:51:14 crc kubenswrapper[4971]: I1127 09:51:14.962148 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6bsr\" (UniqueName: \"kubernetes.io/projected/39254c20-5238-4edc-8523-4bf3b2fd85a2-kube-api-access-w6bsr\") pod \"redhat-marketplace-cbzd2\" (UID: \"39254c20-5238-4edc-8523-4bf3b2fd85a2\") " pod="openshift-marketplace/redhat-marketplace-cbzd2"
Nov 27 09:51:15 crc kubenswrapper[4971]: I1127 09:51:15.045064 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cbzd2"
Nov 27 09:51:15 crc kubenswrapper[4971]: I1127 09:51:15.603487 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cbzd2"]
Nov 27 09:51:15 crc kubenswrapper[4971]: I1127 09:51:15.984021 4971 generic.go:334] "Generic (PLEG): container finished" podID="39254c20-5238-4edc-8523-4bf3b2fd85a2" containerID="b604866ef6ba294e6897d5340ae0f6e011ee7b9791a0e669c303e1f17bfdf089" exitCode=0
Nov 27 09:51:15 crc kubenswrapper[4971]: I1127 09:51:15.984295 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cbzd2" event={"ID":"39254c20-5238-4edc-8523-4bf3b2fd85a2","Type":"ContainerDied","Data":"b604866ef6ba294e6897d5340ae0f6e011ee7b9791a0e669c303e1f17bfdf089"}
Nov 27 09:51:15 crc kubenswrapper[4971]: I1127 09:51:15.984328 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cbzd2" event={"ID":"39254c20-5238-4edc-8523-4bf3b2fd85a2","Type":"ContainerStarted","Data":"a5ce9757a9f1a71981ed7067358fad5d271c9a9d22417209f4d384ac98b15cb4"}
Nov 27 09:51:16 crc kubenswrapper[4971]: I1127 09:51:16.998700 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cbzd2" event={"ID":"39254c20-5238-4edc-8523-4bf3b2fd85a2","Type":"ContainerStarted","Data":"df406eb70842f396cb897889f834de2b15eaab6f7e430f9885bdb41f57fbcee3"}
Nov 27 09:51:18 crc kubenswrapper[4971]: I1127 09:51:18.017706 4971 generic.go:334] "Generic (PLEG): container finished" podID="39254c20-5238-4edc-8523-4bf3b2fd85a2" containerID="df406eb70842f396cb897889f834de2b15eaab6f7e430f9885bdb41f57fbcee3" exitCode=0
Nov 27 09:51:18 crc kubenswrapper[4971]: I1127 09:51:18.017987 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cbzd2" event={"ID":"39254c20-5238-4edc-8523-4bf3b2fd85a2","Type":"ContainerDied","Data":"df406eb70842f396cb897889f834de2b15eaab6f7e430f9885bdb41f57fbcee3"}
Nov 27 09:51:18 crc kubenswrapper[4971]: I1127 09:51:18.556403 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6"
Nov 27 09:51:18 crc kubenswrapper[4971]: E1127 09:51:18.557122 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 09:51:19 crc kubenswrapper[4971]: I1127 09:51:19.036933 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cbzd2" event={"ID":"39254c20-5238-4edc-8523-4bf3b2fd85a2","Type":"ContainerStarted","Data":"d58d6834a5231328dc56a794c0480e02390ebca3ac146bb0636088a98b9b9cc6"}
Nov 27 09:51:19 crc kubenswrapper[4971]: I1127 09:51:19.061868 4971 pod_startup_latency_tracker.go:104] "Observed
pod startup duration" pod="openshift-marketplace/redhat-marketplace-cbzd2" podStartSLOduration=2.577365615 podStartE2EDuration="5.061846527s" podCreationTimestamp="2025-11-27 09:51:14 +0000 UTC" firstStartedPulling="2025-11-27 09:51:15.986345422 +0000 UTC m=+10714.178389340" lastFinishedPulling="2025-11-27 09:51:18.470826304 +0000 UTC m=+10716.662870252" observedRunningTime="2025-11-27 09:51:19.059905991 +0000 UTC m=+10717.251949919" watchObservedRunningTime="2025-11-27 09:51:19.061846527 +0000 UTC m=+10717.253890445" Nov 27 09:51:25 crc kubenswrapper[4971]: I1127 09:51:25.046264 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-cbzd2" Nov 27 09:51:25 crc kubenswrapper[4971]: I1127 09:51:25.048735 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-cbzd2" Nov 27 09:51:25 crc kubenswrapper[4971]: I1127 09:51:25.119973 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-cbzd2" Nov 27 09:51:25 crc kubenswrapper[4971]: I1127 09:51:25.199908 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-cbzd2" Nov 27 09:51:25 crc kubenswrapper[4971]: I1127 09:51:25.368091 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cbzd2"] Nov 27 09:51:27 crc kubenswrapper[4971]: I1127 09:51:27.128451 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-cbzd2" podUID="39254c20-5238-4edc-8523-4bf3b2fd85a2" containerName="registry-server" containerID="cri-o://d58d6834a5231328dc56a794c0480e02390ebca3ac146bb0636088a98b9b9cc6" gracePeriod=2 Nov 27 09:51:27 crc kubenswrapper[4971]: I1127 09:51:27.683497 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cbzd2" Nov 27 09:51:27 crc kubenswrapper[4971]: I1127 09:51:27.816057 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6bsr\" (UniqueName: \"kubernetes.io/projected/39254c20-5238-4edc-8523-4bf3b2fd85a2-kube-api-access-w6bsr\") pod \"39254c20-5238-4edc-8523-4bf3b2fd85a2\" (UID: \"39254c20-5238-4edc-8523-4bf3b2fd85a2\") " Nov 27 09:51:27 crc kubenswrapper[4971]: I1127 09:51:27.816224 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39254c20-5238-4edc-8523-4bf3b2fd85a2-utilities\") pod \"39254c20-5238-4edc-8523-4bf3b2fd85a2\" (UID: \"39254c20-5238-4edc-8523-4bf3b2fd85a2\") " Nov 27 09:51:27 crc kubenswrapper[4971]: I1127 09:51:27.816305 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39254c20-5238-4edc-8523-4bf3b2fd85a2-catalog-content\") pod \"39254c20-5238-4edc-8523-4bf3b2fd85a2\" (UID: \"39254c20-5238-4edc-8523-4bf3b2fd85a2\") " Nov 27 09:51:27 crc kubenswrapper[4971]: I1127 09:51:27.817247 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39254c20-5238-4edc-8523-4bf3b2fd85a2-utilities" (OuterVolumeSpecName: "utilities") pod "39254c20-5238-4edc-8523-4bf3b2fd85a2" (UID: "39254c20-5238-4edc-8523-4bf3b2fd85a2"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:51:27 crc kubenswrapper[4971]: I1127 09:51:27.827869 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39254c20-5238-4edc-8523-4bf3b2fd85a2-kube-api-access-w6bsr" (OuterVolumeSpecName: "kube-api-access-w6bsr") pod "39254c20-5238-4edc-8523-4bf3b2fd85a2" (UID: "39254c20-5238-4edc-8523-4bf3b2fd85a2"). InnerVolumeSpecName "kube-api-access-w6bsr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:51:27 crc kubenswrapper[4971]: I1127 09:51:27.837231 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39254c20-5238-4edc-8523-4bf3b2fd85a2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "39254c20-5238-4edc-8523-4bf3b2fd85a2" (UID: "39254c20-5238-4edc-8523-4bf3b2fd85a2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:51:27 crc kubenswrapper[4971]: I1127 09:51:27.919253 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39254c20-5238-4edc-8523-4bf3b2fd85a2-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 09:51:27 crc kubenswrapper[4971]: I1127 09:51:27.919521 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39254c20-5238-4edc-8523-4bf3b2fd85a2-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 09:51:27 crc kubenswrapper[4971]: I1127 09:51:27.919630 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6bsr\" (UniqueName: \"kubernetes.io/projected/39254c20-5238-4edc-8523-4bf3b2fd85a2-kube-api-access-w6bsr\") on node \"crc\" DevicePath \"\"" Nov 27 09:51:28 crc kubenswrapper[4971]: I1127 09:51:28.141665 4971 generic.go:334] "Generic (PLEG): container finished" podID="39254c20-5238-4edc-8523-4bf3b2fd85a2" containerID="d58d6834a5231328dc56a794c0480e02390ebca3ac146bb0636088a98b9b9cc6" exitCode=0 Nov 27 09:51:28 crc kubenswrapper[4971]: I1127 09:51:28.141727 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cbzd2" event={"ID":"39254c20-5238-4edc-8523-4bf3b2fd85a2","Type":"ContainerDied","Data":"d58d6834a5231328dc56a794c0480e02390ebca3ac146bb0636088a98b9b9cc6"} Nov 27 09:51:28 crc kubenswrapper[4971]: I1127 09:51:28.141773 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cbzd2" event={"ID":"39254c20-5238-4edc-8523-4bf3b2fd85a2","Type":"ContainerDied","Data":"a5ce9757a9f1a71981ed7067358fad5d271c9a9d22417209f4d384ac98b15cb4"} Nov 27 09:51:28 crc kubenswrapper[4971]: I1127 09:51:28.141798 4971 scope.go:117] "RemoveContainer" containerID="d58d6834a5231328dc56a794c0480e02390ebca3ac146bb0636088a98b9b9cc6" Nov 27 09:51:28 crc kubenswrapper[4971]: I1127 09:51:28.142655 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cbzd2" Nov 27 09:51:28 crc kubenswrapper[4971]: I1127 09:51:28.171899 4971 scope.go:117] "RemoveContainer" containerID="df406eb70842f396cb897889f834de2b15eaab6f7e430f9885bdb41f57fbcee3" Nov 27 09:51:28 crc kubenswrapper[4971]: I1127 09:51:28.184332 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cbzd2"] Nov 27 09:51:28 crc kubenswrapper[4971]: I1127 09:51:28.195505 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-cbzd2"] Nov 27 09:51:28 crc kubenswrapper[4971]: I1127 09:51:28.197422 4971 scope.go:117] "RemoveContainer" containerID="b604866ef6ba294e6897d5340ae0f6e011ee7b9791a0e669c303e1f17bfdf089" Nov 27 09:51:28 crc kubenswrapper[4971]: I1127 09:51:28.267708 4971 scope.go:117] "RemoveContainer" containerID="d58d6834a5231328dc56a794c0480e02390ebca3ac146bb0636088a98b9b9cc6" Nov 27 09:51:28 crc kubenswrapper[4971]: E1127 09:51:28.268812 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d58d6834a5231328dc56a794c0480e02390ebca3ac146bb0636088a98b9b9cc6\": container with ID starting with d58d6834a5231328dc56a794c0480e02390ebca3ac146bb0636088a98b9b9cc6 not found: ID does not exist" containerID="d58d6834a5231328dc56a794c0480e02390ebca3ac146bb0636088a98b9b9cc6" Nov 27 09:51:28 crc kubenswrapper[4971]: I1127 09:51:28.268998 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d58d6834a5231328dc56a794c0480e02390ebca3ac146bb0636088a98b9b9cc6"} err="failed to get container status \"d58d6834a5231328dc56a794c0480e02390ebca3ac146bb0636088a98b9b9cc6\": rpc error: code = NotFound desc = could not find container \"d58d6834a5231328dc56a794c0480e02390ebca3ac146bb0636088a98b9b9cc6\": container with ID starting with d58d6834a5231328dc56a794c0480e02390ebca3ac146bb0636088a98b9b9cc6 not found: ID does not exist" Nov 27 09:51:28 crc kubenswrapper[4971]: I1127 09:51:28.269050 4971 scope.go:117] "RemoveContainer" containerID="df406eb70842f396cb897889f834de2b15eaab6f7e430f9885bdb41f57fbcee3" Nov 27 09:51:28 crc kubenswrapper[4971]: E1127 09:51:28.269396 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df406eb70842f396cb897889f834de2b15eaab6f7e430f9885bdb41f57fbcee3\": container with ID starting with df406eb70842f396cb897889f834de2b15eaab6f7e430f9885bdb41f57fbcee3 not found: ID does not exist" containerID="df406eb70842f396cb897889f834de2b15eaab6f7e430f9885bdb41f57fbcee3" Nov 27 09:51:28 crc kubenswrapper[4971]: I1127 09:51:28.269440 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df406eb70842f396cb897889f834de2b15eaab6f7e430f9885bdb41f57fbcee3"} err="failed to get container status \"df406eb70842f396cb897889f834de2b15eaab6f7e430f9885bdb41f57fbcee3\": rpc error: code = NotFound desc = could not find container \"df406eb70842f396cb897889f834de2b15eaab6f7e430f9885bdb41f57fbcee3\": container with ID starting with df406eb70842f396cb897889f834de2b15eaab6f7e430f9885bdb41f57fbcee3 not found: ID does not exist" Nov 27 09:51:28 crc kubenswrapper[4971]: I1127 09:51:28.269469 4971 scope.go:117] "RemoveContainer" containerID="b604866ef6ba294e6897d5340ae0f6e011ee7b9791a0e669c303e1f17bfdf089" Nov 27 09:51:28 crc kubenswrapper[4971]: E1127 09:51:28.269825 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b604866ef6ba294e6897d5340ae0f6e011ee7b9791a0e669c303e1f17bfdf089\": container with ID starting with b604866ef6ba294e6897d5340ae0f6e011ee7b9791a0e669c303e1f17bfdf089 not found: ID does not exist" containerID="b604866ef6ba294e6897d5340ae0f6e011ee7b9791a0e669c303e1f17bfdf089" Nov 27 09:51:28 crc kubenswrapper[4971]: I1127 09:51:28.269885 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b604866ef6ba294e6897d5340ae0f6e011ee7b9791a0e669c303e1f17bfdf089"} err="failed to get container status \"b604866ef6ba294e6897d5340ae0f6e011ee7b9791a0e669c303e1f17bfdf089\": rpc error: code = NotFound desc = could not find container \"b604866ef6ba294e6897d5340ae0f6e011ee7b9791a0e669c303e1f17bfdf089\": container with ID starting with b604866ef6ba294e6897d5340ae0f6e011ee7b9791a0e669c303e1f17bfdf089 not found: ID does not exist" Nov 27 09:51:28 crc kubenswrapper[4971]: I1127 09:51:28.570166 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39254c20-5238-4edc-8523-4bf3b2fd85a2" path="/var/lib/kubelet/pods/39254c20-5238-4edc-8523-4bf3b2fd85a2/volumes" Nov 27 09:51:30 crc kubenswrapper[4971]: I1127 09:51:30.550634 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6" Nov 27 09:51:30 crc kubenswrapper[4971]: E1127 09:51:30.551280 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:51:43 crc kubenswrapper[4971]: I1127 09:51:43.551477 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6" Nov 27 09:51:43 crc kubenswrapper[4971]: E1127 09:51:43.552658 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:51:56 crc kubenswrapper[4971]: I1127 09:51:56.556046 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6" Nov 27 09:51:56 crc kubenswrapper[4971]: E1127 09:51:56.556853 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:52:09 crc kubenswrapper[4971]: I1127 09:52:09.552469 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6" Nov 27 09:52:09 crc kubenswrapper[4971]: E1127 09:52:09.553464 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:52:21 crc kubenswrapper[4971]: I1127 09:52:21.550520 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6" Nov 27 09:52:21 crc kubenswrapper[4971]: E1127 09:52:21.551661 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:52:33 crc kubenswrapper[4971]: I1127 09:52:33.551738 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6" Nov 27 09:52:33 crc kubenswrapper[4971]: E1127 09:52:33.552854 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:52:45 crc kubenswrapper[4971]: I1127 09:52:45.551081 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6" Nov 27 09:52:45 crc kubenswrapper[4971]: E1127 09:52:45.573749 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:52:57 crc kubenswrapper[4971]: I1127 09:52:57.551076 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6" Nov 27 09:52:58 crc kubenswrapper[4971]: I1127 09:52:58.473425 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"abfb28f790ede5cd72fbb5dc60b5a5e8a898d243568dbe3ace67e693e4e0d682"} Nov 27 09:55:26 crc kubenswrapper[4971]: I1127 09:55:26.413077 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 09:55:26 crc kubenswrapper[4971]: I1127 09:55:26.414065 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 09:55:56 crc kubenswrapper[4971]: I1127 09:55:56.413250 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 09:55:56 crc kubenswrapper[4971]: I1127 09:55:56.413885 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 09:56:26 crc kubenswrapper[4971]: I1127 09:56:26.413420 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 09:56:26 crc kubenswrapper[4971]: I1127 09:56:26.413943 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 09:56:26 crc kubenswrapper[4971]: I1127 09:56:26.413984 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 09:56:26 crc kubenswrapper[4971]: I1127 09:56:26.414724 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"abfb28f790ede5cd72fbb5dc60b5a5e8a898d243568dbe3ace67e693e4e0d682"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 09:56:26 crc kubenswrapper[4971]: I1127 09:56:26.414778 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://abfb28f790ede5cd72fbb5dc60b5a5e8a898d243568dbe3ace67e693e4e0d682" gracePeriod=600 Nov 27 09:56:27 crc kubenswrapper[4971]: I1127 09:56:27.154042 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="abfb28f790ede5cd72fbb5dc60b5a5e8a898d243568dbe3ace67e693e4e0d682" exitCode=0 Nov 27 09:56:27 crc kubenswrapper[4971]: I1127 09:56:27.154142 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"abfb28f790ede5cd72fbb5dc60b5a5e8a898d243568dbe3ace67e693e4e0d682"} Nov 27 09:56:27 crc kubenswrapper[4971]: I1127 09:56:27.154606 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da"} Nov 27 
09:56:27 crc kubenswrapper[4971]: I1127 09:56:27.154630 4971 scope.go:117] "RemoveContainer" containerID="d5d3a8883a93ccaa51c1b9319f9364a05ad09a90156d64a0180b7518c3a242d6" Nov 27 09:57:01 crc kubenswrapper[4971]: I1127 09:57:01.700091 4971 scope.go:117] "RemoveContainer" containerID="87b7bc9f75c624216c8af2992ee21fbeff82667e8b5adeaf01ee52d926ac4792" Nov 27 09:57:01 crc kubenswrapper[4971]: I1127 09:57:01.734850 4971 scope.go:117] "RemoveContainer" containerID="6283765f4d0edcd013c5228414e7e328a18238cf91e67360a12be59f92d96e02" Nov 27 09:57:01 crc kubenswrapper[4971]: I1127 09:57:01.789016 4971 scope.go:117] "RemoveContainer" containerID="dacc32fc448547c0735db738e001cabdb88c850d5490a1d0ec6b2c9f467841d0" Nov 27 09:57:52 crc kubenswrapper[4971]: I1127 09:57:52.863136 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jj2vv"] Nov 27 09:57:52 crc kubenswrapper[4971]: E1127 09:57:52.864279 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39254c20-5238-4edc-8523-4bf3b2fd85a2" containerName="extract-utilities" Nov 27 09:57:52 crc kubenswrapper[4971]: I1127 09:57:52.864296 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="39254c20-5238-4edc-8523-4bf3b2fd85a2" containerName="extract-utilities" Nov 27 09:57:52 crc kubenswrapper[4971]: E1127 09:57:52.864319 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39254c20-5238-4edc-8523-4bf3b2fd85a2" containerName="extract-content" Nov 27 09:57:52 crc kubenswrapper[4971]: I1127 09:57:52.864327 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="39254c20-5238-4edc-8523-4bf3b2fd85a2" containerName="extract-content" Nov 27 09:57:52 crc kubenswrapper[4971]: E1127 09:57:52.864340 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39254c20-5238-4edc-8523-4bf3b2fd85a2" containerName="registry-server" Nov 27 09:57:52 crc kubenswrapper[4971]: I1127 09:57:52.864349 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="39254c20-5238-4edc-8523-4bf3b2fd85a2" containerName="registry-server" Nov 27 09:57:52 crc kubenswrapper[4971]: I1127 09:57:52.864696 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="39254c20-5238-4edc-8523-4bf3b2fd85a2" containerName="registry-server" Nov 27 09:57:52 crc kubenswrapper[4971]: I1127 09:57:52.893709 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jj2vv" Nov 27 09:57:52 crc kubenswrapper[4971]: I1127 09:57:52.924976 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fb050e0-b560-46e6-9d65-6332e4b8b2d4-catalog-content\") pod \"community-operators-jj2vv\" (UID: \"1fb050e0-b560-46e6-9d65-6332e4b8b2d4\") " pod="openshift-marketplace/community-operators-jj2vv" Nov 27 09:57:52 crc kubenswrapper[4971]: I1127 09:57:52.925094 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fb050e0-b560-46e6-9d65-6332e4b8b2d4-utilities\") pod \"community-operators-jj2vv\" (UID: \"1fb050e0-b560-46e6-9d65-6332e4b8b2d4\") " pod="openshift-marketplace/community-operators-jj2vv" Nov 27 09:57:52 crc kubenswrapper[4971]: I1127 09:57:52.925117 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m95zh\" (UniqueName: \"kubernetes.io/projected/1fb050e0-b560-46e6-9d65-6332e4b8b2d4-kube-api-access-m95zh\") pod \"community-operators-jj2vv\" (UID: \"1fb050e0-b560-46e6-9d65-6332e4b8b2d4\") " pod="openshift-marketplace/community-operators-jj2vv" Nov 27 09:57:52 crc kubenswrapper[4971]: I1127 09:57:52.929732 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jj2vv"] Nov 27 09:57:53 crc kubenswrapper[4971]: I1127 09:57:53.026493 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fb050e0-b560-46e6-9d65-6332e4b8b2d4-utilities\") pod \"community-operators-jj2vv\" (UID: \"1fb050e0-b560-46e6-9d65-6332e4b8b2d4\") " pod="openshift-marketplace/community-operators-jj2vv" Nov 27 09:57:53 crc kubenswrapper[4971]: I1127 09:57:53.026550 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m95zh\" (UniqueName: \"kubernetes.io/projected/1fb050e0-b560-46e6-9d65-6332e4b8b2d4-kube-api-access-m95zh\") pod \"community-operators-jj2vv\" (UID: \"1fb050e0-b560-46e6-9d65-6332e4b8b2d4\") " pod="openshift-marketplace/community-operators-jj2vv" Nov 27 09:57:53 crc kubenswrapper[4971]: I1127 09:57:53.026644 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fb050e0-b560-46e6-9d65-6332e4b8b2d4-catalog-content\") pod \"community-operators-jj2vv\" (UID: \"1fb050e0-b560-46e6-9d65-6332e4b8b2d4\") " pod="openshift-marketplace/community-operators-jj2vv" Nov 27 09:57:53 crc kubenswrapper[4971]: I1127 09:57:53.027167 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fb050e0-b560-46e6-9d65-6332e4b8b2d4-utilities\") pod \"community-operators-jj2vv\" (UID: \"1fb050e0-b560-46e6-9d65-6332e4b8b2d4\") " pod="openshift-marketplace/community-operators-jj2vv" Nov 27 09:57:53 crc kubenswrapper[4971]: I1127 09:57:53.027180 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fb050e0-b560-46e6-9d65-6332e4b8b2d4-catalog-content\") pod \"community-operators-jj2vv\" (UID: \"1fb050e0-b560-46e6-9d65-6332e4b8b2d4\") " pod="openshift-marketplace/community-operators-jj2vv" Nov 27 09:57:53 crc kubenswrapper[4971]: I1127 09:57:53.049508 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-m95zh\" (UniqueName: \"kubernetes.io/projected/1fb050e0-b560-46e6-9d65-6332e4b8b2d4-kube-api-access-m95zh\") pod \"community-operators-jj2vv\" (UID: \"1fb050e0-b560-46e6-9d65-6332e4b8b2d4\") " pod="openshift-marketplace/community-operators-jj2vv" Nov 27 09:57:53 crc kubenswrapper[4971]: I1127 09:57:53.236260 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jj2vv" Nov 27 09:57:53 crc kubenswrapper[4971]: I1127 09:57:53.774814 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jj2vv"] Nov 27 09:57:54 crc kubenswrapper[4971]: I1127 09:57:54.229604 4971 generic.go:334] "Generic (PLEG): container finished" podID="1fb050e0-b560-46e6-9d65-6332e4b8b2d4" containerID="530d529c12b1bf62a848bcf5628cdfef865cbf1c74de216c8292ddceccbed96d" exitCode=0 Nov 27 09:57:54 crc kubenswrapper[4971]: I1127 09:57:54.229707 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jj2vv" event={"ID":"1fb050e0-b560-46e6-9d65-6332e4b8b2d4","Type":"ContainerDied","Data":"530d529c12b1bf62a848bcf5628cdfef865cbf1c74de216c8292ddceccbed96d"} Nov 27 09:57:54 crc kubenswrapper[4971]: I1127 09:57:54.229968 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jj2vv" event={"ID":"1fb050e0-b560-46e6-9d65-6332e4b8b2d4","Type":"ContainerStarted","Data":"e30bf59b1baede1e8b2e663acacefafc47ef63ca3258a8e4cd420946c873f5c5"} Nov 27 09:57:54 crc kubenswrapper[4971]: I1127 09:57:54.231525 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 27 09:57:55 crc kubenswrapper[4971]: I1127 09:57:55.247466 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jj2vv" event={"ID":"1fb050e0-b560-46e6-9d65-6332e4b8b2d4","Type":"ContainerStarted","Data":"5c5fa219935d46d09efc5f39c8f670f6ff1baca585f3885e14ac30679a11496b"} Nov 27 09:57:56 crc kubenswrapper[4971]: I1127 09:57:56.264228 4971 generic.go:334] "Generic (PLEG): container finished" podID="1fb050e0-b560-46e6-9d65-6332e4b8b2d4" containerID="5c5fa219935d46d09efc5f39c8f670f6ff1baca585f3885e14ac30679a11496b" exitCode=0 Nov 27 09:57:56 crc kubenswrapper[4971]: I1127 09:57:56.264306 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jj2vv" event={"ID":"1fb050e0-b560-46e6-9d65-6332e4b8b2d4","Type":"ContainerDied","Data":"5c5fa219935d46d09efc5f39c8f670f6ff1baca585f3885e14ac30679a11496b"} Nov 27 09:57:57 crc kubenswrapper[4971]: I1127 09:57:57.280856 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jj2vv" event={"ID":"1fb050e0-b560-46e6-9d65-6332e4b8b2d4","Type":"ContainerStarted","Data":"6b42ef2276ada3817ad0a78185c8ccc86ef5075ac748995319a13ab72cfb4a35"} Nov 27 09:57:57 crc kubenswrapper[4971]: I1127 09:57:57.312922 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jj2vv" podStartSLOduration=2.6878322150000002 podStartE2EDuration="5.312898399s" podCreationTimestamp="2025-11-27 09:57:52 +0000 UTC" firstStartedPulling="2025-11-27 09:57:54.23129431 +0000 UTC m=+11112.423338228" lastFinishedPulling="2025-11-27 09:57:56.856360484 +0000 UTC m=+11115.048404412" observedRunningTime="2025-11-27 09:57:57.301985877 +0000 UTC m=+11115.494029825" 
watchObservedRunningTime="2025-11-27 09:57:57.312898399 +0000 UTC m=+11115.504942327" Nov 27 09:58:03 crc kubenswrapper[4971]: I1127 09:58:03.236702 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jj2vv" Nov 27 09:58:03 crc kubenswrapper[4971]: I1127 09:58:03.237467 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jj2vv" Nov 27 09:58:03 crc kubenswrapper[4971]: I1127 09:58:03.285516 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jj2vv" Nov 27 09:58:03 crc kubenswrapper[4971]: I1127 09:58:03.401055 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jj2vv" Nov 27 09:58:03 crc kubenswrapper[4971]: I1127 09:58:03.532388 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jj2vv"] Nov 27 09:58:05 crc kubenswrapper[4971]: I1127 09:58:05.379237 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jj2vv" podUID="1fb050e0-b560-46e6-9d65-6332e4b8b2d4" containerName="registry-server" containerID="cri-o://6b42ef2276ada3817ad0a78185c8ccc86ef5075ac748995319a13ab72cfb4a35" gracePeriod=2 Nov 27 09:58:06 crc kubenswrapper[4971]: I1127 09:58:06.399094 4971 generic.go:334] "Generic (PLEG): container finished" podID="1fb050e0-b560-46e6-9d65-6332e4b8b2d4" containerID="6b42ef2276ada3817ad0a78185c8ccc86ef5075ac748995319a13ab72cfb4a35" exitCode=0 Nov 27 09:58:06 crc kubenswrapper[4971]: I1127 09:58:06.399238 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jj2vv" event={"ID":"1fb050e0-b560-46e6-9d65-6332e4b8b2d4","Type":"ContainerDied","Data":"6b42ef2276ada3817ad0a78185c8ccc86ef5075ac748995319a13ab72cfb4a35"} Nov 27 09:58:07 crc kubenswrapper[4971]: I1127 09:58:07.270651 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jj2vv" Nov 27 09:58:07 crc kubenswrapper[4971]: I1127 09:58:07.396238 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fb050e0-b560-46e6-9d65-6332e4b8b2d4-catalog-content\") pod \"1fb050e0-b560-46e6-9d65-6332e4b8b2d4\" (UID: \"1fb050e0-b560-46e6-9d65-6332e4b8b2d4\") " Nov 27 09:58:07 crc kubenswrapper[4971]: I1127 09:58:07.396397 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fb050e0-b560-46e6-9d65-6332e4b8b2d4-utilities\") pod \"1fb050e0-b560-46e6-9d65-6332e4b8b2d4\" (UID: \"1fb050e0-b560-46e6-9d65-6332e4b8b2d4\") " Nov 27 09:58:07 crc kubenswrapper[4971]: I1127 09:58:07.396453 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m95zh\" (UniqueName: \"kubernetes.io/projected/1fb050e0-b560-46e6-9d65-6332e4b8b2d4-kube-api-access-m95zh\") pod \"1fb050e0-b560-46e6-9d65-6332e4b8b2d4\" (UID: \"1fb050e0-b560-46e6-9d65-6332e4b8b2d4\") " Nov 27 09:58:07 crc kubenswrapper[4971]: I1127 09:58:07.397475 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fb050e0-b560-46e6-9d65-6332e4b8b2d4-utilities" (OuterVolumeSpecName: "utilities") pod "1fb050e0-b560-46e6-9d65-6332e4b8b2d4" (UID: "1fb050e0-b560-46e6-9d65-6332e4b8b2d4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:58:07 crc kubenswrapper[4971]: I1127 09:58:07.404810 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fb050e0-b560-46e6-9d65-6332e4b8b2d4-kube-api-access-m95zh" (OuterVolumeSpecName: "kube-api-access-m95zh") pod "1fb050e0-b560-46e6-9d65-6332e4b8b2d4" (UID: "1fb050e0-b560-46e6-9d65-6332e4b8b2d4"). InnerVolumeSpecName "kube-api-access-m95zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 09:58:07 crc kubenswrapper[4971]: I1127 09:58:07.411141 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jj2vv" event={"ID":"1fb050e0-b560-46e6-9d65-6332e4b8b2d4","Type":"ContainerDied","Data":"e30bf59b1baede1e8b2e663acacefafc47ef63ca3258a8e4cd420946c873f5c5"} Nov 27 09:58:07 crc kubenswrapper[4971]: I1127 09:58:07.411204 4971 scope.go:117] "RemoveContainer" containerID="6b42ef2276ada3817ad0a78185c8ccc86ef5075ac748995319a13ab72cfb4a35" Nov 27 09:58:07 crc kubenswrapper[4971]: I1127 09:58:07.411214 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jj2vv" Nov 27 09:58:07 crc kubenswrapper[4971]: I1127 09:58:07.470763 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fb050e0-b560-46e6-9d65-6332e4b8b2d4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1fb050e0-b560-46e6-9d65-6332e4b8b2d4" (UID: "1fb050e0-b560-46e6-9d65-6332e4b8b2d4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 09:58:07 crc kubenswrapper[4971]: I1127 09:58:07.487154 4971 scope.go:117] "RemoveContainer" containerID="5c5fa219935d46d09efc5f39c8f670f6ff1baca585f3885e14ac30679a11496b" Nov 27 09:58:07 crc kubenswrapper[4971]: I1127 09:58:07.499733 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fb050e0-b560-46e6-9d65-6332e4b8b2d4-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 09:58:07 crc kubenswrapper[4971]: I1127 09:58:07.500550 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fb050e0-b560-46e6-9d65-6332e4b8b2d4-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 09:58:07 crc kubenswrapper[4971]: I1127 09:58:07.500604 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m95zh\" (UniqueName: \"kubernetes.io/projected/1fb050e0-b560-46e6-9d65-6332e4b8b2d4-kube-api-access-m95zh\") on node \"crc\" DevicePath \"\"" Nov 27 09:58:07 crc kubenswrapper[4971]: I1127 09:58:07.510710 4971 scope.go:117] "RemoveContainer" containerID="530d529c12b1bf62a848bcf5628cdfef865cbf1c74de216c8292ddceccbed96d" Nov 27 09:58:07 crc kubenswrapper[4971]: I1127 09:58:07.756142 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jj2vv"] Nov 27 09:58:07 crc kubenswrapper[4971]: I1127 09:58:07.773923 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jj2vv"] Nov 27 09:58:08 crc kubenswrapper[4971]: I1127 09:58:08.567478 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fb050e0-b560-46e6-9d65-6332e4b8b2d4" path="/var/lib/kubelet/pods/1fb050e0-b560-46e6-9d65-6332e4b8b2d4/volumes" Nov 27 09:58:26 crc kubenswrapper[4971]: I1127 09:58:26.413001 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 09:58:26 crc kubenswrapper[4971]: I1127 09:58:26.413709 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 09:58:56 crc kubenswrapper[4971]: I1127 09:58:56.413005 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 09:58:56 crc kubenswrapper[4971]: I1127 09:58:56.413575 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 09:59:26 crc kubenswrapper[4971]: I1127 09:59:26.413462 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 09:59:26 crc kubenswrapper[4971]: I1127 09:59:26.414271 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 09:59:26 crc kubenswrapper[4971]: I1127 09:59:26.414778 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 09:59:26 crc kubenswrapper[4971]: I1127 09:59:26.426397 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 09:59:26 crc kubenswrapper[4971]: I1127 09:59:26.426586 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" gracePeriod=600 Nov 27 09:59:26 crc kubenswrapper[4971]: E1127 09:59:26.563818 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:59:27 crc kubenswrapper[4971]: I1127 09:59:27.438289 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" exitCode=0 Nov 27 09:59:27 crc kubenswrapper[4971]: I1127 09:59:27.438372 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da"} Nov 27 09:59:27 crc kubenswrapper[4971]: I1127 09:59:27.438865 4971 scope.go:117] "RemoveContainer" containerID="abfb28f790ede5cd72fbb5dc60b5a5e8a898d243568dbe3ace67e693e4e0d682" Nov 27 09:59:27 crc kubenswrapper[4971]: I1127 09:59:27.440019 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 09:59:27 crc kubenswrapper[4971]: E1127 09:59:27.440632 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:59:41 crc kubenswrapper[4971]: I1127 09:59:41.551392 4971 
scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 09:59:41 crc kubenswrapper[4971]: E1127 09:59:41.552838 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:59:55 crc kubenswrapper[4971]: I1127 09:59:55.554698 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 09:59:55 crc kubenswrapper[4971]: E1127 09:59:55.555491 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.431453 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Nov 27 09:59:59 crc kubenswrapper[4971]: E1127 09:59:59.432300 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fb050e0-b560-46e6-9d65-6332e4b8b2d4" containerName="extract-utilities" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.432318 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fb050e0-b560-46e6-9d65-6332e4b8b2d4" containerName="extract-utilities" Nov 27 09:59:59 crc kubenswrapper[4971]: E1127 09:59:59.432333 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fb050e0-b560-46e6-9d65-6332e4b8b2d4" containerName="registry-server" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.432341 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fb050e0-b560-46e6-9d65-6332e4b8b2d4" containerName="registry-server" Nov 27 09:59:59 crc kubenswrapper[4971]: E1127 09:59:59.432367 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fb050e0-b560-46e6-9d65-6332e4b8b2d4" containerName="extract-content" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.432374 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fb050e0-b560-46e6-9d65-6332e4b8b2d4" containerName="extract-content" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.432632 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fb050e0-b560-46e6-9d65-6332e4b8b2d4" containerName="registry-server" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.433516 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.439985 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.440267 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.440759 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-8nz9c" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.440829 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.460485 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.513201 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5dede177-0f43-453e-b75b-e5ac63add3da-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.513255 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5dede177-0f43-453e-b75b-e5ac63add3da-config-data\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.513281 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.513527 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5dede177-0f43-453e-b75b-e5ac63add3da-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.513664 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5dede177-0f43-453e-b75b-e5ac63add3da-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.513718 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5dede177-0f43-453e-b75b-e5ac63add3da-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.513992 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: 
\"kubernetes.io/empty-dir/5dede177-0f43-453e-b75b-e5ac63add3da-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.514062 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwlg9\" (UniqueName: \"kubernetes.io/projected/5dede177-0f43-453e-b75b-e5ac63add3da-kube-api-access-nwlg9\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.514192 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5dede177-0f43-453e-b75b-e5ac63add3da-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.615843 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5dede177-0f43-453e-b75b-e5ac63add3da-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.615921 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5dede177-0f43-453e-b75b-e5ac63add3da-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.615950 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5dede177-0f43-453e-b75b-e5ac63add3da-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.616023 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5dede177-0f43-453e-b75b-e5ac63add3da-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.616046 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwlg9\" (UniqueName: \"kubernetes.io/projected/5dede177-0f43-453e-b75b-e5ac63add3da-kube-api-access-nwlg9\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.616106 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5dede177-0f43-453e-b75b-e5ac63add3da-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.616170 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: 
\"kubernetes.io/empty-dir/5dede177-0f43-453e-b75b-e5ac63add3da-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.616197 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5dede177-0f43-453e-b75b-e5ac63add3da-config-data\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.616220 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.617288 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5dede177-0f43-453e-b75b-e5ac63add3da-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.617393 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5dede177-0f43-453e-b75b-e5ac63add3da-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.618094 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5dede177-0f43-453e-b75b-e5ac63add3da-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.618327 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5dede177-0f43-453e-b75b-e5ac63add3da-config-data\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.622688 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5dede177-0f43-453e-b75b-e5ac63add3da-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.623031 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5dede177-0f43-453e-b75b-e5ac63add3da-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.623424 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5dede177-0f43-453e-b75b-e5ac63add3da-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " 
pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.641353 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwlg9\" (UniqueName: \"kubernetes.io/projected/5dede177-0f43-453e-b75b-e5ac63add3da-kube-api-access-nwlg9\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.649308 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.685349 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " pod="openstack/tempest-tests-tempest" Nov 27 09:59:59 crc kubenswrapper[4971]: I1127 09:59:59.761630 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Nov 27 10:00:00 crc kubenswrapper[4971]: I1127 10:00:00.164230 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4"] Nov 27 10:00:00 crc kubenswrapper[4971]: I1127 10:00:00.166122 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4" Nov 27 10:00:00 crc kubenswrapper[4971]: I1127 10:00:00.170567 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 27 10:00:00 crc kubenswrapper[4971]: I1127 10:00:00.170863 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 27 10:00:00 crc kubenswrapper[4971]: I1127 10:00:00.178316 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4"] Nov 27 10:00:00 crc kubenswrapper[4971]: I1127 10:00:00.229216 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a38b5b3e-cff5-4d26-9c57-d8e452d99ea6-secret-volume\") pod \"collect-profiles-29403960-gvpt4\" (UID: \"a38b5b3e-cff5-4d26-9c57-d8e452d99ea6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4" Nov 27 10:00:00 crc kubenswrapper[4971]: I1127 10:00:00.229382 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xcfp\" (UniqueName: \"kubernetes.io/projected/a38b5b3e-cff5-4d26-9c57-d8e452d99ea6-kube-api-access-9xcfp\") pod \"collect-profiles-29403960-gvpt4\" (UID: \"a38b5b3e-cff5-4d26-9c57-d8e452d99ea6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4" Nov 27 10:00:00 crc kubenswrapper[4971]: I1127 10:00:00.229467 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a38b5b3e-cff5-4d26-9c57-d8e452d99ea6-config-volume\") pod \"collect-profiles-29403960-gvpt4\" (UID: 
\"a38b5b3e-cff5-4d26-9c57-d8e452d99ea6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4" Nov 27 10:00:00 crc kubenswrapper[4971]: I1127 10:00:00.265771 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Nov 27 10:00:00 crc kubenswrapper[4971]: I1127 10:00:00.331342 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a38b5b3e-cff5-4d26-9c57-d8e452d99ea6-secret-volume\") pod \"collect-profiles-29403960-gvpt4\" (UID: \"a38b5b3e-cff5-4d26-9c57-d8e452d99ea6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4" Nov 27 10:00:00 crc kubenswrapper[4971]: I1127 10:00:00.331489 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xcfp\" (UniqueName: \"kubernetes.io/projected/a38b5b3e-cff5-4d26-9c57-d8e452d99ea6-kube-api-access-9xcfp\") pod \"collect-profiles-29403960-gvpt4\" (UID: \"a38b5b3e-cff5-4d26-9c57-d8e452d99ea6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4" Nov 27 10:00:00 crc kubenswrapper[4971]: I1127 10:00:00.331571 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a38b5b3e-cff5-4d26-9c57-d8e452d99ea6-config-volume\") pod \"collect-profiles-29403960-gvpt4\" (UID: \"a38b5b3e-cff5-4d26-9c57-d8e452d99ea6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4" Nov 27 10:00:00 crc kubenswrapper[4971]: I1127 10:00:00.332695 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a38b5b3e-cff5-4d26-9c57-d8e452d99ea6-config-volume\") pod \"collect-profiles-29403960-gvpt4\" (UID: \"a38b5b3e-cff5-4d26-9c57-d8e452d99ea6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4" Nov 27 10:00:00 crc kubenswrapper[4971]: I1127 10:00:00.336776 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a38b5b3e-cff5-4d26-9c57-d8e452d99ea6-secret-volume\") pod \"collect-profiles-29403960-gvpt4\" (UID: \"a38b5b3e-cff5-4d26-9c57-d8e452d99ea6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4" Nov 27 10:00:00 crc kubenswrapper[4971]: I1127 10:00:00.347102 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xcfp\" (UniqueName: \"kubernetes.io/projected/a38b5b3e-cff5-4d26-9c57-d8e452d99ea6-kube-api-access-9xcfp\") pod \"collect-profiles-29403960-gvpt4\" (UID: \"a38b5b3e-cff5-4d26-9c57-d8e452d99ea6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4" Nov 27 10:00:00 crc kubenswrapper[4971]: I1127 10:00:00.503867 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4" Nov 27 10:00:00 crc kubenswrapper[4971]: I1127 10:00:00.924080 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"5dede177-0f43-453e-b75b-e5ac63add3da","Type":"ContainerStarted","Data":"0c34220b148bc83cc0ad0b5efde75beda39609be65cfacab6a7269a25ece36a3"} Nov 27 10:00:01 crc kubenswrapper[4971]: I1127 10:00:01.045229 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4"] Nov 27 10:00:01 crc kubenswrapper[4971]: I1127 10:00:01.935389 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4" event={"ID":"a38b5b3e-cff5-4d26-9c57-d8e452d99ea6","Type":"ContainerStarted","Data":"e5f38ccb7d915eba963e5fcf65a7e09881ee728af2a7fa8099b6466353014e3b"} Nov 27 10:00:01 crc kubenswrapper[4971]: I1127 10:00:01.935751 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4" event={"ID":"a38b5b3e-cff5-4d26-9c57-d8e452d99ea6","Type":"ContainerStarted","Data":"4171b58c1cedf0cb5f86678cd552265b64e97e6ce5ae68aa2e98e6f3f2d06274"} Nov 27 10:00:01 crc kubenswrapper[4971]: I1127 10:00:01.967043 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4" podStartSLOduration=1.9670203960000001 podStartE2EDuration="1.967020396s" podCreationTimestamp="2025-11-27 10:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 10:00:01.95626985 +0000 UTC m=+11240.148313768" watchObservedRunningTime="2025-11-27 10:00:01.967020396 +0000 UTC m=+11240.159064334" Nov 27 10:00:02 crc kubenswrapper[4971]: I1127 10:00:02.951402 4971 generic.go:334] "Generic (PLEG): container finished" podID="a38b5b3e-cff5-4d26-9c57-d8e452d99ea6" containerID="e5f38ccb7d915eba963e5fcf65a7e09881ee728af2a7fa8099b6466353014e3b" exitCode=0 Nov 27 10:00:02 crc kubenswrapper[4971]: I1127 10:00:02.951503 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4" event={"ID":"a38b5b3e-cff5-4d26-9c57-d8e452d99ea6","Type":"ContainerDied","Data":"e5f38ccb7d915eba963e5fcf65a7e09881ee728af2a7fa8099b6466353014e3b"} Nov 27 10:00:04 crc kubenswrapper[4971]: I1127 10:00:04.355212 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4" Nov 27 10:00:04 crc kubenswrapper[4971]: I1127 10:00:04.477233 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a38b5b3e-cff5-4d26-9c57-d8e452d99ea6-secret-volume\") pod \"a38b5b3e-cff5-4d26-9c57-d8e452d99ea6\" (UID: \"a38b5b3e-cff5-4d26-9c57-d8e452d99ea6\") " Nov 27 10:00:04 crc kubenswrapper[4971]: I1127 10:00:04.477308 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a38b5b3e-cff5-4d26-9c57-d8e452d99ea6-config-volume\") pod \"a38b5b3e-cff5-4d26-9c57-d8e452d99ea6\" (UID: \"a38b5b3e-cff5-4d26-9c57-d8e452d99ea6\") " Nov 27 10:00:04 crc kubenswrapper[4971]: I1127 10:00:04.477951 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a38b5b3e-cff5-4d26-9c57-d8e452d99ea6-config-volume" (OuterVolumeSpecName: "config-volume") pod "a38b5b3e-cff5-4d26-9c57-d8e452d99ea6" (UID: "a38b5b3e-cff5-4d26-9c57-d8e452d99ea6"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 10:00:04 crc kubenswrapper[4971]: I1127 10:00:04.478342 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xcfp\" (UniqueName: \"kubernetes.io/projected/a38b5b3e-cff5-4d26-9c57-d8e452d99ea6-kube-api-access-9xcfp\") pod \"a38b5b3e-cff5-4d26-9c57-d8e452d99ea6\" (UID: \"a38b5b3e-cff5-4d26-9c57-d8e452d99ea6\") " Nov 27 10:00:04 crc kubenswrapper[4971]: I1127 10:00:04.478942 4971 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a38b5b3e-cff5-4d26-9c57-d8e452d99ea6-config-volume\") on node \"crc\" DevicePath \"\"" Nov 27 10:00:04 crc kubenswrapper[4971]: I1127 10:00:04.482656 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a38b5b3e-cff5-4d26-9c57-d8e452d99ea6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a38b5b3e-cff5-4d26-9c57-d8e452d99ea6" (UID: "a38b5b3e-cff5-4d26-9c57-d8e452d99ea6"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 10:00:04 crc kubenswrapper[4971]: I1127 10:00:04.483862 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a38b5b3e-cff5-4d26-9c57-d8e452d99ea6-kube-api-access-9xcfp" (OuterVolumeSpecName: "kube-api-access-9xcfp") pod "a38b5b3e-cff5-4d26-9c57-d8e452d99ea6" (UID: "a38b5b3e-cff5-4d26-9c57-d8e452d99ea6"). InnerVolumeSpecName "kube-api-access-9xcfp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:00:04 crc kubenswrapper[4971]: I1127 10:00:04.586702 4971 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a38b5b3e-cff5-4d26-9c57-d8e452d99ea6-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 27 10:00:04 crc kubenswrapper[4971]: I1127 10:00:04.586773 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xcfp\" (UniqueName: \"kubernetes.io/projected/a38b5b3e-cff5-4d26-9c57-d8e452d99ea6-kube-api-access-9xcfp\") on node \"crc\" DevicePath \"\"" Nov 27 10:00:04 crc kubenswrapper[4971]: I1127 10:00:04.977555 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4" event={"ID":"a38b5b3e-cff5-4d26-9c57-d8e452d99ea6","Type":"ContainerDied","Data":"4171b58c1cedf0cb5f86678cd552265b64e97e6ce5ae68aa2e98e6f3f2d06274"} Nov 27 10:00:04 crc kubenswrapper[4971]: I1127 10:00:04.977646 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4171b58c1cedf0cb5f86678cd552265b64e97e6ce5ae68aa2e98e6f3f2d06274" Nov 27 10:00:04 crc kubenswrapper[4971]: I1127 10:00:04.977657 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403960-gvpt4" Nov 27 10:00:05 crc kubenswrapper[4971]: I1127 10:00:05.048570 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2"] Nov 27 10:00:05 crc kubenswrapper[4971]: I1127 10:00:05.058472 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403915-6gkn2"] Nov 27 10:00:06 crc kubenswrapper[4971]: I1127 10:00:06.562906 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90e15afa-3845-4e37-bd64-c7be4048bb7e" path="/var/lib/kubelet/pods/90e15afa-3845-4e37-bd64-c7be4048bb7e/volumes" Nov 27 10:00:10 crc kubenswrapper[4971]: I1127 10:00:10.551454 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 10:00:10 crc kubenswrapper[4971]: E1127 10:00:10.554901 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:00:22 crc kubenswrapper[4971]: I1127 10:00:22.561163 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 10:00:22 crc kubenswrapper[4971]: E1127 10:00:22.561985 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:00:37 crc kubenswrapper[4971]: I1127 10:00:37.550626 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 
10:00:37 crc kubenswrapper[4971]: E1127 10:00:37.551819 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:00:37 crc kubenswrapper[4971]: I1127 10:00:37.865969 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ndghw"] Nov 27 10:00:37 crc kubenswrapper[4971]: E1127 10:00:37.866635 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a38b5b3e-cff5-4d26-9c57-d8e452d99ea6" containerName="collect-profiles" Nov 27 10:00:37 crc kubenswrapper[4971]: I1127 10:00:37.866654 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="a38b5b3e-cff5-4d26-9c57-d8e452d99ea6" containerName="collect-profiles" Nov 27 10:00:37 crc kubenswrapper[4971]: I1127 10:00:37.866946 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="a38b5b3e-cff5-4d26-9c57-d8e452d99ea6" containerName="collect-profiles" Nov 27 10:00:37 crc kubenswrapper[4971]: I1127 10:00:37.868954 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ndghw" Nov 27 10:00:37 crc kubenswrapper[4971]: I1127 10:00:37.884340 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ndghw"] Nov 27 10:00:37 crc kubenswrapper[4971]: I1127 10:00:37.919934 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1cb306c1-aef2-4d15-aa78-621eec32f349-catalog-content\") pod \"redhat-operators-ndghw\" (UID: \"1cb306c1-aef2-4d15-aa78-621eec32f349\") " pod="openshift-marketplace/redhat-operators-ndghw" Nov 27 10:00:37 crc kubenswrapper[4971]: I1127 10:00:37.920151 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpr9d\" (UniqueName: \"kubernetes.io/projected/1cb306c1-aef2-4d15-aa78-621eec32f349-kube-api-access-xpr9d\") pod \"redhat-operators-ndghw\" (UID: \"1cb306c1-aef2-4d15-aa78-621eec32f349\") " pod="openshift-marketplace/redhat-operators-ndghw" Nov 27 10:00:37 crc kubenswrapper[4971]: I1127 10:00:37.920428 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1cb306c1-aef2-4d15-aa78-621eec32f349-utilities\") pod \"redhat-operators-ndghw\" (UID: \"1cb306c1-aef2-4d15-aa78-621eec32f349\") " pod="openshift-marketplace/redhat-operators-ndghw" Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.022393 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpr9d\" (UniqueName: \"kubernetes.io/projected/1cb306c1-aef2-4d15-aa78-621eec32f349-kube-api-access-xpr9d\") pod \"redhat-operators-ndghw\" (UID: \"1cb306c1-aef2-4d15-aa78-621eec32f349\") " pod="openshift-marketplace/redhat-operators-ndghw" Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.022607 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1cb306c1-aef2-4d15-aa78-621eec32f349-utilities\") pod \"redhat-operators-ndghw\" (UID: 
\"1cb306c1-aef2-4d15-aa78-621eec32f349\") " pod="openshift-marketplace/redhat-operators-ndghw" Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.022685 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1cb306c1-aef2-4d15-aa78-621eec32f349-catalog-content\") pod \"redhat-operators-ndghw\" (UID: \"1cb306c1-aef2-4d15-aa78-621eec32f349\") " pod="openshift-marketplace/redhat-operators-ndghw" Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.023150 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1cb306c1-aef2-4d15-aa78-621eec32f349-catalog-content\") pod \"redhat-operators-ndghw\" (UID: \"1cb306c1-aef2-4d15-aa78-621eec32f349\") " pod="openshift-marketplace/redhat-operators-ndghw" Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.023177 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1cb306c1-aef2-4d15-aa78-621eec32f349-utilities\") pod \"redhat-operators-ndghw\" (UID: \"1cb306c1-aef2-4d15-aa78-621eec32f349\") " pod="openshift-marketplace/redhat-operators-ndghw" Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.054094 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wpkdl"] Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.056922 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wpkdl" Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.059904 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpr9d\" (UniqueName: \"kubernetes.io/projected/1cb306c1-aef2-4d15-aa78-621eec32f349-kube-api-access-xpr9d\") pod \"redhat-operators-ndghw\" (UID: \"1cb306c1-aef2-4d15-aa78-621eec32f349\") " pod="openshift-marketplace/redhat-operators-ndghw" Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.064371 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wpkdl"] Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.125027 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62e99d9e-1c29-4b27-80fa-e1d9faec4141-catalog-content\") pod \"certified-operators-wpkdl\" (UID: \"62e99d9e-1c29-4b27-80fa-e1d9faec4141\") " pod="openshift-marketplace/certified-operators-wpkdl" Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.125086 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmvnl\" (UniqueName: \"kubernetes.io/projected/62e99d9e-1c29-4b27-80fa-e1d9faec4141-kube-api-access-wmvnl\") pod \"certified-operators-wpkdl\" (UID: \"62e99d9e-1c29-4b27-80fa-e1d9faec4141\") " pod="openshift-marketplace/certified-operators-wpkdl" Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.127249 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62e99d9e-1c29-4b27-80fa-e1d9faec4141-utilities\") pod \"certified-operators-wpkdl\" (UID: \"62e99d9e-1c29-4b27-80fa-e1d9faec4141\") " pod="openshift-marketplace/certified-operators-wpkdl" Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.189929 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ndghw" Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.229406 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62e99d9e-1c29-4b27-80fa-e1d9faec4141-catalog-content\") pod \"certified-operators-wpkdl\" (UID: \"62e99d9e-1c29-4b27-80fa-e1d9faec4141\") " pod="openshift-marketplace/certified-operators-wpkdl" Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.229748 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmvnl\" (UniqueName: \"kubernetes.io/projected/62e99d9e-1c29-4b27-80fa-e1d9faec4141-kube-api-access-wmvnl\") pod \"certified-operators-wpkdl\" (UID: \"62e99d9e-1c29-4b27-80fa-e1d9faec4141\") " pod="openshift-marketplace/certified-operators-wpkdl" Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.229792 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62e99d9e-1c29-4b27-80fa-e1d9faec4141-utilities\") pod \"certified-operators-wpkdl\" (UID: \"62e99d9e-1c29-4b27-80fa-e1d9faec4141\") " pod="openshift-marketplace/certified-operators-wpkdl" Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.230427 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62e99d9e-1c29-4b27-80fa-e1d9faec4141-catalog-content\") pod \"certified-operators-wpkdl\" (UID: \"62e99d9e-1c29-4b27-80fa-e1d9faec4141\") " pod="openshift-marketplace/certified-operators-wpkdl" Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.230547 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62e99d9e-1c29-4b27-80fa-e1d9faec4141-utilities\") pod \"certified-operators-wpkdl\" (UID: \"62e99d9e-1c29-4b27-80fa-e1d9faec4141\") " pod="openshift-marketplace/certified-operators-wpkdl" Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.254896 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmvnl\" (UniqueName: \"kubernetes.io/projected/62e99d9e-1c29-4b27-80fa-e1d9faec4141-kube-api-access-wmvnl\") pod \"certified-operators-wpkdl\" (UID: \"62e99d9e-1c29-4b27-80fa-e1d9faec4141\") " pod="openshift-marketplace/certified-operators-wpkdl" Nov 27 10:00:38 crc kubenswrapper[4971]: I1127 10:00:38.425816 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wpkdl" Nov 27 10:00:51 crc kubenswrapper[4971]: E1127 10:00:51.285817 4971 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:5de932becd5a40e79a67f43820fb5336" Nov 27 10:00:51 crc kubenswrapper[4971]: E1127 10:00:51.286685 4971 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:5de932becd5a40e79a67f43820fb5336" Nov 27 10:00:51 crc kubenswrapper[4971]: E1127 10:00:51.286896 4971 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:5de932becd5a40e79a67f43820fb5336,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nwlg9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&Confi
gMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(5dede177-0f43-453e-b75b-e5ac63add3da): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 27 10:00:51 crc kubenswrapper[4971]: E1127 10:00:51.288249 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="5dede177-0f43-453e-b75b-e5ac63add3da" Nov 27 10:00:51 crc kubenswrapper[4971]: E1127 10:00:51.528755 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:5de932becd5a40e79a67f43820fb5336\\\"\"" pod="openstack/tempest-tests-tempest" podUID="5dede177-0f43-453e-b75b-e5ac63add3da" Nov 27 10:00:51 crc kubenswrapper[4971]: I1127 10:00:51.550825 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 10:00:51 crc kubenswrapper[4971]: E1127 10:00:51.551133 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:00:51 crc kubenswrapper[4971]: I1127 10:00:51.767563 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wpkdl"] Nov 27 10:00:51 crc kubenswrapper[4971]: W1127 10:00:51.845804 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1cb306c1_aef2_4d15_aa78_621eec32f349.slice/crio-ad3e0e4145b48aa909b76776c585013b84a485434592f32ad25ed88c9a8c3c19 WatchSource:0}: Error finding container ad3e0e4145b48aa909b76776c585013b84a485434592f32ad25ed88c9a8c3c19: Status 404 returned error can't find the container with id ad3e0e4145b48aa909b76776c585013b84a485434592f32ad25ed88c9a8c3c19 Nov 27 10:00:51 crc kubenswrapper[4971]: I1127 10:00:51.850642 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ndghw"] Nov 27 10:00:52 crc kubenswrapper[4971]: I1127 10:00:52.541498 4971 generic.go:334] "Generic (PLEG): container finished" podID="62e99d9e-1c29-4b27-80fa-e1d9faec4141" containerID="efc37c1b0f770bf52697ae5a310cb473d82cf007f0e484ad40fdf9b123ea16c2" exitCode=0 Nov 27 10:00:52 crc kubenswrapper[4971]: I1127 10:00:52.541571 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wpkdl" event={"ID":"62e99d9e-1c29-4b27-80fa-e1d9faec4141","Type":"ContainerDied","Data":"efc37c1b0f770bf52697ae5a310cb473d82cf007f0e484ad40fdf9b123ea16c2"} Nov 27 10:00:52 crc kubenswrapper[4971]: I1127 10:00:52.542096 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-wpkdl" event={"ID":"62e99d9e-1c29-4b27-80fa-e1d9faec4141","Type":"ContainerStarted","Data":"20a378f3faae8662956f2a6befee34b19603629e3d580706055956020d1ba9cc"} Nov 27 10:00:52 crc kubenswrapper[4971]: I1127 10:00:52.548374 4971 generic.go:334] "Generic (PLEG): container finished" podID="1cb306c1-aef2-4d15-aa78-621eec32f349" containerID="462dcf397274c905aa0790384f3a2924e870dba5912ab99c83225e0d99aa6dd2" exitCode=0 Nov 27 10:00:52 crc kubenswrapper[4971]: I1127 10:00:52.548442 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ndghw" event={"ID":"1cb306c1-aef2-4d15-aa78-621eec32f349","Type":"ContainerDied","Data":"462dcf397274c905aa0790384f3a2924e870dba5912ab99c83225e0d99aa6dd2"} Nov 27 10:00:52 crc kubenswrapper[4971]: I1127 10:00:52.548492 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ndghw" event={"ID":"1cb306c1-aef2-4d15-aa78-621eec32f349","Type":"ContainerStarted","Data":"ad3e0e4145b48aa909b76776c585013b84a485434592f32ad25ed88c9a8c3c19"} Nov 27 10:00:55 crc kubenswrapper[4971]: I1127 10:00:55.582814 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wpkdl" event={"ID":"62e99d9e-1c29-4b27-80fa-e1d9faec4141","Type":"ContainerStarted","Data":"e377d241133a0c5b8bbb860c1c04688e655970d8a2cbc508d2c271f8be04aec9"} Nov 27 10:00:55 crc kubenswrapper[4971]: I1127 10:00:55.586945 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ndghw" event={"ID":"1cb306c1-aef2-4d15-aa78-621eec32f349","Type":"ContainerStarted","Data":"a38f26d1646ccaf01ad1a732421ece4c5e9143f79ad5ff312cf0943df7466d0c"} Nov 27 10:00:57 crc kubenswrapper[4971]: I1127 10:00:57.619517 4971 generic.go:334] "Generic (PLEG): container finished" podID="62e99d9e-1c29-4b27-80fa-e1d9faec4141" containerID="e377d241133a0c5b8bbb860c1c04688e655970d8a2cbc508d2c271f8be04aec9" exitCode=0 Nov 27 10:00:57 crc kubenswrapper[4971]: I1127 10:00:57.619655 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wpkdl" event={"ID":"62e99d9e-1c29-4b27-80fa-e1d9faec4141","Type":"ContainerDied","Data":"e377d241133a0c5b8bbb860c1c04688e655970d8a2cbc508d2c271f8be04aec9"} Nov 27 10:00:59 crc kubenswrapper[4971]: I1127 10:00:59.645438 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wpkdl" event={"ID":"62e99d9e-1c29-4b27-80fa-e1d9faec4141","Type":"ContainerStarted","Data":"1585bf4714eb73c6debb07af11b5ae61d70b595a9e11f072d646e91337736140"} Nov 27 10:00:59 crc kubenswrapper[4971]: I1127 10:00:59.682046 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wpkdl" podStartSLOduration=14.978652594 podStartE2EDuration="21.682008513s" podCreationTimestamp="2025-11-27 10:00:38 +0000 UTC" firstStartedPulling="2025-11-27 10:00:52.544986941 +0000 UTC m=+11290.737030889" lastFinishedPulling="2025-11-27 10:00:59.24834289 +0000 UTC m=+11297.440386808" observedRunningTime="2025-11-27 10:00:59.67316024 +0000 UTC m=+11297.865204178" watchObservedRunningTime="2025-11-27 10:00:59.682008513 +0000 UTC m=+11297.874052491" Nov 27 10:01:00 crc kubenswrapper[4971]: I1127 10:01:00.161503 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29403961-2r79p"] Nov 27 10:01:00 crc kubenswrapper[4971]: I1127 10:01:00.163711 4971 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/keystone-cron-29403961-2r79p" Nov 27 10:01:00 crc kubenswrapper[4971]: I1127 10:01:00.176320 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29403961-2r79p"] Nov 27 10:01:00 crc kubenswrapper[4971]: I1127 10:01:00.178623 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28af66d8-2264-4581-b51a-dfab3e4a7456-config-data\") pod \"keystone-cron-29403961-2r79p\" (UID: \"28af66d8-2264-4581-b51a-dfab3e4a7456\") " pod="openstack/keystone-cron-29403961-2r79p" Nov 27 10:01:00 crc kubenswrapper[4971]: I1127 10:01:00.178694 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/28af66d8-2264-4581-b51a-dfab3e4a7456-fernet-keys\") pod \"keystone-cron-29403961-2r79p\" (UID: \"28af66d8-2264-4581-b51a-dfab3e4a7456\") " pod="openstack/keystone-cron-29403961-2r79p" Nov 27 10:01:00 crc kubenswrapper[4971]: I1127 10:01:00.178868 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28af66d8-2264-4581-b51a-dfab3e4a7456-combined-ca-bundle\") pod \"keystone-cron-29403961-2r79p\" (UID: \"28af66d8-2264-4581-b51a-dfab3e4a7456\") " pod="openstack/keystone-cron-29403961-2r79p" Nov 27 10:01:00 crc kubenswrapper[4971]: I1127 10:01:00.179170 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qff6t\" (UniqueName: \"kubernetes.io/projected/28af66d8-2264-4581-b51a-dfab3e4a7456-kube-api-access-qff6t\") pod \"keystone-cron-29403961-2r79p\" (UID: \"28af66d8-2264-4581-b51a-dfab3e4a7456\") " pod="openstack/keystone-cron-29403961-2r79p" Nov 27 10:01:00 crc kubenswrapper[4971]: I1127 10:01:00.280796 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qff6t\" (UniqueName: \"kubernetes.io/projected/28af66d8-2264-4581-b51a-dfab3e4a7456-kube-api-access-qff6t\") pod \"keystone-cron-29403961-2r79p\" (UID: \"28af66d8-2264-4581-b51a-dfab3e4a7456\") " pod="openstack/keystone-cron-29403961-2r79p" Nov 27 10:01:00 crc kubenswrapper[4971]: I1127 10:01:00.280912 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28af66d8-2264-4581-b51a-dfab3e4a7456-config-data\") pod \"keystone-cron-29403961-2r79p\" (UID: \"28af66d8-2264-4581-b51a-dfab3e4a7456\") " pod="openstack/keystone-cron-29403961-2r79p" Nov 27 10:01:00 crc kubenswrapper[4971]: I1127 10:01:00.280953 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/28af66d8-2264-4581-b51a-dfab3e4a7456-fernet-keys\") pod \"keystone-cron-29403961-2r79p\" (UID: \"28af66d8-2264-4581-b51a-dfab3e4a7456\") " pod="openstack/keystone-cron-29403961-2r79p" Nov 27 10:01:00 crc kubenswrapper[4971]: I1127 10:01:00.281004 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28af66d8-2264-4581-b51a-dfab3e4a7456-combined-ca-bundle\") pod \"keystone-cron-29403961-2r79p\" (UID: \"28af66d8-2264-4581-b51a-dfab3e4a7456\") " pod="openstack/keystone-cron-29403961-2r79p" Nov 27 10:01:00 crc kubenswrapper[4971]: I1127 10:01:00.287672 4971 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28af66d8-2264-4581-b51a-dfab3e4a7456-combined-ca-bundle\") pod \"keystone-cron-29403961-2r79p\" (UID: \"28af66d8-2264-4581-b51a-dfab3e4a7456\") " pod="openstack/keystone-cron-29403961-2r79p" Nov 27 10:01:00 crc kubenswrapper[4971]: I1127 10:01:00.288440 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/28af66d8-2264-4581-b51a-dfab3e4a7456-fernet-keys\") pod \"keystone-cron-29403961-2r79p\" (UID: \"28af66d8-2264-4581-b51a-dfab3e4a7456\") " pod="openstack/keystone-cron-29403961-2r79p" Nov 27 10:01:00 crc kubenswrapper[4971]: I1127 10:01:00.289355 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28af66d8-2264-4581-b51a-dfab3e4a7456-config-data\") pod \"keystone-cron-29403961-2r79p\" (UID: \"28af66d8-2264-4581-b51a-dfab3e4a7456\") " pod="openstack/keystone-cron-29403961-2r79p" Nov 27 10:01:00 crc kubenswrapper[4971]: I1127 10:01:00.296993 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qff6t\" (UniqueName: \"kubernetes.io/projected/28af66d8-2264-4581-b51a-dfab3e4a7456-kube-api-access-qff6t\") pod \"keystone-cron-29403961-2r79p\" (UID: \"28af66d8-2264-4581-b51a-dfab3e4a7456\") " pod="openstack/keystone-cron-29403961-2r79p" Nov 27 10:01:00 crc kubenswrapper[4971]: I1127 10:01:00.483006 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29403961-2r79p" Nov 27 10:01:01 crc kubenswrapper[4971]: W1127 10:01:01.030040 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod28af66d8_2264_4581_b51a_dfab3e4a7456.slice/crio-571d8c1343730e3c3c8872d57f1db0ace3984b69b6f1d3a05a83085e879b43af WatchSource:0}: Error finding container 571d8c1343730e3c3c8872d57f1db0ace3984b69b6f1d3a05a83085e879b43af: Status 404 returned error can't find the container with id 571d8c1343730e3c3c8872d57f1db0ace3984b69b6f1d3a05a83085e879b43af Nov 27 10:01:01 crc kubenswrapper[4971]: I1127 10:01:01.030378 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29403961-2r79p"] Nov 27 10:01:01 crc kubenswrapper[4971]: I1127 10:01:01.717877 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29403961-2r79p" event={"ID":"28af66d8-2264-4581-b51a-dfab3e4a7456","Type":"ContainerStarted","Data":"36f14d09e5a17d86355bb06592bcceed1c8038b0c297ad6112f68b3d5b2712da"} Nov 27 10:01:01 crc kubenswrapper[4971]: I1127 10:01:01.718611 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29403961-2r79p" event={"ID":"28af66d8-2264-4581-b51a-dfab3e4a7456","Type":"ContainerStarted","Data":"571d8c1343730e3c3c8872d57f1db0ace3984b69b6f1d3a05a83085e879b43af"} Nov 27 10:01:01 crc kubenswrapper[4971]: I1127 10:01:01.749228 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29403961-2r79p" podStartSLOduration=1.74920548 podStartE2EDuration="1.74920548s" podCreationTimestamp="2025-11-27 10:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 10:01:01.741076998 +0000 UTC m=+11299.933120956" watchObservedRunningTime="2025-11-27 10:01:01.74920548 +0000 UTC m=+11299.941249408" Nov 27 10:01:01 crc kubenswrapper[4971]: I1127 
10:01:01.965798 4971 scope.go:117] "RemoveContainer" containerID="dbcb1396dba14457ae596606fcf861328a602f0d6d7c409228b681b96b433242" Nov 27 10:01:06 crc kubenswrapper[4971]: I1127 10:01:06.550651 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 10:01:06 crc kubenswrapper[4971]: E1127 10:01:06.551481 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:01:07 crc kubenswrapper[4971]: I1127 10:01:07.563793 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Nov 27 10:01:07 crc kubenswrapper[4971]: I1127 10:01:07.802741 4971 generic.go:334] "Generic (PLEG): container finished" podID="1cb306c1-aef2-4d15-aa78-621eec32f349" containerID="a38f26d1646ccaf01ad1a732421ece4c5e9143f79ad5ff312cf0943df7466d0c" exitCode=0 Nov 27 10:01:07 crc kubenswrapper[4971]: I1127 10:01:07.802809 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ndghw" event={"ID":"1cb306c1-aef2-4d15-aa78-621eec32f349","Type":"ContainerDied","Data":"a38f26d1646ccaf01ad1a732421ece4c5e9143f79ad5ff312cf0943df7466d0c"} Nov 27 10:01:08 crc kubenswrapper[4971]: I1127 10:01:08.426984 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wpkdl" Nov 27 10:01:08 crc kubenswrapper[4971]: I1127 10:01:08.427037 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wpkdl" Nov 27 10:01:08 crc kubenswrapper[4971]: I1127 10:01:08.820085 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ndghw" event={"ID":"1cb306c1-aef2-4d15-aa78-621eec32f349","Type":"ContainerStarted","Data":"7f7f7ff94be7af2246c5ef6a6f1a58d61d4f9e9755017f84073e53a6c742f7c9"} Nov 27 10:01:08 crc kubenswrapper[4971]: I1127 10:01:08.842362 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ndghw" podStartSLOduration=15.865124985 podStartE2EDuration="31.842341239s" podCreationTimestamp="2025-11-27 10:00:37 +0000 UTC" firstStartedPulling="2025-11-27 10:00:52.551022684 +0000 UTC m=+11290.743066642" lastFinishedPulling="2025-11-27 10:01:08.528238978 +0000 UTC m=+11306.720282896" observedRunningTime="2025-11-27 10:01:08.837276185 +0000 UTC m=+11307.029320113" watchObservedRunningTime="2025-11-27 10:01:08.842341239 +0000 UTC m=+11307.034385157" Nov 27 10:01:09 crc kubenswrapper[4971]: I1127 10:01:09.485512 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-wpkdl" podUID="62e99d9e-1c29-4b27-80fa-e1d9faec4141" containerName="registry-server" probeResult="failure" output=< Nov 27 10:01:09 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 10:01:09 crc kubenswrapper[4971]: > Nov 27 10:01:09 crc kubenswrapper[4971]: I1127 10:01:09.833731 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" 
event={"ID":"5dede177-0f43-453e-b75b-e5ac63add3da","Type":"ContainerStarted","Data":"2bfccb3cd6682978b90262d2c56142155694719a5b975a28cbbc44b8e11dff2a"} Nov 27 10:01:09 crc kubenswrapper[4971]: I1127 10:01:09.852838 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.567001453 podStartE2EDuration="1m11.852803149s" podCreationTimestamp="2025-11-27 09:59:58 +0000 UTC" firstStartedPulling="2025-11-27 10:00:00.274911439 +0000 UTC m=+11238.466955347" lastFinishedPulling="2025-11-27 10:01:07.560713115 +0000 UTC m=+11305.752757043" observedRunningTime="2025-11-27 10:01:09.850497303 +0000 UTC m=+11308.042541221" watchObservedRunningTime="2025-11-27 10:01:09.852803149 +0000 UTC m=+11308.044847067" Nov 27 10:01:10 crc kubenswrapper[4971]: I1127 10:01:10.850843 4971 generic.go:334] "Generic (PLEG): container finished" podID="28af66d8-2264-4581-b51a-dfab3e4a7456" containerID="36f14d09e5a17d86355bb06592bcceed1c8038b0c297ad6112f68b3d5b2712da" exitCode=0 Nov 27 10:01:10 crc kubenswrapper[4971]: I1127 10:01:10.850897 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29403961-2r79p" event={"ID":"28af66d8-2264-4581-b51a-dfab3e4a7456","Type":"ContainerDied","Data":"36f14d09e5a17d86355bb06592bcceed1c8038b0c297ad6112f68b3d5b2712da"} Nov 27 10:01:12 crc kubenswrapper[4971]: I1127 10:01:12.283972 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29403961-2r79p" Nov 27 10:01:12 crc kubenswrapper[4971]: I1127 10:01:12.461223 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28af66d8-2264-4581-b51a-dfab3e4a7456-config-data\") pod \"28af66d8-2264-4581-b51a-dfab3e4a7456\" (UID: \"28af66d8-2264-4581-b51a-dfab3e4a7456\") " Nov 27 10:01:12 crc kubenswrapper[4971]: I1127 10:01:12.461269 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qff6t\" (UniqueName: \"kubernetes.io/projected/28af66d8-2264-4581-b51a-dfab3e4a7456-kube-api-access-qff6t\") pod \"28af66d8-2264-4581-b51a-dfab3e4a7456\" (UID: \"28af66d8-2264-4581-b51a-dfab3e4a7456\") " Nov 27 10:01:12 crc kubenswrapper[4971]: I1127 10:01:12.461340 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28af66d8-2264-4581-b51a-dfab3e4a7456-combined-ca-bundle\") pod \"28af66d8-2264-4581-b51a-dfab3e4a7456\" (UID: \"28af66d8-2264-4581-b51a-dfab3e4a7456\") " Nov 27 10:01:12 crc kubenswrapper[4971]: I1127 10:01:12.461566 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/28af66d8-2264-4581-b51a-dfab3e4a7456-fernet-keys\") pod \"28af66d8-2264-4581-b51a-dfab3e4a7456\" (UID: \"28af66d8-2264-4581-b51a-dfab3e4a7456\") " Nov 27 10:01:12 crc kubenswrapper[4971]: I1127 10:01:12.478668 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28af66d8-2264-4581-b51a-dfab3e4a7456-kube-api-access-qff6t" (OuterVolumeSpecName: "kube-api-access-qff6t") pod "28af66d8-2264-4581-b51a-dfab3e4a7456" (UID: "28af66d8-2264-4581-b51a-dfab3e4a7456"). InnerVolumeSpecName "kube-api-access-qff6t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:01:12 crc kubenswrapper[4971]: I1127 10:01:12.486359 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28af66d8-2264-4581-b51a-dfab3e4a7456-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "28af66d8-2264-4581-b51a-dfab3e4a7456" (UID: "28af66d8-2264-4581-b51a-dfab3e4a7456"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 10:01:12 crc kubenswrapper[4971]: I1127 10:01:12.505278 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28af66d8-2264-4581-b51a-dfab3e4a7456-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "28af66d8-2264-4581-b51a-dfab3e4a7456" (UID: "28af66d8-2264-4581-b51a-dfab3e4a7456"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 10:01:12 crc kubenswrapper[4971]: I1127 10:01:12.539651 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28af66d8-2264-4581-b51a-dfab3e4a7456-config-data" (OuterVolumeSpecName: "config-data") pod "28af66d8-2264-4581-b51a-dfab3e4a7456" (UID: "28af66d8-2264-4581-b51a-dfab3e4a7456"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 10:01:12 crc kubenswrapper[4971]: I1127 10:01:12.564340 4971 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/28af66d8-2264-4581-b51a-dfab3e4a7456-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 27 10:01:12 crc kubenswrapper[4971]: I1127 10:01:12.564377 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28af66d8-2264-4581-b51a-dfab3e4a7456-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 10:01:12 crc kubenswrapper[4971]: I1127 10:01:12.564392 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qff6t\" (UniqueName: \"kubernetes.io/projected/28af66d8-2264-4581-b51a-dfab3e4a7456-kube-api-access-qff6t\") on node \"crc\" DevicePath \"\"" Nov 27 10:01:12 crc kubenswrapper[4971]: I1127 10:01:12.564403 4971 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28af66d8-2264-4581-b51a-dfab3e4a7456-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 27 10:01:12 crc kubenswrapper[4971]: I1127 10:01:12.876712 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29403961-2r79p" event={"ID":"28af66d8-2264-4581-b51a-dfab3e4a7456","Type":"ContainerDied","Data":"571d8c1343730e3c3c8872d57f1db0ace3984b69b6f1d3a05a83085e879b43af"} Nov 27 10:01:12 crc kubenswrapper[4971]: I1127 10:01:12.876767 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="571d8c1343730e3c3c8872d57f1db0ace3984b69b6f1d3a05a83085e879b43af" Nov 27 10:01:12 crc kubenswrapper[4971]: I1127 10:01:12.876781 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29403961-2r79p" Nov 27 10:01:18 crc kubenswrapper[4971]: I1127 10:01:18.190114 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ndghw" Nov 27 10:01:18 crc kubenswrapper[4971]: I1127 10:01:18.191117 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ndghw" Nov 27 10:01:18 crc kubenswrapper[4971]: I1127 10:01:18.245142 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ndghw" Nov 27 10:01:18 crc kubenswrapper[4971]: I1127 10:01:18.499305 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wpkdl" Nov 27 10:01:18 crc kubenswrapper[4971]: I1127 10:01:18.550420 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 10:01:18 crc kubenswrapper[4971]: E1127 10:01:18.551117 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:01:18 crc kubenswrapper[4971]: I1127 10:01:18.565402 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wpkdl" Nov 27 10:01:18 crc kubenswrapper[4971]: I1127 10:01:18.993306 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ndghw" Nov 27 10:01:19 crc kubenswrapper[4971]: I1127 10:01:19.894673 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wpkdl"] Nov 27 10:01:19 crc kubenswrapper[4971]: I1127 10:01:19.951819 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wpkdl" podUID="62e99d9e-1c29-4b27-80fa-e1d9faec4141" containerName="registry-server" containerID="cri-o://1585bf4714eb73c6debb07af11b5ae61d70b595a9e11f072d646e91337736140" gracePeriod=2 Nov 27 10:01:20 crc kubenswrapper[4971]: I1127 10:01:20.474552 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wpkdl" Nov 27 10:01:20 crc kubenswrapper[4971]: I1127 10:01:20.637476 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62e99d9e-1c29-4b27-80fa-e1d9faec4141-catalog-content\") pod \"62e99d9e-1c29-4b27-80fa-e1d9faec4141\" (UID: \"62e99d9e-1c29-4b27-80fa-e1d9faec4141\") " Nov 27 10:01:20 crc kubenswrapper[4971]: I1127 10:01:20.637639 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62e99d9e-1c29-4b27-80fa-e1d9faec4141-utilities\") pod \"62e99d9e-1c29-4b27-80fa-e1d9faec4141\" (UID: \"62e99d9e-1c29-4b27-80fa-e1d9faec4141\") " Nov 27 10:01:20 crc kubenswrapper[4971]: I1127 10:01:20.637734 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmvnl\" (UniqueName: \"kubernetes.io/projected/62e99d9e-1c29-4b27-80fa-e1d9faec4141-kube-api-access-wmvnl\") pod \"62e99d9e-1c29-4b27-80fa-e1d9faec4141\" (UID: \"62e99d9e-1c29-4b27-80fa-e1d9faec4141\") " Nov 27 10:01:20 crc kubenswrapper[4971]: I1127 10:01:20.638695 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62e99d9e-1c29-4b27-80fa-e1d9faec4141-utilities" (OuterVolumeSpecName: "utilities") pod "62e99d9e-1c29-4b27-80fa-e1d9faec4141" (UID: "62e99d9e-1c29-4b27-80fa-e1d9faec4141"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:01:20 crc kubenswrapper[4971]: I1127 10:01:20.646299 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62e99d9e-1c29-4b27-80fa-e1d9faec4141-kube-api-access-wmvnl" (OuterVolumeSpecName: "kube-api-access-wmvnl") pod "62e99d9e-1c29-4b27-80fa-e1d9faec4141" (UID: "62e99d9e-1c29-4b27-80fa-e1d9faec4141"). InnerVolumeSpecName "kube-api-access-wmvnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:01:20 crc kubenswrapper[4971]: I1127 10:01:20.687170 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62e99d9e-1c29-4b27-80fa-e1d9faec4141-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "62e99d9e-1c29-4b27-80fa-e1d9faec4141" (UID: "62e99d9e-1c29-4b27-80fa-e1d9faec4141"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:01:20 crc kubenswrapper[4971]: I1127 10:01:20.740898 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62e99d9e-1c29-4b27-80fa-e1d9faec4141-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 10:01:20 crc kubenswrapper[4971]: I1127 10:01:20.740948 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62e99d9e-1c29-4b27-80fa-e1d9faec4141-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 10:01:20 crc kubenswrapper[4971]: I1127 10:01:20.740961 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmvnl\" (UniqueName: \"kubernetes.io/projected/62e99d9e-1c29-4b27-80fa-e1d9faec4141-kube-api-access-wmvnl\") on node \"crc\" DevicePath \"\"" Nov 27 10:01:20 crc kubenswrapper[4971]: I1127 10:01:20.966852 4971 generic.go:334] "Generic (PLEG): container finished" podID="62e99d9e-1c29-4b27-80fa-e1d9faec4141" containerID="1585bf4714eb73c6debb07af11b5ae61d70b595a9e11f072d646e91337736140" exitCode=0 Nov 27 10:01:20 crc kubenswrapper[4971]: I1127 10:01:20.966933 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wpkdl" event={"ID":"62e99d9e-1c29-4b27-80fa-e1d9faec4141","Type":"ContainerDied","Data":"1585bf4714eb73c6debb07af11b5ae61d70b595a9e11f072d646e91337736140"} Nov 27 10:01:20 crc kubenswrapper[4971]: I1127 10:01:20.967021 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wpkdl" event={"ID":"62e99d9e-1c29-4b27-80fa-e1d9faec4141","Type":"ContainerDied","Data":"20a378f3faae8662956f2a6befee34b19603629e3d580706055956020d1ba9cc"} Nov 27 10:01:20 crc kubenswrapper[4971]: I1127 10:01:20.967055 4971 scope.go:117] "RemoveContainer" containerID="1585bf4714eb73c6debb07af11b5ae61d70b595a9e11f072d646e91337736140" Nov 27 10:01:20 crc kubenswrapper[4971]: I1127 10:01:20.967069 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wpkdl" Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.004448 4971 scope.go:117] "RemoveContainer" containerID="e377d241133a0c5b8bbb860c1c04688e655970d8a2cbc508d2c271f8be04aec9" Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.014973 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wpkdl"] Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.028937 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wpkdl"] Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.037942 4971 scope.go:117] "RemoveContainer" containerID="efc37c1b0f770bf52697ae5a310cb473d82cf007f0e484ad40fdf9b123ea16c2" Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.096408 4971 scope.go:117] "RemoveContainer" containerID="1585bf4714eb73c6debb07af11b5ae61d70b595a9e11f072d646e91337736140" Nov 27 10:01:21 crc kubenswrapper[4971]: E1127 10:01:21.096937 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1585bf4714eb73c6debb07af11b5ae61d70b595a9e11f072d646e91337736140\": container with ID starting with 1585bf4714eb73c6debb07af11b5ae61d70b595a9e11f072d646e91337736140 not found: ID does not exist" containerID="1585bf4714eb73c6debb07af11b5ae61d70b595a9e11f072d646e91337736140" Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.096986 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1585bf4714eb73c6debb07af11b5ae61d70b595a9e11f072d646e91337736140"} err="failed to get container status \"1585bf4714eb73c6debb07af11b5ae61d70b595a9e11f072d646e91337736140\": rpc error: code = NotFound desc = could not find container \"1585bf4714eb73c6debb07af11b5ae61d70b595a9e11f072d646e91337736140\": container with ID starting with 1585bf4714eb73c6debb07af11b5ae61d70b595a9e11f072d646e91337736140 not found: ID does not exist" Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.097008 4971 scope.go:117] "RemoveContainer" containerID="e377d241133a0c5b8bbb860c1c04688e655970d8a2cbc508d2c271f8be04aec9" Nov 27 10:01:21 crc kubenswrapper[4971]: E1127 10:01:21.097388 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e377d241133a0c5b8bbb860c1c04688e655970d8a2cbc508d2c271f8be04aec9\": container with ID starting with e377d241133a0c5b8bbb860c1c04688e655970d8a2cbc508d2c271f8be04aec9 not found: ID does not exist" containerID="e377d241133a0c5b8bbb860c1c04688e655970d8a2cbc508d2c271f8be04aec9" Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.097410 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e377d241133a0c5b8bbb860c1c04688e655970d8a2cbc508d2c271f8be04aec9"} err="failed to get container status \"e377d241133a0c5b8bbb860c1c04688e655970d8a2cbc508d2c271f8be04aec9\": rpc error: code = NotFound desc = could not find container \"e377d241133a0c5b8bbb860c1c04688e655970d8a2cbc508d2c271f8be04aec9\": container with ID starting with e377d241133a0c5b8bbb860c1c04688e655970d8a2cbc508d2c271f8be04aec9 not found: ID does not exist" Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.097443 4971 scope.go:117] "RemoveContainer" containerID="efc37c1b0f770bf52697ae5a310cb473d82cf007f0e484ad40fdf9b123ea16c2" Nov 27 10:01:21 crc kubenswrapper[4971]: E1127 10:01:21.097780 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"efc37c1b0f770bf52697ae5a310cb473d82cf007f0e484ad40fdf9b123ea16c2\": container with ID starting with efc37c1b0f770bf52697ae5a310cb473d82cf007f0e484ad40fdf9b123ea16c2 not found: ID does not exist" containerID="efc37c1b0f770bf52697ae5a310cb473d82cf007f0e484ad40fdf9b123ea16c2" Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.097824 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efc37c1b0f770bf52697ae5a310cb473d82cf007f0e484ad40fdf9b123ea16c2"} err="failed to get container status \"efc37c1b0f770bf52697ae5a310cb473d82cf007f0e484ad40fdf9b123ea16c2\": rpc error: code = NotFound desc = could not find container \"efc37c1b0f770bf52697ae5a310cb473d82cf007f0e484ad40fdf9b123ea16c2\": container with ID starting with efc37c1b0f770bf52697ae5a310cb473d82cf007f0e484ad40fdf9b123ea16c2 not found: ID does not exist" Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.279232 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ndghw"] Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.279474 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ndghw" podUID="1cb306c1-aef2-4d15-aa78-621eec32f349" containerName="registry-server" containerID="cri-o://7f7f7ff94be7af2246c5ef6a6f1a58d61d4f9e9755017f84073e53a6c742f7c9" gracePeriod=2 Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.848605 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ndghw" Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.979804 4971 generic.go:334] "Generic (PLEG): container finished" podID="1cb306c1-aef2-4d15-aa78-621eec32f349" containerID="7f7f7ff94be7af2246c5ef6a6f1a58d61d4f9e9755017f84073e53a6c742f7c9" exitCode=0 Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.979848 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ndghw" event={"ID":"1cb306c1-aef2-4d15-aa78-621eec32f349","Type":"ContainerDied","Data":"7f7f7ff94be7af2246c5ef6a6f1a58d61d4f9e9755017f84073e53a6c742f7c9"} Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.979863 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ndghw" Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.979885 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ndghw" event={"ID":"1cb306c1-aef2-4d15-aa78-621eec32f349","Type":"ContainerDied","Data":"ad3e0e4145b48aa909b76776c585013b84a485434592f32ad25ed88c9a8c3c19"} Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.979933 4971 scope.go:117] "RemoveContainer" containerID="7f7f7ff94be7af2246c5ef6a6f1a58d61d4f9e9755017f84073e53a6c742f7c9" Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.997066 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xpr9d\" (UniqueName: \"kubernetes.io/projected/1cb306c1-aef2-4d15-aa78-621eec32f349-kube-api-access-xpr9d\") pod \"1cb306c1-aef2-4d15-aa78-621eec32f349\" (UID: \"1cb306c1-aef2-4d15-aa78-621eec32f349\") " Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.997224 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1cb306c1-aef2-4d15-aa78-621eec32f349-utilities\") pod \"1cb306c1-aef2-4d15-aa78-621eec32f349\" (UID: \"1cb306c1-aef2-4d15-aa78-621eec32f349\") " Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.997333 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1cb306c1-aef2-4d15-aa78-621eec32f349-catalog-content\") pod \"1cb306c1-aef2-4d15-aa78-621eec32f349\" (UID: \"1cb306c1-aef2-4d15-aa78-621eec32f349\") " Nov 27 10:01:21 crc kubenswrapper[4971]: I1127 10:01:21.998103 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1cb306c1-aef2-4d15-aa78-621eec32f349-utilities" (OuterVolumeSpecName: "utilities") pod "1cb306c1-aef2-4d15-aa78-621eec32f349" (UID: "1cb306c1-aef2-4d15-aa78-621eec32f349"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:01:22 crc kubenswrapper[4971]: I1127 10:01:22.003564 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cb306c1-aef2-4d15-aa78-621eec32f349-kube-api-access-xpr9d" (OuterVolumeSpecName: "kube-api-access-xpr9d") pod "1cb306c1-aef2-4d15-aa78-621eec32f349" (UID: "1cb306c1-aef2-4d15-aa78-621eec32f349"). InnerVolumeSpecName "kube-api-access-xpr9d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:01:22 crc kubenswrapper[4971]: I1127 10:01:22.022703 4971 scope.go:117] "RemoveContainer" containerID="a38f26d1646ccaf01ad1a732421ece4c5e9143f79ad5ff312cf0943df7466d0c" Nov 27 10:01:22 crc kubenswrapper[4971]: I1127 10:01:22.071733 4971 scope.go:117] "RemoveContainer" containerID="462dcf397274c905aa0790384f3a2924e870dba5912ab99c83225e0d99aa6dd2" Nov 27 10:01:22 crc kubenswrapper[4971]: I1127 10:01:22.099518 4971 scope.go:117] "RemoveContainer" containerID="7f7f7ff94be7af2246c5ef6a6f1a58d61d4f9e9755017f84073e53a6c742f7c9" Nov 27 10:01:22 crc kubenswrapper[4971]: E1127 10:01:22.100096 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f7f7ff94be7af2246c5ef6a6f1a58d61d4f9e9755017f84073e53a6c742f7c9\": container with ID starting with 7f7f7ff94be7af2246c5ef6a6f1a58d61d4f9e9755017f84073e53a6c742f7c9 not found: ID does not exist" containerID="7f7f7ff94be7af2246c5ef6a6f1a58d61d4f9e9755017f84073e53a6c742f7c9" Nov 27 10:01:22 crc kubenswrapper[4971]: I1127 10:01:22.100130 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f7f7ff94be7af2246c5ef6a6f1a58d61d4f9e9755017f84073e53a6c742f7c9"} err="failed to get container status \"7f7f7ff94be7af2246c5ef6a6f1a58d61d4f9e9755017f84073e53a6c742f7c9\": rpc error: code = NotFound desc = could not find container \"7f7f7ff94be7af2246c5ef6a6f1a58d61d4f9e9755017f84073e53a6c742f7c9\": container with ID starting with 7f7f7ff94be7af2246c5ef6a6f1a58d61d4f9e9755017f84073e53a6c742f7c9 not found: ID does not exist" Nov 27 10:01:22 crc kubenswrapper[4971]: I1127 10:01:22.100156 4971 scope.go:117] "RemoveContainer" containerID="a38f26d1646ccaf01ad1a732421ece4c5e9143f79ad5ff312cf0943df7466d0c" Nov 27 10:01:22 crc kubenswrapper[4971]: E1127 10:01:22.100385 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a38f26d1646ccaf01ad1a732421ece4c5e9143f79ad5ff312cf0943df7466d0c\": container with ID starting with a38f26d1646ccaf01ad1a732421ece4c5e9143f79ad5ff312cf0943df7466d0c not found: ID does not exist" containerID="a38f26d1646ccaf01ad1a732421ece4c5e9143f79ad5ff312cf0943df7466d0c" Nov 27 10:01:22 crc kubenswrapper[4971]: I1127 10:01:22.100413 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a38f26d1646ccaf01ad1a732421ece4c5e9143f79ad5ff312cf0943df7466d0c"} err="failed to get container status \"a38f26d1646ccaf01ad1a732421ece4c5e9143f79ad5ff312cf0943df7466d0c\": rpc error: code = NotFound desc = could not find container \"a38f26d1646ccaf01ad1a732421ece4c5e9143f79ad5ff312cf0943df7466d0c\": container with ID starting with a38f26d1646ccaf01ad1a732421ece4c5e9143f79ad5ff312cf0943df7466d0c not found: ID does not exist" Nov 27 10:01:22 crc kubenswrapper[4971]: I1127 10:01:22.100428 4971 scope.go:117] "RemoveContainer" containerID="462dcf397274c905aa0790384f3a2924e870dba5912ab99c83225e0d99aa6dd2" Nov 27 10:01:22 crc kubenswrapper[4971]: E1127 10:01:22.100716 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"462dcf397274c905aa0790384f3a2924e870dba5912ab99c83225e0d99aa6dd2\": container with ID starting with 462dcf397274c905aa0790384f3a2924e870dba5912ab99c83225e0d99aa6dd2 not found: ID does not exist" containerID="462dcf397274c905aa0790384f3a2924e870dba5912ab99c83225e0d99aa6dd2" Nov 27 10:01:22 crc 
kubenswrapper[4971]: I1127 10:01:22.100743 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"462dcf397274c905aa0790384f3a2924e870dba5912ab99c83225e0d99aa6dd2"} err="failed to get container status \"462dcf397274c905aa0790384f3a2924e870dba5912ab99c83225e0d99aa6dd2\": rpc error: code = NotFound desc = could not find container \"462dcf397274c905aa0790384f3a2924e870dba5912ab99c83225e0d99aa6dd2\": container with ID starting with 462dcf397274c905aa0790384f3a2924e870dba5912ab99c83225e0d99aa6dd2 not found: ID does not exist" Nov 27 10:01:22 crc kubenswrapper[4971]: I1127 10:01:22.107137 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xpr9d\" (UniqueName: \"kubernetes.io/projected/1cb306c1-aef2-4d15-aa78-621eec32f349-kube-api-access-xpr9d\") on node \"crc\" DevicePath \"\"" Nov 27 10:01:22 crc kubenswrapper[4971]: I1127 10:01:22.107240 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1cb306c1-aef2-4d15-aa78-621eec32f349-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 10:01:22 crc kubenswrapper[4971]: I1127 10:01:22.118994 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1cb306c1-aef2-4d15-aa78-621eec32f349-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1cb306c1-aef2-4d15-aa78-621eec32f349" (UID: "1cb306c1-aef2-4d15-aa78-621eec32f349"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:01:22 crc kubenswrapper[4971]: I1127 10:01:22.209493 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1cb306c1-aef2-4d15-aa78-621eec32f349-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 10:01:22 crc kubenswrapper[4971]: I1127 10:01:22.325643 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ndghw"] Nov 27 10:01:22 crc kubenswrapper[4971]: I1127 10:01:22.341880 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ndghw"] Nov 27 10:01:22 crc kubenswrapper[4971]: I1127 10:01:22.581801 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1cb306c1-aef2-4d15-aa78-621eec32f349" path="/var/lib/kubelet/pods/1cb306c1-aef2-4d15-aa78-621eec32f349/volumes" Nov 27 10:01:22 crc kubenswrapper[4971]: I1127 10:01:22.583877 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62e99d9e-1c29-4b27-80fa-e1d9faec4141" path="/var/lib/kubelet/pods/62e99d9e-1c29-4b27-80fa-e1d9faec4141/volumes" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.127581 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jsz55"] Nov 27 10:01:26 crc kubenswrapper[4971]: E1127 10:01:26.128787 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28af66d8-2264-4581-b51a-dfab3e4a7456" containerName="keystone-cron" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.128807 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="28af66d8-2264-4581-b51a-dfab3e4a7456" containerName="keystone-cron" Nov 27 10:01:26 crc kubenswrapper[4971]: E1127 10:01:26.128838 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62e99d9e-1c29-4b27-80fa-e1d9faec4141" containerName="extract-utilities" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.128846 4971 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="62e99d9e-1c29-4b27-80fa-e1d9faec4141" containerName="extract-utilities" Nov 27 10:01:26 crc kubenswrapper[4971]: E1127 10:01:26.128866 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cb306c1-aef2-4d15-aa78-621eec32f349" containerName="registry-server" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.128875 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cb306c1-aef2-4d15-aa78-621eec32f349" containerName="registry-server" Nov 27 10:01:26 crc kubenswrapper[4971]: E1127 10:01:26.128885 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62e99d9e-1c29-4b27-80fa-e1d9faec4141" containerName="extract-content" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.128893 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="62e99d9e-1c29-4b27-80fa-e1d9faec4141" containerName="extract-content" Nov 27 10:01:26 crc kubenswrapper[4971]: E1127 10:01:26.128908 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cb306c1-aef2-4d15-aa78-621eec32f349" containerName="extract-content" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.128916 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cb306c1-aef2-4d15-aa78-621eec32f349" containerName="extract-content" Nov 27 10:01:26 crc kubenswrapper[4971]: E1127 10:01:26.128932 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62e99d9e-1c29-4b27-80fa-e1d9faec4141" containerName="registry-server" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.128939 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="62e99d9e-1c29-4b27-80fa-e1d9faec4141" containerName="registry-server" Nov 27 10:01:26 crc kubenswrapper[4971]: E1127 10:01:26.128967 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cb306c1-aef2-4d15-aa78-621eec32f349" containerName="extract-utilities" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.128976 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cb306c1-aef2-4d15-aa78-621eec32f349" containerName="extract-utilities" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.129258 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="62e99d9e-1c29-4b27-80fa-e1d9faec4141" containerName="registry-server" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.129275 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="28af66d8-2264-4581-b51a-dfab3e4a7456" containerName="keystone-cron" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.129308 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cb306c1-aef2-4d15-aa78-621eec32f349" containerName="registry-server" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.131262 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jsz55" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.141442 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jsz55"] Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.196476 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99ckv\" (UniqueName: \"kubernetes.io/projected/ed0a008d-d4d3-4c54-8cb6-8df087df4abc-kube-api-access-99ckv\") pod \"redhat-marketplace-jsz55\" (UID: \"ed0a008d-d4d3-4c54-8cb6-8df087df4abc\") " pod="openshift-marketplace/redhat-marketplace-jsz55" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.197055 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed0a008d-d4d3-4c54-8cb6-8df087df4abc-catalog-content\") pod \"redhat-marketplace-jsz55\" (UID: \"ed0a008d-d4d3-4c54-8cb6-8df087df4abc\") " pod="openshift-marketplace/redhat-marketplace-jsz55" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.197138 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed0a008d-d4d3-4c54-8cb6-8df087df4abc-utilities\") pod \"redhat-marketplace-jsz55\" (UID: \"ed0a008d-d4d3-4c54-8cb6-8df087df4abc\") " pod="openshift-marketplace/redhat-marketplace-jsz55" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.298576 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed0a008d-d4d3-4c54-8cb6-8df087df4abc-catalog-content\") pod \"redhat-marketplace-jsz55\" (UID: \"ed0a008d-d4d3-4c54-8cb6-8df087df4abc\") " pod="openshift-marketplace/redhat-marketplace-jsz55" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.298719 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed0a008d-d4d3-4c54-8cb6-8df087df4abc-utilities\") pod \"redhat-marketplace-jsz55\" (UID: \"ed0a008d-d4d3-4c54-8cb6-8df087df4abc\") " pod="openshift-marketplace/redhat-marketplace-jsz55" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.298826 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99ckv\" (UniqueName: \"kubernetes.io/projected/ed0a008d-d4d3-4c54-8cb6-8df087df4abc-kube-api-access-99ckv\") pod \"redhat-marketplace-jsz55\" (UID: \"ed0a008d-d4d3-4c54-8cb6-8df087df4abc\") " pod="openshift-marketplace/redhat-marketplace-jsz55" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.299229 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed0a008d-d4d3-4c54-8cb6-8df087df4abc-catalog-content\") pod \"redhat-marketplace-jsz55\" (UID: \"ed0a008d-d4d3-4c54-8cb6-8df087df4abc\") " pod="openshift-marketplace/redhat-marketplace-jsz55" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.299589 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed0a008d-d4d3-4c54-8cb6-8df087df4abc-utilities\") pod \"redhat-marketplace-jsz55\" (UID: \"ed0a008d-d4d3-4c54-8cb6-8df087df4abc\") " pod="openshift-marketplace/redhat-marketplace-jsz55" Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.332914 4971 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-99ckv\" (UniqueName: \"kubernetes.io/projected/ed0a008d-d4d3-4c54-8cb6-8df087df4abc-kube-api-access-99ckv\") pod \"redhat-marketplace-jsz55\" (UID: \"ed0a008d-d4d3-4c54-8cb6-8df087df4abc\") " pod="openshift-marketplace/redhat-marketplace-jsz55"
Nov 27 10:01:26 crc kubenswrapper[4971]: I1127 10:01:26.467374 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jsz55"
Nov 27 10:01:27 crc kubenswrapper[4971]: I1127 10:01:27.190565 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jsz55"]
Nov 27 10:01:28 crc kubenswrapper[4971]: I1127 10:01:28.075858 4971 generic.go:334] "Generic (PLEG): container finished" podID="ed0a008d-d4d3-4c54-8cb6-8df087df4abc" containerID="7f7fb8f2258a6b847399a371c3a0edda836f15eb9b7de7fa46ddd3f3c211066a" exitCode=0
Nov 27 10:01:28 crc kubenswrapper[4971]: I1127 10:01:28.075928 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jsz55" event={"ID":"ed0a008d-d4d3-4c54-8cb6-8df087df4abc","Type":"ContainerDied","Data":"7f7fb8f2258a6b847399a371c3a0edda836f15eb9b7de7fa46ddd3f3c211066a"}
Nov 27 10:01:28 crc kubenswrapper[4971]: I1127 10:01:28.076161 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jsz55" event={"ID":"ed0a008d-d4d3-4c54-8cb6-8df087df4abc","Type":"ContainerStarted","Data":"4a2927c5c38173a07ca67a1f988603eb472f993cb5e8d0e84e218e63efd4990a"}
Nov 27 10:01:30 crc kubenswrapper[4971]: I1127 10:01:30.551200 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da"
Nov 27 10:01:30 crc kubenswrapper[4971]: E1127 10:01:30.552302 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 10:01:31 crc kubenswrapper[4971]: I1127 10:01:31.116150 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jsz55" event={"ID":"ed0a008d-d4d3-4c54-8cb6-8df087df4abc","Type":"ContainerStarted","Data":"bbfab134e794cff7fee2a61586b957d42cb3224f30fc97ba99cc534eefc84bc7"}
Nov 27 10:01:33 crc kubenswrapper[4971]: I1127 10:01:33.145874 4971 generic.go:334] "Generic (PLEG): container finished" podID="ed0a008d-d4d3-4c54-8cb6-8df087df4abc" containerID="bbfab134e794cff7fee2a61586b957d42cb3224f30fc97ba99cc534eefc84bc7" exitCode=0
Nov 27 10:01:33 crc kubenswrapper[4971]: I1127 10:01:33.145971 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jsz55" event={"ID":"ed0a008d-d4d3-4c54-8cb6-8df087df4abc","Type":"ContainerDied","Data":"bbfab134e794cff7fee2a61586b957d42cb3224f30fc97ba99cc534eefc84bc7"}
Nov 27 10:01:35 crc kubenswrapper[4971]: I1127 10:01:35.168451 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jsz55" event={"ID":"ed0a008d-d4d3-4c54-8cb6-8df087df4abc","Type":"ContainerStarted","Data":"f46570f49004f7757130544589b5d5729dbbc668448e2fd29f830b0c819620b3"}
Nov 27 10:01:35 crc kubenswrapper[4971]: I1127 10:01:35.195154 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jsz55" podStartSLOduration=3.319588282 podStartE2EDuration="9.195129462s" podCreationTimestamp="2025-11-27 10:01:26 +0000 UTC" firstStartedPulling="2025-11-27 10:01:28.077928067 +0000 UTC m=+11326.269971985" lastFinishedPulling="2025-11-27 10:01:33.953469237 +0000 UTC m=+11332.145513165" observedRunningTime="2025-11-27 10:01:35.188553625 +0000 UTC m=+11333.380597553" watchObservedRunningTime="2025-11-27 10:01:35.195129462 +0000 UTC m=+11333.387173390"
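[editor's note] The pod_startup_latency_tracker entry above is internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (10:01:35.195129462 - 10:01:26 = 9.195129462s), and podStartSLOduration appears to be that E2E figure minus the image-pull window taken from the monotonic (m=+...) readings: 9.195129462 - (11332.145513165 - 11326.269971985) = 3.319588282. A small Go check with all values copied from the log line; this is a plausible reading of how the fields relate, not the kubelet's actual bookkeeping code:

package main

import (
	"fmt"
	"time"
)

// Recomputes the redhat-marketplace-jsz55 startup latencies from the
// fields in the log entry above.
func main() {
	created := time.Date(2025, 11, 27, 10, 1, 26, 0, time.UTC)
	running := time.Date(2025, 11, 27, 10, 1, 35, 195129462, time.UTC)
	// Monotonic clock readings (the m=+... suffixes) bounding the image pull.
	firstStartedPulling := 11326.269971985
	lastFinishedPulling := 11332.145513165

	e2e := running.Sub(created).Seconds()             // 9.195129462
	pull := lastFinishedPulling - firstStartedPulling // 5.875541180
	fmt.Printf("e2e=%.9fs pull=%.9fs slo=%.9fs\n", e2e, pull, e2e-pull)
	// Expected (up to float rounding): slo=3.319588282s, matching podStartSLOduration.
}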
Nov 27 10:01:36 crc kubenswrapper[4971]: I1127 10:01:36.467583 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jsz55"
Nov 27 10:01:36 crc kubenswrapper[4971]: I1127 10:01:36.467909 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jsz55"
Nov 27 10:01:36 crc kubenswrapper[4971]: I1127 10:01:36.527937 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jsz55"
Nov 27 10:01:45 crc kubenswrapper[4971]: I1127 10:01:45.550660 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da"
Nov 27 10:01:45 crc kubenswrapper[4971]: E1127 10:01:45.551670 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 10:01:46 crc kubenswrapper[4971]: I1127 10:01:46.537799 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jsz55"
Nov 27 10:01:46 crc kubenswrapper[4971]: I1127 10:01:46.605912 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jsz55"]
Nov 27 10:01:47 crc kubenswrapper[4971]: I1127 10:01:47.301993 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jsz55" podUID="ed0a008d-d4d3-4c54-8cb6-8df087df4abc" containerName="registry-server" containerID="cri-o://f46570f49004f7757130544589b5d5729dbbc668448e2fd29f830b0c819620b3" gracePeriod=2
Nov 27 10:01:48 crc kubenswrapper[4971]: I1127 10:01:48.326246 4971 generic.go:334] "Generic (PLEG): container finished" podID="ed0a008d-d4d3-4c54-8cb6-8df087df4abc" containerID="f46570f49004f7757130544589b5d5729dbbc668448e2fd29f830b0c819620b3" exitCode=0
Nov 27 10:01:48 crc kubenswrapper[4971]: I1127 10:01:48.326524 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jsz55" event={"ID":"ed0a008d-d4d3-4c54-8cb6-8df087df4abc","Type":"ContainerDied","Data":"f46570f49004f7757130544589b5d5729dbbc668448e2fd29f830b0c819620b3"}
Nov 27 10:01:48 crc kubenswrapper[4971]: I1127 10:01:48.438652 4971 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jsz55" Nov 27 10:01:48 crc kubenswrapper[4971]: I1127 10:01:48.593862 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99ckv\" (UniqueName: \"kubernetes.io/projected/ed0a008d-d4d3-4c54-8cb6-8df087df4abc-kube-api-access-99ckv\") pod \"ed0a008d-d4d3-4c54-8cb6-8df087df4abc\" (UID: \"ed0a008d-d4d3-4c54-8cb6-8df087df4abc\") " Nov 27 10:01:48 crc kubenswrapper[4971]: I1127 10:01:48.594081 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed0a008d-d4d3-4c54-8cb6-8df087df4abc-utilities\") pod \"ed0a008d-d4d3-4c54-8cb6-8df087df4abc\" (UID: \"ed0a008d-d4d3-4c54-8cb6-8df087df4abc\") " Nov 27 10:01:48 crc kubenswrapper[4971]: I1127 10:01:48.594189 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed0a008d-d4d3-4c54-8cb6-8df087df4abc-catalog-content\") pod \"ed0a008d-d4d3-4c54-8cb6-8df087df4abc\" (UID: \"ed0a008d-d4d3-4c54-8cb6-8df087df4abc\") " Nov 27 10:01:48 crc kubenswrapper[4971]: I1127 10:01:48.595523 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed0a008d-d4d3-4c54-8cb6-8df087df4abc-utilities" (OuterVolumeSpecName: "utilities") pod "ed0a008d-d4d3-4c54-8cb6-8df087df4abc" (UID: "ed0a008d-d4d3-4c54-8cb6-8df087df4abc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:01:48 crc kubenswrapper[4971]: I1127 10:01:48.617789 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed0a008d-d4d3-4c54-8cb6-8df087df4abc-kube-api-access-99ckv" (OuterVolumeSpecName: "kube-api-access-99ckv") pod "ed0a008d-d4d3-4c54-8cb6-8df087df4abc" (UID: "ed0a008d-d4d3-4c54-8cb6-8df087df4abc"). InnerVolumeSpecName "kube-api-access-99ckv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:01:48 crc kubenswrapper[4971]: I1127 10:01:48.619597 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed0a008d-d4d3-4c54-8cb6-8df087df4abc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ed0a008d-d4d3-4c54-8cb6-8df087df4abc" (UID: "ed0a008d-d4d3-4c54-8cb6-8df087df4abc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:01:48 crc kubenswrapper[4971]: I1127 10:01:48.699226 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed0a008d-d4d3-4c54-8cb6-8df087df4abc-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 10:01:48 crc kubenswrapper[4971]: I1127 10:01:48.699273 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed0a008d-d4d3-4c54-8cb6-8df087df4abc-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 10:01:48 crc kubenswrapper[4971]: I1127 10:01:48.699285 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99ckv\" (UniqueName: \"kubernetes.io/projected/ed0a008d-d4d3-4c54-8cb6-8df087df4abc-kube-api-access-99ckv\") on node \"crc\" DevicePath \"\"" Nov 27 10:01:49 crc kubenswrapper[4971]: I1127 10:01:49.341165 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jsz55" event={"ID":"ed0a008d-d4d3-4c54-8cb6-8df087df4abc","Type":"ContainerDied","Data":"4a2927c5c38173a07ca67a1f988603eb472f993cb5e8d0e84e218e63efd4990a"} Nov 27 10:01:49 crc kubenswrapper[4971]: I1127 10:01:49.341225 4971 scope.go:117] "RemoveContainer" containerID="f46570f49004f7757130544589b5d5729dbbc668448e2fd29f830b0c819620b3" Nov 27 10:01:49 crc kubenswrapper[4971]: I1127 10:01:49.341254 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jsz55" Nov 27 10:01:49 crc kubenswrapper[4971]: I1127 10:01:49.376846 4971 scope.go:117] "RemoveContainer" containerID="bbfab134e794cff7fee2a61586b957d42cb3224f30fc97ba99cc534eefc84bc7" Nov 27 10:01:49 crc kubenswrapper[4971]: I1127 10:01:49.385601 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jsz55"] Nov 27 10:01:49 crc kubenswrapper[4971]: I1127 10:01:49.393499 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jsz55"] Nov 27 10:01:49 crc kubenswrapper[4971]: I1127 10:01:49.404276 4971 scope.go:117] "RemoveContainer" containerID="7f7fb8f2258a6b847399a371c3a0edda836f15eb9b7de7fa46ddd3f3c211066a" Nov 27 10:01:50 crc kubenswrapper[4971]: I1127 10:01:50.565820 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed0a008d-d4d3-4c54-8cb6-8df087df4abc" path="/var/lib/kubelet/pods/ed0a008d-d4d3-4c54-8cb6-8df087df4abc/volumes" Nov 27 10:01:59 crc kubenswrapper[4971]: I1127 10:01:59.550392 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 10:01:59 crc kubenswrapper[4971]: E1127 10:01:59.551241 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:02:14 crc kubenswrapper[4971]: I1127 10:02:14.550410 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 10:02:14 crc kubenswrapper[4971]: E1127 10:02:14.551151 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:02:27 crc kubenswrapper[4971]: I1127 10:02:27.550884 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 10:02:27 crc kubenswrapper[4971]: E1127 10:02:27.551545 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:02:39 crc kubenswrapper[4971]: I1127 10:02:39.550762 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 10:02:39 crc kubenswrapper[4971]: E1127 10:02:39.551547 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:02:52 crc kubenswrapper[4971]: I1127 10:02:52.570914 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 10:02:52 crc kubenswrapper[4971]: E1127 10:02:52.574882 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:03:06 crc kubenswrapper[4971]: I1127 10:03:06.551359 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 10:03:06 crc kubenswrapper[4971]: E1127 10:03:06.552160 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:03:21 crc kubenswrapper[4971]: I1127 10:03:21.550963 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 10:03:21 crc kubenswrapper[4971]: E1127 10:03:21.552378 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:03:32 crc kubenswrapper[4971]: I1127 10:03:32.556812 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 10:03:32 crc kubenswrapper[4971]: E1127 10:03:32.557695 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:03:43 crc kubenswrapper[4971]: I1127 10:03:43.550983 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 10:03:43 crc kubenswrapper[4971]: E1127 10:03:43.551728 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:03:54 crc kubenswrapper[4971]: I1127 10:03:54.552674 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 10:03:54 crc kubenswrapper[4971]: E1127 10:03:54.553657 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:04:08 crc kubenswrapper[4971]: I1127 10:04:08.551058 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 10:04:08 crc kubenswrapper[4971]: E1127 10:04:08.551768 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:04:22 crc kubenswrapper[4971]: I1127 10:04:22.558361 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 10:04:22 crc kubenswrapper[4971]: E1127 10:04:22.559281 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:04:34 crc kubenswrapper[4971]: I1127 10:04:34.550900 4971 
scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 10:04:35 crc kubenswrapper[4971]: I1127 10:04:35.163810 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"c7d82975514866933c5ee8fe8a228f029908e4cbdf64051c8a60d36ef2d769e5"} Nov 27 10:06:56 crc kubenswrapper[4971]: I1127 10:06:56.413076 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 10:06:56 crc kubenswrapper[4971]: I1127 10:06:56.413659 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 10:07:26 crc kubenswrapper[4971]: I1127 10:07:26.413617 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 10:07:26 crc kubenswrapper[4971]: I1127 10:07:26.414282 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 10:07:56 crc kubenswrapper[4971]: I1127 10:07:56.413925 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 10:07:56 crc kubenswrapper[4971]: I1127 10:07:56.414789 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 10:07:56 crc kubenswrapper[4971]: I1127 10:07:56.414841 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 10:07:56 crc kubenswrapper[4971]: I1127 10:07:56.415738 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c7d82975514866933c5ee8fe8a228f029908e4cbdf64051c8a60d36ef2d769e5"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 10:07:56 crc kubenswrapper[4971]: I1127 10:07:56.415807 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" 
podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://c7d82975514866933c5ee8fe8a228f029908e4cbdf64051c8a60d36ef2d769e5" gracePeriod=600 Nov 27 10:07:57 crc kubenswrapper[4971]: I1127 10:07:57.470896 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="c7d82975514866933c5ee8fe8a228f029908e4cbdf64051c8a60d36ef2d769e5" exitCode=0 Nov 27 10:07:57 crc kubenswrapper[4971]: I1127 10:07:57.470958 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"c7d82975514866933c5ee8fe8a228f029908e4cbdf64051c8a60d36ef2d769e5"} Nov 27 10:07:57 crc kubenswrapper[4971]: I1127 10:07:57.471762 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce"} Nov 27 10:07:57 crc kubenswrapper[4971]: I1127 10:07:57.471786 4971 scope.go:117] "RemoveContainer" containerID="32a90286264c9bed67c869ab67cc4bfe55ac520174eb5ee9f1f031fb4d0905da" Nov 27 10:08:19 crc kubenswrapper[4971]: I1127 10:08:19.351077 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dm6pq"] Nov 27 10:08:19 crc kubenswrapper[4971]: E1127 10:08:19.352162 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed0a008d-d4d3-4c54-8cb6-8df087df4abc" containerName="extract-utilities" Nov 27 10:08:19 crc kubenswrapper[4971]: I1127 10:08:19.352179 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed0a008d-d4d3-4c54-8cb6-8df087df4abc" containerName="extract-utilities" Nov 27 10:08:19 crc kubenswrapper[4971]: E1127 10:08:19.352207 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed0a008d-d4d3-4c54-8cb6-8df087df4abc" containerName="extract-content" Nov 27 10:08:19 crc kubenswrapper[4971]: I1127 10:08:19.352214 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed0a008d-d4d3-4c54-8cb6-8df087df4abc" containerName="extract-content" Nov 27 10:08:19 crc kubenswrapper[4971]: E1127 10:08:19.352227 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed0a008d-d4d3-4c54-8cb6-8df087df4abc" containerName="registry-server" Nov 27 10:08:19 crc kubenswrapper[4971]: I1127 10:08:19.352234 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed0a008d-d4d3-4c54-8cb6-8df087df4abc" containerName="registry-server" Nov 27 10:08:19 crc kubenswrapper[4971]: I1127 10:08:19.352476 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed0a008d-d4d3-4c54-8cb6-8df087df4abc" containerName="registry-server" Nov 27 10:08:19 crc kubenswrapper[4971]: I1127 10:08:19.355857 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dm6pq" Nov 27 10:08:19 crc kubenswrapper[4971]: I1127 10:08:19.364287 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dm6pq"] Nov 27 10:08:19 crc kubenswrapper[4971]: I1127 10:08:19.522624 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrcjt\" (UniqueName: \"kubernetes.io/projected/2f550be8-4da6-4b9b-a275-472341f42367-kube-api-access-qrcjt\") pod \"community-operators-dm6pq\" (UID: \"2f550be8-4da6-4b9b-a275-472341f42367\") " pod="openshift-marketplace/community-operators-dm6pq" Nov 27 10:08:19 crc kubenswrapper[4971]: I1127 10:08:19.522706 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f550be8-4da6-4b9b-a275-472341f42367-utilities\") pod \"community-operators-dm6pq\" (UID: \"2f550be8-4da6-4b9b-a275-472341f42367\") " pod="openshift-marketplace/community-operators-dm6pq" Nov 27 10:08:19 crc kubenswrapper[4971]: I1127 10:08:19.522900 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f550be8-4da6-4b9b-a275-472341f42367-catalog-content\") pod \"community-operators-dm6pq\" (UID: \"2f550be8-4da6-4b9b-a275-472341f42367\") " pod="openshift-marketplace/community-operators-dm6pq" Nov 27 10:08:19 crc kubenswrapper[4971]: I1127 10:08:19.627232 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrcjt\" (UniqueName: \"kubernetes.io/projected/2f550be8-4da6-4b9b-a275-472341f42367-kube-api-access-qrcjt\") pod \"community-operators-dm6pq\" (UID: \"2f550be8-4da6-4b9b-a275-472341f42367\") " pod="openshift-marketplace/community-operators-dm6pq" Nov 27 10:08:19 crc kubenswrapper[4971]: I1127 10:08:19.627329 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f550be8-4da6-4b9b-a275-472341f42367-utilities\") pod \"community-operators-dm6pq\" (UID: \"2f550be8-4da6-4b9b-a275-472341f42367\") " pod="openshift-marketplace/community-operators-dm6pq" Nov 27 10:08:19 crc kubenswrapper[4971]: I1127 10:08:19.627882 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f550be8-4da6-4b9b-a275-472341f42367-catalog-content\") pod \"community-operators-dm6pq\" (UID: \"2f550be8-4da6-4b9b-a275-472341f42367\") " pod="openshift-marketplace/community-operators-dm6pq" Nov 27 10:08:19 crc kubenswrapper[4971]: I1127 10:08:19.628476 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f550be8-4da6-4b9b-a275-472341f42367-catalog-content\") pod \"community-operators-dm6pq\" (UID: \"2f550be8-4da6-4b9b-a275-472341f42367\") " pod="openshift-marketplace/community-operators-dm6pq" Nov 27 10:08:19 crc kubenswrapper[4971]: I1127 10:08:19.628962 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f550be8-4da6-4b9b-a275-472341f42367-utilities\") pod \"community-operators-dm6pq\" (UID: \"2f550be8-4da6-4b9b-a275-472341f42367\") " pod="openshift-marketplace/community-operators-dm6pq" Nov 27 10:08:19 crc kubenswrapper[4971]: I1127 10:08:19.648138 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-qrcjt\" (UniqueName: \"kubernetes.io/projected/2f550be8-4da6-4b9b-a275-472341f42367-kube-api-access-qrcjt\") pod \"community-operators-dm6pq\" (UID: \"2f550be8-4da6-4b9b-a275-472341f42367\") " pod="openshift-marketplace/community-operators-dm6pq" Nov 27 10:08:19 crc kubenswrapper[4971]: I1127 10:08:19.676832 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dm6pq" Nov 27 10:08:20 crc kubenswrapper[4971]: I1127 10:08:20.717942 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dm6pq"] Nov 27 10:08:20 crc kubenswrapper[4971]: I1127 10:08:20.774606 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dm6pq" event={"ID":"2f550be8-4da6-4b9b-a275-472341f42367","Type":"ContainerStarted","Data":"da3e9cb91f43b811014aadda5412a3c62d5623c6429a216a71968bc8f8a6de25"} Nov 27 10:08:21 crc kubenswrapper[4971]: I1127 10:08:21.792208 4971 generic.go:334] "Generic (PLEG): container finished" podID="2f550be8-4da6-4b9b-a275-472341f42367" containerID="b9550bae11dbe0d809d54c0087d8ca7abe43ad327b6a721e02479e69c5c933ac" exitCode=0 Nov 27 10:08:21 crc kubenswrapper[4971]: I1127 10:08:21.792741 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dm6pq" event={"ID":"2f550be8-4da6-4b9b-a275-472341f42367","Type":"ContainerDied","Data":"b9550bae11dbe0d809d54c0087d8ca7abe43ad327b6a721e02479e69c5c933ac"} Nov 27 10:08:21 crc kubenswrapper[4971]: I1127 10:08:21.801285 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 27 10:08:23 crc kubenswrapper[4971]: I1127 10:08:23.818057 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dm6pq" event={"ID":"2f550be8-4da6-4b9b-a275-472341f42367","Type":"ContainerStarted","Data":"0691967c342d1bbb6fb91ae0d0a67742a3e9e2dfaeb8fd9c781f8a48011eb51a"} Nov 27 10:08:24 crc kubenswrapper[4971]: I1127 10:08:24.831355 4971 generic.go:334] "Generic (PLEG): container finished" podID="2f550be8-4da6-4b9b-a275-472341f42367" containerID="0691967c342d1bbb6fb91ae0d0a67742a3e9e2dfaeb8fd9c781f8a48011eb51a" exitCode=0 Nov 27 10:08:24 crc kubenswrapper[4971]: I1127 10:08:24.832989 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dm6pq" event={"ID":"2f550be8-4da6-4b9b-a275-472341f42367","Type":"ContainerDied","Data":"0691967c342d1bbb6fb91ae0d0a67742a3e9e2dfaeb8fd9c781f8a48011eb51a"} Nov 27 10:08:25 crc kubenswrapper[4971]: I1127 10:08:25.845394 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dm6pq" event={"ID":"2f550be8-4da6-4b9b-a275-472341f42367","Type":"ContainerStarted","Data":"ce62e6d53a0bf78620abeabbbca3750c3b1d349aba60c3917f090108d2cdb00c"} Nov 27 10:08:25 crc kubenswrapper[4971]: I1127 10:08:25.863079 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dm6pq" podStartSLOduration=3.296341361 podStartE2EDuration="6.863061332s" podCreationTimestamp="2025-11-27 10:08:19 +0000 UTC" firstStartedPulling="2025-11-27 10:08:21.800953163 +0000 UTC m=+11739.992997081" lastFinishedPulling="2025-11-27 10:08:25.367673134 +0000 UTC m=+11743.559717052" observedRunningTime="2025-11-27 10:08:25.861252241 +0000 UTC m=+11744.053296169" watchObservedRunningTime="2025-11-27 
10:08:25.863061332 +0000 UTC m=+11744.055105250" Nov 27 10:08:29 crc kubenswrapper[4971]: I1127 10:08:29.678320 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dm6pq" Nov 27 10:08:29 crc kubenswrapper[4971]: I1127 10:08:29.679531 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dm6pq" Nov 27 10:08:29 crc kubenswrapper[4971]: I1127 10:08:29.737581 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dm6pq" Nov 27 10:08:39 crc kubenswrapper[4971]: I1127 10:08:39.737639 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dm6pq" Nov 27 10:08:39 crc kubenswrapper[4971]: I1127 10:08:39.823187 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dm6pq"] Nov 27 10:08:39 crc kubenswrapper[4971]: I1127 10:08:39.995030 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dm6pq" podUID="2f550be8-4da6-4b9b-a275-472341f42367" containerName="registry-server" containerID="cri-o://ce62e6d53a0bf78620abeabbbca3750c3b1d349aba60c3917f090108d2cdb00c" gracePeriod=2 Nov 27 10:08:40 crc kubenswrapper[4971]: I1127 10:08:40.719651 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dm6pq" Nov 27 10:08:40 crc kubenswrapper[4971]: I1127 10:08:40.838993 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrcjt\" (UniqueName: \"kubernetes.io/projected/2f550be8-4da6-4b9b-a275-472341f42367-kube-api-access-qrcjt\") pod \"2f550be8-4da6-4b9b-a275-472341f42367\" (UID: \"2f550be8-4da6-4b9b-a275-472341f42367\") " Nov 27 10:08:40 crc kubenswrapper[4971]: I1127 10:08:40.839094 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f550be8-4da6-4b9b-a275-472341f42367-utilities\") pod \"2f550be8-4da6-4b9b-a275-472341f42367\" (UID: \"2f550be8-4da6-4b9b-a275-472341f42367\") " Nov 27 10:08:40 crc kubenswrapper[4971]: I1127 10:08:40.839191 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f550be8-4da6-4b9b-a275-472341f42367-catalog-content\") pod \"2f550be8-4da6-4b9b-a275-472341f42367\" (UID: \"2f550be8-4da6-4b9b-a275-472341f42367\") " Nov 27 10:08:40 crc kubenswrapper[4971]: I1127 10:08:40.840175 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f550be8-4da6-4b9b-a275-472341f42367-utilities" (OuterVolumeSpecName: "utilities") pod "2f550be8-4da6-4b9b-a275-472341f42367" (UID: "2f550be8-4da6-4b9b-a275-472341f42367"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:08:40 crc kubenswrapper[4971]: I1127 10:08:40.846667 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f550be8-4da6-4b9b-a275-472341f42367-kube-api-access-qrcjt" (OuterVolumeSpecName: "kube-api-access-qrcjt") pod "2f550be8-4da6-4b9b-a275-472341f42367" (UID: "2f550be8-4da6-4b9b-a275-472341f42367"). InnerVolumeSpecName "kube-api-access-qrcjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:08:40 crc kubenswrapper[4971]: I1127 10:08:40.911843 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f550be8-4da6-4b9b-a275-472341f42367-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2f550be8-4da6-4b9b-a275-472341f42367" (UID: "2f550be8-4da6-4b9b-a275-472341f42367"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:08:40 crc kubenswrapper[4971]: I1127 10:08:40.941558 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f550be8-4da6-4b9b-a275-472341f42367-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 10:08:40 crc kubenswrapper[4971]: I1127 10:08:40.941596 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f550be8-4da6-4b9b-a275-472341f42367-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 10:08:40 crc kubenswrapper[4971]: I1127 10:08:40.941610 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrcjt\" (UniqueName: \"kubernetes.io/projected/2f550be8-4da6-4b9b-a275-472341f42367-kube-api-access-qrcjt\") on node \"crc\" DevicePath \"\"" Nov 27 10:08:41 crc kubenswrapper[4971]: I1127 10:08:41.012371 4971 generic.go:334] "Generic (PLEG): container finished" podID="2f550be8-4da6-4b9b-a275-472341f42367" containerID="ce62e6d53a0bf78620abeabbbca3750c3b1d349aba60c3917f090108d2cdb00c" exitCode=0 Nov 27 10:08:41 crc kubenswrapper[4971]: I1127 10:08:41.012418 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dm6pq" event={"ID":"2f550be8-4da6-4b9b-a275-472341f42367","Type":"ContainerDied","Data":"ce62e6d53a0bf78620abeabbbca3750c3b1d349aba60c3917f090108d2cdb00c"} Nov 27 10:08:41 crc kubenswrapper[4971]: I1127 10:08:41.012464 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dm6pq" event={"ID":"2f550be8-4da6-4b9b-a275-472341f42367","Type":"ContainerDied","Data":"da3e9cb91f43b811014aadda5412a3c62d5623c6429a216a71968bc8f8a6de25"} Nov 27 10:08:41 crc kubenswrapper[4971]: I1127 10:08:41.012487 4971 scope.go:117] "RemoveContainer" containerID="ce62e6d53a0bf78620abeabbbca3750c3b1d349aba60c3917f090108d2cdb00c" Nov 27 10:08:41 crc kubenswrapper[4971]: I1127 10:08:41.012517 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dm6pq" Nov 27 10:08:41 crc kubenswrapper[4971]: I1127 10:08:41.048331 4971 scope.go:117] "RemoveContainer" containerID="0691967c342d1bbb6fb91ae0d0a67742a3e9e2dfaeb8fd9c781f8a48011eb51a" Nov 27 10:08:41 crc kubenswrapper[4971]: I1127 10:08:41.094838 4971 scope.go:117] "RemoveContainer" containerID="b9550bae11dbe0d809d54c0087d8ca7abe43ad327b6a721e02479e69c5c933ac" Nov 27 10:08:41 crc kubenswrapper[4971]: I1127 10:08:41.099601 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dm6pq"] Nov 27 10:08:41 crc kubenswrapper[4971]: I1127 10:08:41.114616 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dm6pq"] Nov 27 10:08:41 crc kubenswrapper[4971]: I1127 10:08:41.142777 4971 scope.go:117] "RemoveContainer" containerID="ce62e6d53a0bf78620abeabbbca3750c3b1d349aba60c3917f090108d2cdb00c" Nov 27 10:08:41 crc kubenswrapper[4971]: E1127 10:08:41.146036 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce62e6d53a0bf78620abeabbbca3750c3b1d349aba60c3917f090108d2cdb00c\": container with ID starting with ce62e6d53a0bf78620abeabbbca3750c3b1d349aba60c3917f090108d2cdb00c not found: ID does not exist" containerID="ce62e6d53a0bf78620abeabbbca3750c3b1d349aba60c3917f090108d2cdb00c" Nov 27 10:08:41 crc kubenswrapper[4971]: I1127 10:08:41.146088 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce62e6d53a0bf78620abeabbbca3750c3b1d349aba60c3917f090108d2cdb00c"} err="failed to get container status \"ce62e6d53a0bf78620abeabbbca3750c3b1d349aba60c3917f090108d2cdb00c\": rpc error: code = NotFound desc = could not find container \"ce62e6d53a0bf78620abeabbbca3750c3b1d349aba60c3917f090108d2cdb00c\": container with ID starting with ce62e6d53a0bf78620abeabbbca3750c3b1d349aba60c3917f090108d2cdb00c not found: ID does not exist" Nov 27 10:08:41 crc kubenswrapper[4971]: I1127 10:08:41.146123 4971 scope.go:117] "RemoveContainer" containerID="0691967c342d1bbb6fb91ae0d0a67742a3e9e2dfaeb8fd9c781f8a48011eb51a" Nov 27 10:08:41 crc kubenswrapper[4971]: E1127 10:08:41.149962 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0691967c342d1bbb6fb91ae0d0a67742a3e9e2dfaeb8fd9c781f8a48011eb51a\": container with ID starting with 0691967c342d1bbb6fb91ae0d0a67742a3e9e2dfaeb8fd9c781f8a48011eb51a not found: ID does not exist" containerID="0691967c342d1bbb6fb91ae0d0a67742a3e9e2dfaeb8fd9c781f8a48011eb51a" Nov 27 10:08:41 crc kubenswrapper[4971]: I1127 10:08:41.149997 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0691967c342d1bbb6fb91ae0d0a67742a3e9e2dfaeb8fd9c781f8a48011eb51a"} err="failed to get container status \"0691967c342d1bbb6fb91ae0d0a67742a3e9e2dfaeb8fd9c781f8a48011eb51a\": rpc error: code = NotFound desc = could not find container \"0691967c342d1bbb6fb91ae0d0a67742a3e9e2dfaeb8fd9c781f8a48011eb51a\": container with ID starting with 0691967c342d1bbb6fb91ae0d0a67742a3e9e2dfaeb8fd9c781f8a48011eb51a not found: ID does not exist" Nov 27 10:08:41 crc kubenswrapper[4971]: I1127 10:08:41.150018 4971 scope.go:117] "RemoveContainer" containerID="b9550bae11dbe0d809d54c0087d8ca7abe43ad327b6a721e02479e69c5c933ac" Nov 27 10:08:41 crc kubenswrapper[4971]: E1127 10:08:41.153351 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b9550bae11dbe0d809d54c0087d8ca7abe43ad327b6a721e02479e69c5c933ac\": container with ID starting with b9550bae11dbe0d809d54c0087d8ca7abe43ad327b6a721e02479e69c5c933ac not found: ID does not exist" containerID="b9550bae11dbe0d809d54c0087d8ca7abe43ad327b6a721e02479e69c5c933ac" Nov 27 10:08:41 crc kubenswrapper[4971]: I1127 10:08:41.153387 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9550bae11dbe0d809d54c0087d8ca7abe43ad327b6a721e02479e69c5c933ac"} err="failed to get container status \"b9550bae11dbe0d809d54c0087d8ca7abe43ad327b6a721e02479e69c5c933ac\": rpc error: code = NotFound desc = could not find container \"b9550bae11dbe0d809d54c0087d8ca7abe43ad327b6a721e02479e69c5c933ac\": container with ID starting with b9550bae11dbe0d809d54c0087d8ca7abe43ad327b6a721e02479e69c5c933ac not found: ID does not exist" Nov 27 10:08:41 crc kubenswrapper[4971]: E1127 10:08:41.204694 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f550be8_4da6_4b9b_a275_472341f42367.slice/crio-da3e9cb91f43b811014aadda5412a3c62d5623c6429a216a71968bc8f8a6de25\": RecentStats: unable to find data in memory cache]" Nov 27 10:08:42 crc kubenswrapper[4971]: I1127 10:08:42.564303 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f550be8-4da6-4b9b-a275-472341f42367" path="/var/lib/kubelet/pods/2f550be8-4da6-4b9b-a275-472341f42367/volumes" Nov 27 10:09:56 crc kubenswrapper[4971]: I1127 10:09:56.412801 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 10:09:56 crc kubenswrapper[4971]: I1127 10:09:56.413326 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 10:10:26 crc kubenswrapper[4971]: I1127 10:10:26.413383 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 10:10:26 crc kubenswrapper[4971]: I1127 10:10:26.414115 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 10:10:56 crc kubenswrapper[4971]: I1127 10:10:56.412841 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 10:10:56 crc kubenswrapper[4971]: I1127 10:10:56.413589 4971 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 10:10:56 crc kubenswrapper[4971]: I1127 10:10:56.413647 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 10:10:56 crc kubenswrapper[4971]: I1127 10:10:56.414759 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 10:10:56 crc kubenswrapper[4971]: I1127 10:10:56.414822 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" gracePeriod=600 Nov 27 10:10:56 crc kubenswrapper[4971]: E1127 10:10:56.540731 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:10:56 crc kubenswrapper[4971]: I1127 10:10:56.556667 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" exitCode=0 Nov 27 10:10:56 crc kubenswrapper[4971]: I1127 10:10:56.578930 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce"} Nov 27 10:10:56 crc kubenswrapper[4971]: I1127 10:10:56.578989 4971 scope.go:117] "RemoveContainer" containerID="c7d82975514866933c5ee8fe8a228f029908e4cbdf64051c8a60d36ef2d769e5" Nov 27 10:10:56 crc kubenswrapper[4971]: I1127 10:10:56.579881 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:10:56 crc kubenswrapper[4971]: E1127 10:10:56.580217 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:11:09 crc kubenswrapper[4971]: I1127 10:11:09.551541 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:11:09 crc kubenswrapper[4971]: E1127 10:11:09.552239 4971 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:11:23 crc kubenswrapper[4971]: I1127 10:11:23.550381 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:11:23 crc kubenswrapper[4971]: E1127 10:11:23.551215 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:11:34 crc kubenswrapper[4971]: I1127 10:11:34.552007 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:11:34 crc kubenswrapper[4971]: E1127 10:11:34.552877 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:11:49 crc kubenswrapper[4971]: I1127 10:11:49.550441 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:11:49 crc kubenswrapper[4971]: E1127 10:11:49.551323 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:11:57 crc kubenswrapper[4971]: I1127 10:11:57.795744 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jddgb"] Nov 27 10:11:57 crc kubenswrapper[4971]: E1127 10:11:57.796772 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f550be8-4da6-4b9b-a275-472341f42367" containerName="extract-content" Nov 27 10:11:57 crc kubenswrapper[4971]: I1127 10:11:57.796786 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f550be8-4da6-4b9b-a275-472341f42367" containerName="extract-content" Nov 27 10:11:57 crc kubenswrapper[4971]: E1127 10:11:57.796808 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f550be8-4da6-4b9b-a275-472341f42367" containerName="registry-server" Nov 27 10:11:57 crc kubenswrapper[4971]: I1127 10:11:57.796814 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f550be8-4da6-4b9b-a275-472341f42367" containerName="registry-server" Nov 27 10:11:57 crc kubenswrapper[4971]: E1127 10:11:57.796823 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f550be8-4da6-4b9b-a275-472341f42367" 
containerName="extract-utilities" Nov 27 10:11:57 crc kubenswrapper[4971]: I1127 10:11:57.796830 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f550be8-4da6-4b9b-a275-472341f42367" containerName="extract-utilities" Nov 27 10:11:57 crc kubenswrapper[4971]: I1127 10:11:57.797055 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f550be8-4da6-4b9b-a275-472341f42367" containerName="registry-server" Nov 27 10:11:57 crc kubenswrapper[4971]: I1127 10:11:57.798685 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jddgb" Nov 27 10:11:57 crc kubenswrapper[4971]: I1127 10:11:57.822201 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jddgb"] Nov 27 10:11:57 crc kubenswrapper[4971]: I1127 10:11:57.911872 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29dlx\" (UniqueName: \"kubernetes.io/projected/02bcaa16-3617-43f1-864b-e91f271cf409-kube-api-access-29dlx\") pod \"certified-operators-jddgb\" (UID: \"02bcaa16-3617-43f1-864b-e91f271cf409\") " pod="openshift-marketplace/certified-operators-jddgb" Nov 27 10:11:57 crc kubenswrapper[4971]: I1127 10:11:57.912415 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02bcaa16-3617-43f1-864b-e91f271cf409-utilities\") pod \"certified-operators-jddgb\" (UID: \"02bcaa16-3617-43f1-864b-e91f271cf409\") " pod="openshift-marketplace/certified-operators-jddgb" Nov 27 10:11:57 crc kubenswrapper[4971]: I1127 10:11:57.912610 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02bcaa16-3617-43f1-864b-e91f271cf409-catalog-content\") pod \"certified-operators-jddgb\" (UID: \"02bcaa16-3617-43f1-864b-e91f271cf409\") " pod="openshift-marketplace/certified-operators-jddgb" Nov 27 10:11:58 crc kubenswrapper[4971]: I1127 10:11:58.015429 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02bcaa16-3617-43f1-864b-e91f271cf409-utilities\") pod \"certified-operators-jddgb\" (UID: \"02bcaa16-3617-43f1-864b-e91f271cf409\") " pod="openshift-marketplace/certified-operators-jddgb" Nov 27 10:11:58 crc kubenswrapper[4971]: I1127 10:11:58.015507 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02bcaa16-3617-43f1-864b-e91f271cf409-catalog-content\") pod \"certified-operators-jddgb\" (UID: \"02bcaa16-3617-43f1-864b-e91f271cf409\") " pod="openshift-marketplace/certified-operators-jddgb" Nov 27 10:11:58 crc kubenswrapper[4971]: I1127 10:11:58.015653 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29dlx\" (UniqueName: \"kubernetes.io/projected/02bcaa16-3617-43f1-864b-e91f271cf409-kube-api-access-29dlx\") pod \"certified-operators-jddgb\" (UID: \"02bcaa16-3617-43f1-864b-e91f271cf409\") " pod="openshift-marketplace/certified-operators-jddgb" Nov 27 10:11:58 crc kubenswrapper[4971]: I1127 10:11:58.016551 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02bcaa16-3617-43f1-864b-e91f271cf409-utilities\") pod \"certified-operators-jddgb\" (UID: 
\"02bcaa16-3617-43f1-864b-e91f271cf409\") " pod="openshift-marketplace/certified-operators-jddgb" Nov 27 10:11:58 crc kubenswrapper[4971]: I1127 10:11:58.016656 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02bcaa16-3617-43f1-864b-e91f271cf409-catalog-content\") pod \"certified-operators-jddgb\" (UID: \"02bcaa16-3617-43f1-864b-e91f271cf409\") " pod="openshift-marketplace/certified-operators-jddgb" Nov 27 10:11:58 crc kubenswrapper[4971]: I1127 10:11:58.052188 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29dlx\" (UniqueName: \"kubernetes.io/projected/02bcaa16-3617-43f1-864b-e91f271cf409-kube-api-access-29dlx\") pod \"certified-operators-jddgb\" (UID: \"02bcaa16-3617-43f1-864b-e91f271cf409\") " pod="openshift-marketplace/certified-operators-jddgb" Nov 27 10:11:58 crc kubenswrapper[4971]: I1127 10:11:58.129343 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jddgb" Nov 27 10:11:58 crc kubenswrapper[4971]: I1127 10:11:58.729486 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jddgb"] Nov 27 10:11:59 crc kubenswrapper[4971]: I1127 10:11:59.761337 4971 generic.go:334] "Generic (PLEG): container finished" podID="02bcaa16-3617-43f1-864b-e91f271cf409" containerID="ea479256cbdc82658e74c2162b10e054afc26682c67826cf45bfa33b957505bd" exitCode=0 Nov 27 10:11:59 crc kubenswrapper[4971]: I1127 10:11:59.761734 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jddgb" event={"ID":"02bcaa16-3617-43f1-864b-e91f271cf409","Type":"ContainerDied","Data":"ea479256cbdc82658e74c2162b10e054afc26682c67826cf45bfa33b957505bd"} Nov 27 10:11:59 crc kubenswrapper[4971]: I1127 10:11:59.761814 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jddgb" event={"ID":"02bcaa16-3617-43f1-864b-e91f271cf409","Type":"ContainerStarted","Data":"d480fa6813ac4ac50287e8652e77b5134489e67ec1d498d64017db5e35f10101"} Nov 27 10:12:01 crc kubenswrapper[4971]: I1127 10:12:01.551703 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:12:01 crc kubenswrapper[4971]: E1127 10:12:01.552525 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:12:01 crc kubenswrapper[4971]: I1127 10:12:01.804162 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jddgb" event={"ID":"02bcaa16-3617-43f1-864b-e91f271cf409","Type":"ContainerStarted","Data":"718d97e6eb62e9f19356166abd5606219bbd948d5845127ead6450664309cb40"} Nov 27 10:12:02 crc kubenswrapper[4971]: I1127 10:12:02.818452 4971 generic.go:334] "Generic (PLEG): container finished" podID="02bcaa16-3617-43f1-864b-e91f271cf409" containerID="718d97e6eb62e9f19356166abd5606219bbd948d5845127ead6450664309cb40" exitCode=0 Nov 27 10:12:02 crc kubenswrapper[4971]: I1127 10:12:02.818588 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-jddgb" event={"ID":"02bcaa16-3617-43f1-864b-e91f271cf409","Type":"ContainerDied","Data":"718d97e6eb62e9f19356166abd5606219bbd948d5845127ead6450664309cb40"} Nov 27 10:12:03 crc kubenswrapper[4971]: I1127 10:12:03.832876 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jddgb" event={"ID":"02bcaa16-3617-43f1-864b-e91f271cf409","Type":"ContainerStarted","Data":"95fd2471ef4eafa08ed0824d14d69502c68538d01272cc6d24fe429ec31182f2"} Nov 27 10:12:03 crc kubenswrapper[4971]: I1127 10:12:03.867091 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jddgb" podStartSLOduration=3.357570859 podStartE2EDuration="6.867067566s" podCreationTimestamp="2025-11-27 10:11:57 +0000 UTC" firstStartedPulling="2025-11-27 10:11:59.763940016 +0000 UTC m=+11957.955983934" lastFinishedPulling="2025-11-27 10:12:03.273436723 +0000 UTC m=+11961.465480641" observedRunningTime="2025-11-27 10:12:03.852657132 +0000 UTC m=+11962.044701060" watchObservedRunningTime="2025-11-27 10:12:03.867067566 +0000 UTC m=+11962.059111494" Nov 27 10:12:08 crc kubenswrapper[4971]: I1127 10:12:08.130059 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jddgb" Nov 27 10:12:08 crc kubenswrapper[4971]: I1127 10:12:08.130516 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jddgb" Nov 27 10:12:08 crc kubenswrapper[4971]: I1127 10:12:08.193302 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jddgb" Nov 27 10:12:08 crc kubenswrapper[4971]: I1127 10:12:08.962579 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jddgb" Nov 27 10:12:09 crc kubenswrapper[4971]: I1127 10:12:09.038366 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jddgb"] Nov 27 10:12:10 crc kubenswrapper[4971]: I1127 10:12:10.858986 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6h9vs"] Nov 27 10:12:10 crc kubenswrapper[4971]: I1127 10:12:10.861911 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6h9vs" Nov 27 10:12:10 crc kubenswrapper[4971]: I1127 10:12:10.875858 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6h9vs"] Nov 27 10:12:10 crc kubenswrapper[4971]: I1127 10:12:10.920184 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jddgb" podUID="02bcaa16-3617-43f1-864b-e91f271cf409" containerName="registry-server" containerID="cri-o://95fd2471ef4eafa08ed0824d14d69502c68538d01272cc6d24fe429ec31182f2" gracePeriod=2 Nov 27 10:12:10 crc kubenswrapper[4971]: I1127 10:12:10.954707 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c-catalog-content\") pod \"redhat-operators-6h9vs\" (UID: \"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c\") " pod="openshift-marketplace/redhat-operators-6h9vs" Nov 27 10:12:10 crc kubenswrapper[4971]: I1127 10:12:10.954802 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c-utilities\") pod \"redhat-operators-6h9vs\" (UID: \"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c\") " pod="openshift-marketplace/redhat-operators-6h9vs" Nov 27 10:12:10 crc kubenswrapper[4971]: I1127 10:12:10.954929 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnzqc\" (UniqueName: \"kubernetes.io/projected/477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c-kube-api-access-hnzqc\") pod \"redhat-operators-6h9vs\" (UID: \"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c\") " pod="openshift-marketplace/redhat-operators-6h9vs" Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.059884 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c-catalog-content\") pod \"redhat-operators-6h9vs\" (UID: \"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c\") " pod="openshift-marketplace/redhat-operators-6h9vs" Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.060121 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c-utilities\") pod \"redhat-operators-6h9vs\" (UID: \"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c\") " pod="openshift-marketplace/redhat-operators-6h9vs" Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.060202 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnzqc\" (UniqueName: \"kubernetes.io/projected/477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c-kube-api-access-hnzqc\") pod \"redhat-operators-6h9vs\" (UID: \"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c\") " pod="openshift-marketplace/redhat-operators-6h9vs" Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.063311 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c-catalog-content\") pod \"redhat-operators-6h9vs\" (UID: \"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c\") " pod="openshift-marketplace/redhat-operators-6h9vs" Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.063653 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c-utilities\") pod \"redhat-operators-6h9vs\" (UID: \"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c\") " pod="openshift-marketplace/redhat-operators-6h9vs" Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.085200 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnzqc\" (UniqueName: \"kubernetes.io/projected/477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c-kube-api-access-hnzqc\") pod \"redhat-operators-6h9vs\" (UID: \"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c\") " pod="openshift-marketplace/redhat-operators-6h9vs" Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.207157 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6h9vs" Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.580487 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jddgb" Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.677476 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02bcaa16-3617-43f1-864b-e91f271cf409-utilities\") pod \"02bcaa16-3617-43f1-864b-e91f271cf409\" (UID: \"02bcaa16-3617-43f1-864b-e91f271cf409\") " Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.677582 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02bcaa16-3617-43f1-864b-e91f271cf409-catalog-content\") pod \"02bcaa16-3617-43f1-864b-e91f271cf409\" (UID: \"02bcaa16-3617-43f1-864b-e91f271cf409\") " Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.677715 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-29dlx\" (UniqueName: \"kubernetes.io/projected/02bcaa16-3617-43f1-864b-e91f271cf409-kube-api-access-29dlx\") pod \"02bcaa16-3617-43f1-864b-e91f271cf409\" (UID: \"02bcaa16-3617-43f1-864b-e91f271cf409\") " Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.678910 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02bcaa16-3617-43f1-864b-e91f271cf409-utilities" (OuterVolumeSpecName: "utilities") pod "02bcaa16-3617-43f1-864b-e91f271cf409" (UID: "02bcaa16-3617-43f1-864b-e91f271cf409"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.687134 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02bcaa16-3617-43f1-864b-e91f271cf409-kube-api-access-29dlx" (OuterVolumeSpecName: "kube-api-access-29dlx") pod "02bcaa16-3617-43f1-864b-e91f271cf409" (UID: "02bcaa16-3617-43f1-864b-e91f271cf409"). InnerVolumeSpecName "kube-api-access-29dlx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.753289 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02bcaa16-3617-43f1-864b-e91f271cf409-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "02bcaa16-3617-43f1-864b-e91f271cf409" (UID: "02bcaa16-3617-43f1-864b-e91f271cf409"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.780631 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02bcaa16-3617-43f1-864b-e91f271cf409-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.780670 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02bcaa16-3617-43f1-864b-e91f271cf409-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.780686 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-29dlx\" (UniqueName: \"kubernetes.io/projected/02bcaa16-3617-43f1-864b-e91f271cf409-kube-api-access-29dlx\") on node \"crc\" DevicePath \"\"" Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.914064 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6h9vs"] Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.935355 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6h9vs" event={"ID":"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c","Type":"ContainerStarted","Data":"57b66e6feb4b0e5ef737d3535cf992a9a29e531882929ff63c4d1687d22b6a7a"} Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.941207 4971 generic.go:334] "Generic (PLEG): container finished" podID="02bcaa16-3617-43f1-864b-e91f271cf409" containerID="95fd2471ef4eafa08ed0824d14d69502c68538d01272cc6d24fe429ec31182f2" exitCode=0 Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.941255 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jddgb" event={"ID":"02bcaa16-3617-43f1-864b-e91f271cf409","Type":"ContainerDied","Data":"95fd2471ef4eafa08ed0824d14d69502c68538d01272cc6d24fe429ec31182f2"} Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.941283 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jddgb" event={"ID":"02bcaa16-3617-43f1-864b-e91f271cf409","Type":"ContainerDied","Data":"d480fa6813ac4ac50287e8652e77b5134489e67ec1d498d64017db5e35f10101"} Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.941308 4971 scope.go:117] "RemoveContainer" containerID="95fd2471ef4eafa08ed0824d14d69502c68538d01272cc6d24fe429ec31182f2" Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.941568 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jddgb" Nov 27 10:12:11 crc kubenswrapper[4971]: I1127 10:12:11.983994 4971 scope.go:117] "RemoveContainer" containerID="718d97e6eb62e9f19356166abd5606219bbd948d5845127ead6450664309cb40" Nov 27 10:12:12 crc kubenswrapper[4971]: I1127 10:12:12.026595 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jddgb"] Nov 27 10:12:12 crc kubenswrapper[4971]: I1127 10:12:12.047104 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jddgb"] Nov 27 10:12:12 crc kubenswrapper[4971]: I1127 10:12:12.049572 4971 scope.go:117] "RemoveContainer" containerID="ea479256cbdc82658e74c2162b10e054afc26682c67826cf45bfa33b957505bd" Nov 27 10:12:12 crc kubenswrapper[4971]: I1127 10:12:12.139947 4971 scope.go:117] "RemoveContainer" containerID="95fd2471ef4eafa08ed0824d14d69502c68538d01272cc6d24fe429ec31182f2" Nov 27 10:12:12 crc kubenswrapper[4971]: E1127 10:12:12.143162 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95fd2471ef4eafa08ed0824d14d69502c68538d01272cc6d24fe429ec31182f2\": container with ID starting with 95fd2471ef4eafa08ed0824d14d69502c68538d01272cc6d24fe429ec31182f2 not found: ID does not exist" containerID="95fd2471ef4eafa08ed0824d14d69502c68538d01272cc6d24fe429ec31182f2" Nov 27 10:12:12 crc kubenswrapper[4971]: I1127 10:12:12.143228 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95fd2471ef4eafa08ed0824d14d69502c68538d01272cc6d24fe429ec31182f2"} err="failed to get container status \"95fd2471ef4eafa08ed0824d14d69502c68538d01272cc6d24fe429ec31182f2\": rpc error: code = NotFound desc = could not find container \"95fd2471ef4eafa08ed0824d14d69502c68538d01272cc6d24fe429ec31182f2\": container with ID starting with 95fd2471ef4eafa08ed0824d14d69502c68538d01272cc6d24fe429ec31182f2 not found: ID does not exist" Nov 27 10:12:12 crc kubenswrapper[4971]: I1127 10:12:12.143268 4971 scope.go:117] "RemoveContainer" containerID="718d97e6eb62e9f19356166abd5606219bbd948d5845127ead6450664309cb40" Nov 27 10:12:12 crc kubenswrapper[4971]: E1127 10:12:12.143885 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"718d97e6eb62e9f19356166abd5606219bbd948d5845127ead6450664309cb40\": container with ID starting with 718d97e6eb62e9f19356166abd5606219bbd948d5845127ead6450664309cb40 not found: ID does not exist" containerID="718d97e6eb62e9f19356166abd5606219bbd948d5845127ead6450664309cb40" Nov 27 10:12:12 crc kubenswrapper[4971]: I1127 10:12:12.143921 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"718d97e6eb62e9f19356166abd5606219bbd948d5845127ead6450664309cb40"} err="failed to get container status \"718d97e6eb62e9f19356166abd5606219bbd948d5845127ead6450664309cb40\": rpc error: code = NotFound desc = could not find container \"718d97e6eb62e9f19356166abd5606219bbd948d5845127ead6450664309cb40\": container with ID starting with 718d97e6eb62e9f19356166abd5606219bbd948d5845127ead6450664309cb40 not found: ID does not exist" Nov 27 10:12:12 crc kubenswrapper[4971]: I1127 10:12:12.143939 4971 scope.go:117] "RemoveContainer" containerID="ea479256cbdc82658e74c2162b10e054afc26682c67826cf45bfa33b957505bd" Nov 27 10:12:12 crc kubenswrapper[4971]: E1127 10:12:12.144427 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ea479256cbdc82658e74c2162b10e054afc26682c67826cf45bfa33b957505bd\": container with ID starting with ea479256cbdc82658e74c2162b10e054afc26682c67826cf45bfa33b957505bd not found: ID does not exist" containerID="ea479256cbdc82658e74c2162b10e054afc26682c67826cf45bfa33b957505bd" Nov 27 10:12:12 crc kubenswrapper[4971]: I1127 10:12:12.144463 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea479256cbdc82658e74c2162b10e054afc26682c67826cf45bfa33b957505bd"} err="failed to get container status \"ea479256cbdc82658e74c2162b10e054afc26682c67826cf45bfa33b957505bd\": rpc error: code = NotFound desc = could not find container \"ea479256cbdc82658e74c2162b10e054afc26682c67826cf45bfa33b957505bd\": container with ID starting with ea479256cbdc82658e74c2162b10e054afc26682c67826cf45bfa33b957505bd not found: ID does not exist" Nov 27 10:12:12 crc kubenswrapper[4971]: I1127 10:12:12.560692 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:12:12 crc kubenswrapper[4971]: E1127 10:12:12.561020 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:12:12 crc kubenswrapper[4971]: I1127 10:12:12.563451 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02bcaa16-3617-43f1-864b-e91f271cf409" path="/var/lib/kubelet/pods/02bcaa16-3617-43f1-864b-e91f271cf409/volumes" Nov 27 10:12:13 crc kubenswrapper[4971]: I1127 10:12:13.010378 4971 generic.go:334] "Generic (PLEG): container finished" podID="477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c" containerID="3265551d51be651b6c56ae29ef3ec53c909eaa6b66dcd0370d75f5cfb7b64f69" exitCode=0 Nov 27 10:12:13 crc kubenswrapper[4971]: I1127 10:12:13.010578 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6h9vs" event={"ID":"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c","Type":"ContainerDied","Data":"3265551d51be651b6c56ae29ef3ec53c909eaa6b66dcd0370d75f5cfb7b64f69"} Nov 27 10:12:13 crc kubenswrapper[4971]: I1127 10:12:13.264011 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7btvn"] Nov 27 10:12:13 crc kubenswrapper[4971]: E1127 10:12:13.265217 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02bcaa16-3617-43f1-864b-e91f271cf409" containerName="registry-server" Nov 27 10:12:13 crc kubenswrapper[4971]: I1127 10:12:13.265233 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="02bcaa16-3617-43f1-864b-e91f271cf409" containerName="registry-server" Nov 27 10:12:13 crc kubenswrapper[4971]: E1127 10:12:13.265281 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02bcaa16-3617-43f1-864b-e91f271cf409" containerName="extract-utilities" Nov 27 10:12:13 crc kubenswrapper[4971]: I1127 10:12:13.265288 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="02bcaa16-3617-43f1-864b-e91f271cf409" containerName="extract-utilities" Nov 27 10:12:13 crc kubenswrapper[4971]: E1127 10:12:13.265314 4971 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="02bcaa16-3617-43f1-864b-e91f271cf409" containerName="extract-content" Nov 27 10:12:13 crc kubenswrapper[4971]: I1127 10:12:13.265322 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="02bcaa16-3617-43f1-864b-e91f271cf409" containerName="extract-content" Nov 27 10:12:13 crc kubenswrapper[4971]: I1127 10:12:13.298179 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="02bcaa16-3617-43f1-864b-e91f271cf409" containerName="registry-server" Nov 27 10:12:13 crc kubenswrapper[4971]: I1127 10:12:13.312207 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7btvn"] Nov 27 10:12:13 crc kubenswrapper[4971]: I1127 10:12:13.312451 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7btvn" Nov 27 10:12:13 crc kubenswrapper[4971]: I1127 10:12:13.418881 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72aa0cc3-a671-4c36-8f8c-e8b3e56bc931-utilities\") pod \"redhat-marketplace-7btvn\" (UID: \"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931\") " pod="openshift-marketplace/redhat-marketplace-7btvn" Nov 27 10:12:13 crc kubenswrapper[4971]: I1127 10:12:13.419362 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72aa0cc3-a671-4c36-8f8c-e8b3e56bc931-catalog-content\") pod \"redhat-marketplace-7btvn\" (UID: \"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931\") " pod="openshift-marketplace/redhat-marketplace-7btvn" Nov 27 10:12:13 crc kubenswrapper[4971]: I1127 10:12:13.419570 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tk4mm\" (UniqueName: \"kubernetes.io/projected/72aa0cc3-a671-4c36-8f8c-e8b3e56bc931-kube-api-access-tk4mm\") pod \"redhat-marketplace-7btvn\" (UID: \"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931\") " pod="openshift-marketplace/redhat-marketplace-7btvn" Nov 27 10:12:13 crc kubenswrapper[4971]: I1127 10:12:13.521581 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72aa0cc3-a671-4c36-8f8c-e8b3e56bc931-utilities\") pod \"redhat-marketplace-7btvn\" (UID: \"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931\") " pod="openshift-marketplace/redhat-marketplace-7btvn" Nov 27 10:12:13 crc kubenswrapper[4971]: I1127 10:12:13.521675 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72aa0cc3-a671-4c36-8f8c-e8b3e56bc931-catalog-content\") pod \"redhat-marketplace-7btvn\" (UID: \"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931\") " pod="openshift-marketplace/redhat-marketplace-7btvn" Nov 27 10:12:13 crc kubenswrapper[4971]: I1127 10:12:13.521744 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tk4mm\" (UniqueName: \"kubernetes.io/projected/72aa0cc3-a671-4c36-8f8c-e8b3e56bc931-kube-api-access-tk4mm\") pod \"redhat-marketplace-7btvn\" (UID: \"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931\") " pod="openshift-marketplace/redhat-marketplace-7btvn" Nov 27 10:12:13 crc kubenswrapper[4971]: I1127 10:12:13.522574 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72aa0cc3-a671-4c36-8f8c-e8b3e56bc931-utilities\") pod \"redhat-marketplace-7btvn\" (UID: 
\"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931\") " pod="openshift-marketplace/redhat-marketplace-7btvn" Nov 27 10:12:13 crc kubenswrapper[4971]: I1127 10:12:13.524371 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72aa0cc3-a671-4c36-8f8c-e8b3e56bc931-catalog-content\") pod \"redhat-marketplace-7btvn\" (UID: \"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931\") " pod="openshift-marketplace/redhat-marketplace-7btvn" Nov 27 10:12:13 crc kubenswrapper[4971]: I1127 10:12:13.548571 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tk4mm\" (UniqueName: \"kubernetes.io/projected/72aa0cc3-a671-4c36-8f8c-e8b3e56bc931-kube-api-access-tk4mm\") pod \"redhat-marketplace-7btvn\" (UID: \"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931\") " pod="openshift-marketplace/redhat-marketplace-7btvn" Nov 27 10:12:13 crc kubenswrapper[4971]: I1127 10:12:13.652385 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7btvn" Nov 27 10:12:14 crc kubenswrapper[4971]: I1127 10:12:14.031700 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6h9vs" event={"ID":"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c","Type":"ContainerStarted","Data":"868bcfb778a1648c59ddebdb9ccda384505fa414b1d2d1cbacb971d33e2096a2"} Nov 27 10:12:14 crc kubenswrapper[4971]: I1127 10:12:14.219182 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7btvn"] Nov 27 10:12:15 crc kubenswrapper[4971]: I1127 10:12:15.044465 4971 generic.go:334] "Generic (PLEG): container finished" podID="72aa0cc3-a671-4c36-8f8c-e8b3e56bc931" containerID="b76a658397c2523f3bac8a8acea15f4204600cf0efcc1517cd8b739d1b4fe9a3" exitCode=0 Nov 27 10:12:15 crc kubenswrapper[4971]: I1127 10:12:15.044584 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7btvn" event={"ID":"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931","Type":"ContainerDied","Data":"b76a658397c2523f3bac8a8acea15f4204600cf0efcc1517cd8b739d1b4fe9a3"} Nov 27 10:12:15 crc kubenswrapper[4971]: I1127 10:12:15.045000 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7btvn" event={"ID":"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931","Type":"ContainerStarted","Data":"3caf7ab78ba909a58b09e338abb5c98e3ed65ed23c94def124466df6a7bd6140"} Nov 27 10:12:17 crc kubenswrapper[4971]: I1127 10:12:17.069753 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7btvn" event={"ID":"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931","Type":"ContainerStarted","Data":"615a9f7535aee9f0f52498c668f1c820e808f3465359b05d8c27539348412a14"} Nov 27 10:12:17 crc kubenswrapper[4971]: E1127 10:12:17.757103 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72aa0cc3_a671_4c36_8f8c_e8b3e56bc931.slice/crio-615a9f7535aee9f0f52498c668f1c820e808f3465359b05d8c27539348412a14.scope\": RecentStats: unable to find data in memory cache]" Nov 27 10:12:18 crc kubenswrapper[4971]: I1127 10:12:18.086029 4971 generic.go:334] "Generic (PLEG): container finished" podID="72aa0cc3-a671-4c36-8f8c-e8b3e56bc931" containerID="615a9f7535aee9f0f52498c668f1c820e808f3465359b05d8c27539348412a14" exitCode=0 Nov 27 10:12:18 crc kubenswrapper[4971]: I1127 10:12:18.086095 4971 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7btvn" event={"ID":"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931","Type":"ContainerDied","Data":"615a9f7535aee9f0f52498c668f1c820e808f3465359b05d8c27539348412a14"} Nov 27 10:12:19 crc kubenswrapper[4971]: I1127 10:12:19.101591 4971 generic.go:334] "Generic (PLEG): container finished" podID="477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c" containerID="868bcfb778a1648c59ddebdb9ccda384505fa414b1d2d1cbacb971d33e2096a2" exitCode=0 Nov 27 10:12:19 crc kubenswrapper[4971]: I1127 10:12:19.102470 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6h9vs" event={"ID":"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c","Type":"ContainerDied","Data":"868bcfb778a1648c59ddebdb9ccda384505fa414b1d2d1cbacb971d33e2096a2"} Nov 27 10:12:19 crc kubenswrapper[4971]: I1127 10:12:19.109465 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7btvn" event={"ID":"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931","Type":"ContainerStarted","Data":"f77f1a141b3286f141c9227abee4062525768b00dd7cc7f6318b9b7b7c1ddcec"} Nov 27 10:12:19 crc kubenswrapper[4971]: I1127 10:12:19.156212 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7btvn" podStartSLOduration=2.386365953 podStartE2EDuration="6.156186188s" podCreationTimestamp="2025-11-27 10:12:13 +0000 UTC" firstStartedPulling="2025-11-27 10:12:15.046636039 +0000 UTC m=+11973.238679947" lastFinishedPulling="2025-11-27 10:12:18.816456264 +0000 UTC m=+11977.008500182" observedRunningTime="2025-11-27 10:12:19.149280935 +0000 UTC m=+11977.341324863" watchObservedRunningTime="2025-11-27 10:12:19.156186188 +0000 UTC m=+11977.348230106" Nov 27 10:12:20 crc kubenswrapper[4971]: I1127 10:12:20.123333 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6h9vs" event={"ID":"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c","Type":"ContainerStarted","Data":"e492b90d32823a6f40f78d9d659ff13293c635f269c5807978141ed557cd0a34"} Nov 27 10:12:20 crc kubenswrapper[4971]: I1127 10:12:20.155380 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6h9vs" podStartSLOduration=3.617032401 podStartE2EDuration="10.1553547s" podCreationTimestamp="2025-11-27 10:12:10 +0000 UTC" firstStartedPulling="2025-11-27 10:12:13.014105448 +0000 UTC m=+11971.206149366" lastFinishedPulling="2025-11-27 10:12:19.552427757 +0000 UTC m=+11977.744471665" observedRunningTime="2025-11-27 10:12:20.148347393 +0000 UTC m=+11978.340391311" watchObservedRunningTime="2025-11-27 10:12:20.1553547 +0000 UTC m=+11978.347398618" Nov 27 10:12:21 crc kubenswrapper[4971]: I1127 10:12:21.208170 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6h9vs" Nov 27 10:12:21 crc kubenswrapper[4971]: I1127 10:12:21.208648 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6h9vs" Nov 27 10:12:22 crc kubenswrapper[4971]: I1127 10:12:22.266675 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6h9vs" podUID="477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c" containerName="registry-server" probeResult="failure" output=< Nov 27 10:12:22 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 10:12:22 crc kubenswrapper[4971]: > Nov 27 10:12:23 crc kubenswrapper[4971]: I1127 
10:12:23.652572 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7btvn" Nov 27 10:12:23 crc kubenswrapper[4971]: I1127 10:12:23.652902 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7btvn" Nov 27 10:12:24 crc kubenswrapper[4971]: I1127 10:12:24.713177 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-7btvn" podUID="72aa0cc3-a671-4c36-8f8c-e8b3e56bc931" containerName="registry-server" probeResult="failure" output=< Nov 27 10:12:24 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 10:12:24 crc kubenswrapper[4971]: > Nov 27 10:12:26 crc kubenswrapper[4971]: I1127 10:12:26.551672 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:12:26 crc kubenswrapper[4971]: E1127 10:12:26.552414 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:12:32 crc kubenswrapper[4971]: I1127 10:12:32.268509 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6h9vs" podUID="477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c" containerName="registry-server" probeResult="failure" output=< Nov 27 10:12:32 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 10:12:32 crc kubenswrapper[4971]: > Nov 27 10:12:33 crc kubenswrapper[4971]: I1127 10:12:33.704762 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7btvn" Nov 27 10:12:33 crc kubenswrapper[4971]: I1127 10:12:33.770776 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7btvn" Nov 27 10:12:33 crc kubenswrapper[4971]: I1127 10:12:33.944116 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7btvn"] Nov 27 10:12:35 crc kubenswrapper[4971]: I1127 10:12:35.309929 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7btvn" podUID="72aa0cc3-a671-4c36-8f8c-e8b3e56bc931" containerName="registry-server" containerID="cri-o://f77f1a141b3286f141c9227abee4062525768b00dd7cc7f6318b9b7b7c1ddcec" gracePeriod=2 Nov 27 10:12:35 crc kubenswrapper[4971]: I1127 10:12:35.970657 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7btvn" Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.053040 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk4mm\" (UniqueName: \"kubernetes.io/projected/72aa0cc3-a671-4c36-8f8c-e8b3e56bc931-kube-api-access-tk4mm\") pod \"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931\" (UID: \"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931\") " Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.053280 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72aa0cc3-a671-4c36-8f8c-e8b3e56bc931-catalog-content\") pod \"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931\" (UID: \"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931\") " Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.053353 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72aa0cc3-a671-4c36-8f8c-e8b3e56bc931-utilities\") pod \"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931\" (UID: \"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931\") " Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.054875 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72aa0cc3-a671-4c36-8f8c-e8b3e56bc931-utilities" (OuterVolumeSpecName: "utilities") pod "72aa0cc3-a671-4c36-8f8c-e8b3e56bc931" (UID: "72aa0cc3-a671-4c36-8f8c-e8b3e56bc931"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.061860 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72aa0cc3-a671-4c36-8f8c-e8b3e56bc931-kube-api-access-tk4mm" (OuterVolumeSpecName: "kube-api-access-tk4mm") pod "72aa0cc3-a671-4c36-8f8c-e8b3e56bc931" (UID: "72aa0cc3-a671-4c36-8f8c-e8b3e56bc931"). InnerVolumeSpecName "kube-api-access-tk4mm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.069644 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72aa0cc3-a671-4c36-8f8c-e8b3e56bc931-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "72aa0cc3-a671-4c36-8f8c-e8b3e56bc931" (UID: "72aa0cc3-a671-4c36-8f8c-e8b3e56bc931"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.155850 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk4mm\" (UniqueName: \"kubernetes.io/projected/72aa0cc3-a671-4c36-8f8c-e8b3e56bc931-kube-api-access-tk4mm\") on node \"crc\" DevicePath \"\"" Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.155897 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72aa0cc3-a671-4c36-8f8c-e8b3e56bc931-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.155909 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72aa0cc3-a671-4c36-8f8c-e8b3e56bc931-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.327225 4971 generic.go:334] "Generic (PLEG): container finished" podID="72aa0cc3-a671-4c36-8f8c-e8b3e56bc931" containerID="f77f1a141b3286f141c9227abee4062525768b00dd7cc7f6318b9b7b7c1ddcec" exitCode=0 Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.327405 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7btvn" event={"ID":"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931","Type":"ContainerDied","Data":"f77f1a141b3286f141c9227abee4062525768b00dd7cc7f6318b9b7b7c1ddcec"} Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.327636 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7btvn" event={"ID":"72aa0cc3-a671-4c36-8f8c-e8b3e56bc931","Type":"ContainerDied","Data":"3caf7ab78ba909a58b09e338abb5c98e3ed65ed23c94def124466df6a7bd6140"} Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.327660 4971 scope.go:117] "RemoveContainer" containerID="f77f1a141b3286f141c9227abee4062525768b00dd7cc7f6318b9b7b7c1ddcec" Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.327476 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7btvn" Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.369211 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7btvn"] Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.376441 4971 scope.go:117] "RemoveContainer" containerID="615a9f7535aee9f0f52498c668f1c820e808f3465359b05d8c27539348412a14" Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.379829 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7btvn"] Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.424049 4971 scope.go:117] "RemoveContainer" containerID="b76a658397c2523f3bac8a8acea15f4204600cf0efcc1517cd8b739d1b4fe9a3" Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.490386 4971 scope.go:117] "RemoveContainer" containerID="f77f1a141b3286f141c9227abee4062525768b00dd7cc7f6318b9b7b7c1ddcec" Nov 27 10:12:36 crc kubenswrapper[4971]: E1127 10:12:36.491170 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f77f1a141b3286f141c9227abee4062525768b00dd7cc7f6318b9b7b7c1ddcec\": container with ID starting with f77f1a141b3286f141c9227abee4062525768b00dd7cc7f6318b9b7b7c1ddcec not found: ID does not exist" containerID="f77f1a141b3286f141c9227abee4062525768b00dd7cc7f6318b9b7b7c1ddcec" Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.491227 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f77f1a141b3286f141c9227abee4062525768b00dd7cc7f6318b9b7b7c1ddcec"} err="failed to get container status \"f77f1a141b3286f141c9227abee4062525768b00dd7cc7f6318b9b7b7c1ddcec\": rpc error: code = NotFound desc = could not find container \"f77f1a141b3286f141c9227abee4062525768b00dd7cc7f6318b9b7b7c1ddcec\": container with ID starting with f77f1a141b3286f141c9227abee4062525768b00dd7cc7f6318b9b7b7c1ddcec not found: ID does not exist" Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.491263 4971 scope.go:117] "RemoveContainer" containerID="615a9f7535aee9f0f52498c668f1c820e808f3465359b05d8c27539348412a14" Nov 27 10:12:36 crc kubenswrapper[4971]: E1127 10:12:36.491842 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"615a9f7535aee9f0f52498c668f1c820e808f3465359b05d8c27539348412a14\": container with ID starting with 615a9f7535aee9f0f52498c668f1c820e808f3465359b05d8c27539348412a14 not found: ID does not exist" containerID="615a9f7535aee9f0f52498c668f1c820e808f3465359b05d8c27539348412a14" Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.491878 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"615a9f7535aee9f0f52498c668f1c820e808f3465359b05d8c27539348412a14"} err="failed to get container status \"615a9f7535aee9f0f52498c668f1c820e808f3465359b05d8c27539348412a14\": rpc error: code = NotFound desc = could not find container \"615a9f7535aee9f0f52498c668f1c820e808f3465359b05d8c27539348412a14\": container with ID starting with 615a9f7535aee9f0f52498c668f1c820e808f3465359b05d8c27539348412a14 not found: ID does not exist" Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.491905 4971 scope.go:117] "RemoveContainer" containerID="b76a658397c2523f3bac8a8acea15f4204600cf0efcc1517cd8b739d1b4fe9a3" Nov 27 10:12:36 crc kubenswrapper[4971]: E1127 10:12:36.492410 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b76a658397c2523f3bac8a8acea15f4204600cf0efcc1517cd8b739d1b4fe9a3\": container with ID starting with b76a658397c2523f3bac8a8acea15f4204600cf0efcc1517cd8b739d1b4fe9a3 not found: ID does not exist" containerID="b76a658397c2523f3bac8a8acea15f4204600cf0efcc1517cd8b739d1b4fe9a3" Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.492458 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b76a658397c2523f3bac8a8acea15f4204600cf0efcc1517cd8b739d1b4fe9a3"} err="failed to get container status \"b76a658397c2523f3bac8a8acea15f4204600cf0efcc1517cd8b739d1b4fe9a3\": rpc error: code = NotFound desc = could not find container \"b76a658397c2523f3bac8a8acea15f4204600cf0efcc1517cd8b739d1b4fe9a3\": container with ID starting with b76a658397c2523f3bac8a8acea15f4204600cf0efcc1517cd8b739d1b4fe9a3 not found: ID does not exist" Nov 27 10:12:36 crc kubenswrapper[4971]: I1127 10:12:36.562824 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72aa0cc3-a671-4c36-8f8c-e8b3e56bc931" path="/var/lib/kubelet/pods/72aa0cc3-a671-4c36-8f8c-e8b3e56bc931/volumes" Nov 27 10:12:38 crc kubenswrapper[4971]: I1127 10:12:38.554015 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:12:38 crc kubenswrapper[4971]: E1127 10:12:38.557705 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:12:42 crc kubenswrapper[4971]: I1127 10:12:42.256619 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6h9vs" podUID="477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c" containerName="registry-server" probeResult="failure" output=< Nov 27 10:12:42 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 10:12:42 crc kubenswrapper[4971]: > Nov 27 10:12:51 crc kubenswrapper[4971]: I1127 10:12:51.265458 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6h9vs" Nov 27 10:12:51 crc kubenswrapper[4971]: I1127 10:12:51.323275 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6h9vs" Nov 27 10:12:51 crc kubenswrapper[4971]: I1127 10:12:51.508993 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6h9vs"] Nov 27 10:12:52 crc kubenswrapper[4971]: I1127 10:12:52.512213 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6h9vs" podUID="477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c" containerName="registry-server" containerID="cri-o://e492b90d32823a6f40f78d9d659ff13293c635f269c5807978141ed557cd0a34" gracePeriod=2 Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.222450 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6h9vs" Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.359596 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnzqc\" (UniqueName: \"kubernetes.io/projected/477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c-kube-api-access-hnzqc\") pod \"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c\" (UID: \"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c\") " Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.361024 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c-catalog-content\") pod \"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c\" (UID: \"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c\") " Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.361120 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c-utilities\") pod \"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c\" (UID: \"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c\") " Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.362726 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c-utilities" (OuterVolumeSpecName: "utilities") pod "477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c" (UID: "477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.366797 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c-kube-api-access-hnzqc" (OuterVolumeSpecName: "kube-api-access-hnzqc") pod "477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c" (UID: "477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c"). InnerVolumeSpecName "kube-api-access-hnzqc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.465403 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.465452 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnzqc\" (UniqueName: \"kubernetes.io/projected/477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c-kube-api-access-hnzqc\") on node \"crc\" DevicePath \"\"" Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.478183 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c" (UID: "477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.528638 4971 generic.go:334] "Generic (PLEG): container finished" podID="477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c" containerID="e492b90d32823a6f40f78d9d659ff13293c635f269c5807978141ed557cd0a34" exitCode=0 Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.528701 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6h9vs" event={"ID":"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c","Type":"ContainerDied","Data":"e492b90d32823a6f40f78d9d659ff13293c635f269c5807978141ed557cd0a34"} Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.528743 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6h9vs" event={"ID":"477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c","Type":"ContainerDied","Data":"57b66e6feb4b0e5ef737d3535cf992a9a29e531882929ff63c4d1687d22b6a7a"} Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.528770 4971 scope.go:117] "RemoveContainer" containerID="e492b90d32823a6f40f78d9d659ff13293c635f269c5807978141ed557cd0a34" Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.528969 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6h9vs" Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.551100 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:12:53 crc kubenswrapper[4971]: E1127 10:12:53.551590 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.566528 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.570407 4971 scope.go:117] "RemoveContainer" containerID="868bcfb778a1648c59ddebdb9ccda384505fa414b1d2d1cbacb971d33e2096a2" Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.576426 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6h9vs"] Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.587105 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6h9vs"] Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.610247 4971 scope.go:117] "RemoveContainer" containerID="3265551d51be651b6c56ae29ef3ec53c909eaa6b66dcd0370d75f5cfb7b64f69" Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.653134 4971 scope.go:117] "RemoveContainer" containerID="e492b90d32823a6f40f78d9d659ff13293c635f269c5807978141ed557cd0a34" Nov 27 10:12:53 crc kubenswrapper[4971]: E1127 10:12:53.653635 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e492b90d32823a6f40f78d9d659ff13293c635f269c5807978141ed557cd0a34\": container with ID starting with e492b90d32823a6f40f78d9d659ff13293c635f269c5807978141ed557cd0a34 not found: ID does not exist" 
containerID="e492b90d32823a6f40f78d9d659ff13293c635f269c5807978141ed557cd0a34" Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.653687 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e492b90d32823a6f40f78d9d659ff13293c635f269c5807978141ed557cd0a34"} err="failed to get container status \"e492b90d32823a6f40f78d9d659ff13293c635f269c5807978141ed557cd0a34\": rpc error: code = NotFound desc = could not find container \"e492b90d32823a6f40f78d9d659ff13293c635f269c5807978141ed557cd0a34\": container with ID starting with e492b90d32823a6f40f78d9d659ff13293c635f269c5807978141ed557cd0a34 not found: ID does not exist" Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.653717 4971 scope.go:117] "RemoveContainer" containerID="868bcfb778a1648c59ddebdb9ccda384505fa414b1d2d1cbacb971d33e2096a2" Nov 27 10:12:53 crc kubenswrapper[4971]: E1127 10:12:53.654011 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"868bcfb778a1648c59ddebdb9ccda384505fa414b1d2d1cbacb971d33e2096a2\": container with ID starting with 868bcfb778a1648c59ddebdb9ccda384505fa414b1d2d1cbacb971d33e2096a2 not found: ID does not exist" containerID="868bcfb778a1648c59ddebdb9ccda384505fa414b1d2d1cbacb971d33e2096a2" Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.654041 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"868bcfb778a1648c59ddebdb9ccda384505fa414b1d2d1cbacb971d33e2096a2"} err="failed to get container status \"868bcfb778a1648c59ddebdb9ccda384505fa414b1d2d1cbacb971d33e2096a2\": rpc error: code = NotFound desc = could not find container \"868bcfb778a1648c59ddebdb9ccda384505fa414b1d2d1cbacb971d33e2096a2\": container with ID starting with 868bcfb778a1648c59ddebdb9ccda384505fa414b1d2d1cbacb971d33e2096a2 not found: ID does not exist" Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.654064 4971 scope.go:117] "RemoveContainer" containerID="3265551d51be651b6c56ae29ef3ec53c909eaa6b66dcd0370d75f5cfb7b64f69" Nov 27 10:12:53 crc kubenswrapper[4971]: E1127 10:12:53.654291 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3265551d51be651b6c56ae29ef3ec53c909eaa6b66dcd0370d75f5cfb7b64f69\": container with ID starting with 3265551d51be651b6c56ae29ef3ec53c909eaa6b66dcd0370d75f5cfb7b64f69 not found: ID does not exist" containerID="3265551d51be651b6c56ae29ef3ec53c909eaa6b66dcd0370d75f5cfb7b64f69" Nov 27 10:12:53 crc kubenswrapper[4971]: I1127 10:12:53.654322 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3265551d51be651b6c56ae29ef3ec53c909eaa6b66dcd0370d75f5cfb7b64f69"} err="failed to get container status \"3265551d51be651b6c56ae29ef3ec53c909eaa6b66dcd0370d75f5cfb7b64f69\": rpc error: code = NotFound desc = could not find container \"3265551d51be651b6c56ae29ef3ec53c909eaa6b66dcd0370d75f5cfb7b64f69\": container with ID starting with 3265551d51be651b6c56ae29ef3ec53c909eaa6b66dcd0370d75f5cfb7b64f69 not found: ID does not exist" Nov 27 10:12:54 crc kubenswrapper[4971]: I1127 10:12:54.568014 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c" path="/var/lib/kubelet/pods/477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c/volumes" Nov 27 10:13:06 crc kubenswrapper[4971]: I1127 10:13:06.551098 4971 scope.go:117] "RemoveContainer" 
containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:13:06 crc kubenswrapper[4971]: E1127 10:13:06.552195 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:13:20 crc kubenswrapper[4971]: I1127 10:13:20.551131 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:13:20 crc kubenswrapper[4971]: E1127 10:13:20.552138 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:13:34 crc kubenswrapper[4971]: I1127 10:13:34.552604 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:13:34 crc kubenswrapper[4971]: E1127 10:13:34.553806 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:13:48 crc kubenswrapper[4971]: I1127 10:13:48.550823 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:13:48 crc kubenswrapper[4971]: E1127 10:13:48.552318 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:14:02 crc kubenswrapper[4971]: I1127 10:14:02.561874 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:14:02 crc kubenswrapper[4971]: E1127 10:14:02.562861 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:14:14 crc kubenswrapper[4971]: I1127 10:14:14.554553 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:14:14 crc kubenswrapper[4971]: E1127 10:14:14.555628 4971 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:14:27 crc kubenswrapper[4971]: I1127 10:14:27.550961 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:14:27 crc kubenswrapper[4971]: E1127 10:14:27.551755 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:14:39 crc kubenswrapper[4971]: I1127 10:14:39.550958 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:14:39 crc kubenswrapper[4971]: E1127 10:14:39.552322 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:14:51 crc kubenswrapper[4971]: I1127 10:14:51.551965 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:14:51 crc kubenswrapper[4971]: E1127 10:14:51.552813 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.167059 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403975-bctdt"] Nov 27 10:15:00 crc kubenswrapper[4971]: E1127 10:15:00.168180 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c" containerName="extract-utilities" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.168198 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c" containerName="extract-utilities" Nov 27 10:15:00 crc kubenswrapper[4971]: E1127 10:15:00.168210 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c" containerName="registry-server" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.168217 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c" containerName="registry-server" Nov 27 10:15:00 crc kubenswrapper[4971]: E1127 10:15:00.168239 4971 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="72aa0cc3-a671-4c36-8f8c-e8b3e56bc931" containerName="extract-content" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.168245 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="72aa0cc3-a671-4c36-8f8c-e8b3e56bc931" containerName="extract-content" Nov 27 10:15:00 crc kubenswrapper[4971]: E1127 10:15:00.168262 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c" containerName="extract-content" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.168267 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c" containerName="extract-content" Nov 27 10:15:00 crc kubenswrapper[4971]: E1127 10:15:00.168285 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72aa0cc3-a671-4c36-8f8c-e8b3e56bc931" containerName="registry-server" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.168291 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="72aa0cc3-a671-4c36-8f8c-e8b3e56bc931" containerName="registry-server" Nov 27 10:15:00 crc kubenswrapper[4971]: E1127 10:15:00.168305 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72aa0cc3-a671-4c36-8f8c-e8b3e56bc931" containerName="extract-utilities" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.168311 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="72aa0cc3-a671-4c36-8f8c-e8b3e56bc931" containerName="extract-utilities" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.168502 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="72aa0cc3-a671-4c36-8f8c-e8b3e56bc931" containerName="registry-server" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.168526 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="477b83be-cb4c-4ba8-8ce1-3ddbd067ad7c" containerName="registry-server" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.171317 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403975-bctdt" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.174347 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.174471 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.182051 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403975-bctdt"] Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.275946 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zc462\" (UniqueName: \"kubernetes.io/projected/f62928fb-a4ae-4668-9c85-b9dd0ec3da69-kube-api-access-zc462\") pod \"collect-profiles-29403975-bctdt\" (UID: \"f62928fb-a4ae-4668-9c85-b9dd0ec3da69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403975-bctdt" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.276603 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f62928fb-a4ae-4668-9c85-b9dd0ec3da69-config-volume\") pod \"collect-profiles-29403975-bctdt\" (UID: \"f62928fb-a4ae-4668-9c85-b9dd0ec3da69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403975-bctdt" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.276770 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f62928fb-a4ae-4668-9c85-b9dd0ec3da69-secret-volume\") pod \"collect-profiles-29403975-bctdt\" (UID: \"f62928fb-a4ae-4668-9c85-b9dd0ec3da69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403975-bctdt" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.379207 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f62928fb-a4ae-4668-9c85-b9dd0ec3da69-config-volume\") pod \"collect-profiles-29403975-bctdt\" (UID: \"f62928fb-a4ae-4668-9c85-b9dd0ec3da69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403975-bctdt" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.379359 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f62928fb-a4ae-4668-9c85-b9dd0ec3da69-secret-volume\") pod \"collect-profiles-29403975-bctdt\" (UID: \"f62928fb-a4ae-4668-9c85-b9dd0ec3da69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403975-bctdt" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.379581 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zc462\" (UniqueName: \"kubernetes.io/projected/f62928fb-a4ae-4668-9c85-b9dd0ec3da69-kube-api-access-zc462\") pod \"collect-profiles-29403975-bctdt\" (UID: \"f62928fb-a4ae-4668-9c85-b9dd0ec3da69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403975-bctdt" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.380589 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f62928fb-a4ae-4668-9c85-b9dd0ec3da69-config-volume\") pod 
\"collect-profiles-29403975-bctdt\" (UID: \"f62928fb-a4ae-4668-9c85-b9dd0ec3da69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403975-bctdt" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.387480 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f62928fb-a4ae-4668-9c85-b9dd0ec3da69-secret-volume\") pod \"collect-profiles-29403975-bctdt\" (UID: \"f62928fb-a4ae-4668-9c85-b9dd0ec3da69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403975-bctdt" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.416452 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zc462\" (UniqueName: \"kubernetes.io/projected/f62928fb-a4ae-4668-9c85-b9dd0ec3da69-kube-api-access-zc462\") pod \"collect-profiles-29403975-bctdt\" (UID: \"f62928fb-a4ae-4668-9c85-b9dd0ec3da69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403975-bctdt" Nov 27 10:15:00 crc kubenswrapper[4971]: I1127 10:15:00.547759 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403975-bctdt" Nov 27 10:15:01 crc kubenswrapper[4971]: I1127 10:15:01.025787 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403975-bctdt"] Nov 27 10:15:01 crc kubenswrapper[4971]: I1127 10:15:01.041909 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403975-bctdt" event={"ID":"f62928fb-a4ae-4668-9c85-b9dd0ec3da69","Type":"ContainerStarted","Data":"629d2f307bb4952348228c65a08cbfba61316b02d5cd85dafbd3f19812b344b3"} Nov 27 10:15:02 crc kubenswrapper[4971]: I1127 10:15:02.053518 4971 generic.go:334] "Generic (PLEG): container finished" podID="f62928fb-a4ae-4668-9c85-b9dd0ec3da69" containerID="d2727ca9ed6f13c9267d39c4987140d6ad1146608164a7b716f2d4ae921541fa" exitCode=0 Nov 27 10:15:02 crc kubenswrapper[4971]: I1127 10:15:02.053644 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403975-bctdt" event={"ID":"f62928fb-a4ae-4668-9c85-b9dd0ec3da69","Type":"ContainerDied","Data":"d2727ca9ed6f13c9267d39c4987140d6ad1146608164a7b716f2d4ae921541fa"} Nov 27 10:15:03 crc kubenswrapper[4971]: I1127 10:15:03.730555 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403975-bctdt" Nov 27 10:15:03 crc kubenswrapper[4971]: I1127 10:15:03.865392 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zc462\" (UniqueName: \"kubernetes.io/projected/f62928fb-a4ae-4668-9c85-b9dd0ec3da69-kube-api-access-zc462\") pod \"f62928fb-a4ae-4668-9c85-b9dd0ec3da69\" (UID: \"f62928fb-a4ae-4668-9c85-b9dd0ec3da69\") " Nov 27 10:15:03 crc kubenswrapper[4971]: I1127 10:15:03.865649 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f62928fb-a4ae-4668-9c85-b9dd0ec3da69-secret-volume\") pod \"f62928fb-a4ae-4668-9c85-b9dd0ec3da69\" (UID: \"f62928fb-a4ae-4668-9c85-b9dd0ec3da69\") " Nov 27 10:15:03 crc kubenswrapper[4971]: I1127 10:15:03.865756 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f62928fb-a4ae-4668-9c85-b9dd0ec3da69-config-volume\") pod \"f62928fb-a4ae-4668-9c85-b9dd0ec3da69\" (UID: \"f62928fb-a4ae-4668-9c85-b9dd0ec3da69\") " Nov 27 10:15:03 crc kubenswrapper[4971]: I1127 10:15:03.866918 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f62928fb-a4ae-4668-9c85-b9dd0ec3da69-config-volume" (OuterVolumeSpecName: "config-volume") pod "f62928fb-a4ae-4668-9c85-b9dd0ec3da69" (UID: "f62928fb-a4ae-4668-9c85-b9dd0ec3da69"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 10:15:03 crc kubenswrapper[4971]: I1127 10:15:03.873089 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f62928fb-a4ae-4668-9c85-b9dd0ec3da69-kube-api-access-zc462" (OuterVolumeSpecName: "kube-api-access-zc462") pod "f62928fb-a4ae-4668-9c85-b9dd0ec3da69" (UID: "f62928fb-a4ae-4668-9c85-b9dd0ec3da69"). InnerVolumeSpecName "kube-api-access-zc462". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:15:03 crc kubenswrapper[4971]: I1127 10:15:03.873834 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f62928fb-a4ae-4668-9c85-b9dd0ec3da69-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f62928fb-a4ae-4668-9c85-b9dd0ec3da69" (UID: "f62928fb-a4ae-4668-9c85-b9dd0ec3da69"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 10:15:03 crc kubenswrapper[4971]: I1127 10:15:03.969149 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zc462\" (UniqueName: \"kubernetes.io/projected/f62928fb-a4ae-4668-9c85-b9dd0ec3da69-kube-api-access-zc462\") on node \"crc\" DevicePath \"\"" Nov 27 10:15:03 crc kubenswrapper[4971]: I1127 10:15:03.969467 4971 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f62928fb-a4ae-4668-9c85-b9dd0ec3da69-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 27 10:15:03 crc kubenswrapper[4971]: I1127 10:15:03.969593 4971 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f62928fb-a4ae-4668-9c85-b9dd0ec3da69-config-volume\") on node \"crc\" DevicePath \"\"" Nov 27 10:15:04 crc kubenswrapper[4971]: I1127 10:15:04.077273 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403975-bctdt" event={"ID":"f62928fb-a4ae-4668-9c85-b9dd0ec3da69","Type":"ContainerDied","Data":"629d2f307bb4952348228c65a08cbfba61316b02d5cd85dafbd3f19812b344b3"} Nov 27 10:15:04 crc kubenswrapper[4971]: I1127 10:15:04.077320 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="629d2f307bb4952348228c65a08cbfba61316b02d5cd85dafbd3f19812b344b3" Nov 27 10:15:04 crc kubenswrapper[4971]: I1127 10:15:04.077650 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403975-bctdt" Nov 27 10:15:04 crc kubenswrapper[4971]: I1127 10:15:04.866745 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd"] Nov 27 10:15:04 crc kubenswrapper[4971]: I1127 10:15:04.883475 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403930-4xqpd"] Nov 27 10:15:05 crc kubenswrapper[4971]: I1127 10:15:05.550799 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:15:05 crc kubenswrapper[4971]: E1127 10:15:05.551728 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:15:06 crc kubenswrapper[4971]: I1127 10:15:06.563209 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ffc5a08-00af-4407-8e27-881e5ac48b51" path="/var/lib/kubelet/pods/1ffc5a08-00af-4407-8e27-881e5ac48b51/volumes" Nov 27 10:15:18 crc kubenswrapper[4971]: I1127 10:15:18.550687 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:15:18 crc kubenswrapper[4971]: E1127 10:15:18.551311 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:15:31 crc kubenswrapper[4971]: I1127 10:15:31.551115 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:15:31 crc kubenswrapper[4971]: E1127 10:15:31.552352 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:15:44 crc kubenswrapper[4971]: I1127 10:15:44.551133 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:15:44 crc kubenswrapper[4971]: E1127 10:15:44.552757 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:15:56 crc kubenswrapper[4971]: I1127 10:15:56.551363 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:15:57 crc kubenswrapper[4971]: I1127 10:15:57.743995 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"d9387d3b303ffc5dc8011c228415b19d694558cca04b8892441826aac2c78d22"} Nov 27 10:16:02 crc kubenswrapper[4971]: I1127 10:16:02.637425 4971 scope.go:117] "RemoveContainer" containerID="a1f5538d6b843fa44449ebc1690d1abb44e5b04835009b3cd10aa90035f3de4c" Nov 27 10:17:56 crc kubenswrapper[4971]: I1127 10:17:56.413001 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 10:17:56 crc kubenswrapper[4971]: I1127 10:17:56.413869 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 10:18:26 crc kubenswrapper[4971]: I1127 10:18:26.413435 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 10:18:26 crc kubenswrapper[4971]: I1127 10:18:26.414349 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 10:18:37 crc kubenswrapper[4971]: I1127 10:18:37.317067 4971 generic.go:334] "Generic (PLEG): container finished" podID="5dede177-0f43-453e-b75b-e5ac63add3da" containerID="2bfccb3cd6682978b90262d2c56142155694719a5b975a28cbbc44b8e11dff2a" exitCode=0 Nov 27 10:18:37 crc kubenswrapper[4971]: I1127 10:18:37.317181 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"5dede177-0f43-453e-b75b-e5ac63add3da","Type":"ContainerDied","Data":"2bfccb3cd6682978b90262d2c56142155694719a5b975a28cbbc44b8e11dff2a"} Nov 27 10:18:38 crc kubenswrapper[4971]: I1127 10:18:38.887558 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Nov 27 10:18:38 crc kubenswrapper[4971]: I1127 10:18:38.939257 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5dede177-0f43-453e-b75b-e5ac63add3da-openstack-config\") pod \"5dede177-0f43-453e-b75b-e5ac63add3da\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " Nov 27 10:18:38 crc kubenswrapper[4971]: I1127 10:18:38.939414 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5dede177-0f43-453e-b75b-e5ac63add3da-ca-certs\") pod \"5dede177-0f43-453e-b75b-e5ac63add3da\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " Nov 27 10:18:38 crc kubenswrapper[4971]: I1127 10:18:38.939451 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5dede177-0f43-453e-b75b-e5ac63add3da-test-operator-ephemeral-workdir\") pod \"5dede177-0f43-453e-b75b-e5ac63add3da\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " Nov 27 10:18:38 crc kubenswrapper[4971]: I1127 10:18:38.939551 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwlg9\" (UniqueName: \"kubernetes.io/projected/5dede177-0f43-453e-b75b-e5ac63add3da-kube-api-access-nwlg9\") pod \"5dede177-0f43-453e-b75b-e5ac63add3da\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " Nov 27 10:18:38 crc kubenswrapper[4971]: I1127 10:18:38.939577 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5dede177-0f43-453e-b75b-e5ac63add3da-ssh-key\") pod \"5dede177-0f43-453e-b75b-e5ac63add3da\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " Nov 27 10:18:38 crc kubenswrapper[4971]: I1127 10:18:38.939625 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"5dede177-0f43-453e-b75b-e5ac63add3da\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " Nov 27 10:18:38 crc kubenswrapper[4971]: I1127 10:18:38.939661 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5dede177-0f43-453e-b75b-e5ac63add3da-config-data\") pod \"5dede177-0f43-453e-b75b-e5ac63add3da\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " Nov 27 10:18:38 crc kubenswrapper[4971]: I1127 10:18:38.939852 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: 
\"kubernetes.io/empty-dir/5dede177-0f43-453e-b75b-e5ac63add3da-test-operator-ephemeral-temporary\") pod \"5dede177-0f43-453e-b75b-e5ac63add3da\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " Nov 27 10:18:38 crc kubenswrapper[4971]: I1127 10:18:38.939980 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5dede177-0f43-453e-b75b-e5ac63add3da-openstack-config-secret\") pod \"5dede177-0f43-453e-b75b-e5ac63add3da\" (UID: \"5dede177-0f43-453e-b75b-e5ac63add3da\") " Nov 27 10:18:38 crc kubenswrapper[4971]: I1127 10:18:38.945071 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5dede177-0f43-453e-b75b-e5ac63add3da-config-data" (OuterVolumeSpecName: "config-data") pod "5dede177-0f43-453e-b75b-e5ac63add3da" (UID: "5dede177-0f43-453e-b75b-e5ac63add3da"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 10:18:38 crc kubenswrapper[4971]: I1127 10:18:38.946345 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5dede177-0f43-453e-b75b-e5ac63add3da-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "5dede177-0f43-453e-b75b-e5ac63add3da" (UID: "5dede177-0f43-453e-b75b-e5ac63add3da"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:18:38 crc kubenswrapper[4971]: I1127 10:18:38.963399 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5dede177-0f43-453e-b75b-e5ac63add3da-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "5dede177-0f43-453e-b75b-e5ac63add3da" (UID: "5dede177-0f43-453e-b75b-e5ac63add3da"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:18:38 crc kubenswrapper[4971]: I1127 10:18:38.965981 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "test-operator-logs") pod "5dede177-0f43-453e-b75b-e5ac63add3da" (UID: "5dede177-0f43-453e-b75b-e5ac63add3da"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 27 10:18:38 crc kubenswrapper[4971]: I1127 10:18:38.966065 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5dede177-0f43-453e-b75b-e5ac63add3da-kube-api-access-nwlg9" (OuterVolumeSpecName: "kube-api-access-nwlg9") pod "5dede177-0f43-453e-b75b-e5ac63add3da" (UID: "5dede177-0f43-453e-b75b-e5ac63add3da"). InnerVolumeSpecName "kube-api-access-nwlg9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:18:38 crc kubenswrapper[4971]: I1127 10:18:38.978666 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5dede177-0f43-453e-b75b-e5ac63add3da-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5dede177-0f43-453e-b75b-e5ac63add3da" (UID: "5dede177-0f43-453e-b75b-e5ac63add3da"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 10:18:38 crc kubenswrapper[4971]: I1127 10:18:38.987791 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5dede177-0f43-453e-b75b-e5ac63add3da-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "5dede177-0f43-453e-b75b-e5ac63add3da" (UID: "5dede177-0f43-453e-b75b-e5ac63add3da"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 10:18:39 crc kubenswrapper[4971]: I1127 10:18:39.000127 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5dede177-0f43-453e-b75b-e5ac63add3da-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "5dede177-0f43-453e-b75b-e5ac63add3da" (UID: "5dede177-0f43-453e-b75b-e5ac63add3da"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 27 10:18:39 crc kubenswrapper[4971]: I1127 10:18:39.014492 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5dede177-0f43-453e-b75b-e5ac63add3da-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "5dede177-0f43-453e-b75b-e5ac63add3da" (UID: "5dede177-0f43-453e-b75b-e5ac63add3da"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 27 10:18:39 crc kubenswrapper[4971]: I1127 10:18:39.043614 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwlg9\" (UniqueName: \"kubernetes.io/projected/5dede177-0f43-453e-b75b-e5ac63add3da-kube-api-access-nwlg9\") on node \"crc\" DevicePath \"\"" Nov 27 10:18:39 crc kubenswrapper[4971]: I1127 10:18:39.043700 4971 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5dede177-0f43-453e-b75b-e5ac63add3da-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 27 10:18:39 crc kubenswrapper[4971]: I1127 10:18:39.043746 4971 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Nov 27 10:18:39 crc kubenswrapper[4971]: I1127 10:18:39.043761 4971 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5dede177-0f43-453e-b75b-e5ac63add3da-config-data\") on node \"crc\" DevicePath \"\"" Nov 27 10:18:39 crc kubenswrapper[4971]: I1127 10:18:39.043783 4971 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5dede177-0f43-453e-b75b-e5ac63add3da-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Nov 27 10:18:39 crc kubenswrapper[4971]: I1127 10:18:39.043799 4971 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5dede177-0f43-453e-b75b-e5ac63add3da-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Nov 27 10:18:39 crc kubenswrapper[4971]: I1127 10:18:39.043815 4971 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5dede177-0f43-453e-b75b-e5ac63add3da-openstack-config\") on node \"crc\" DevicePath \"\"" Nov 27 10:18:39 crc kubenswrapper[4971]: I1127 10:18:39.043827 4971 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5dede177-0f43-453e-b75b-e5ac63add3da-ca-certs\") on node \"crc\" DevicePath \"\"" Nov 27 
10:18:39 crc kubenswrapper[4971]: I1127 10:18:39.043841 4971 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5dede177-0f43-453e-b75b-e5ac63add3da-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Nov 27 10:18:39 crc kubenswrapper[4971]: I1127 10:18:39.069398 4971 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Nov 27 10:18:39 crc kubenswrapper[4971]: I1127 10:18:39.147628 4971 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Nov 27 10:18:39 crc kubenswrapper[4971]: I1127 10:18:39.341634 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"5dede177-0f43-453e-b75b-e5ac63add3da","Type":"ContainerDied","Data":"0c34220b148bc83cc0ad0b5efde75beda39609be65cfacab6a7269a25ece36a3"} Nov 27 10:18:39 crc kubenswrapper[4971]: I1127 10:18:39.342375 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c34220b148bc83cc0ad0b5efde75beda39609be65cfacab6a7269a25ece36a3" Nov 27 10:18:39 crc kubenswrapper[4971]: I1127 10:18:39.341694 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Nov 27 10:18:51 crc kubenswrapper[4971]: I1127 10:18:51.623346 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Nov 27 10:18:51 crc kubenswrapper[4971]: E1127 10:18:51.625264 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f62928fb-a4ae-4668-9c85-b9dd0ec3da69" containerName="collect-profiles" Nov 27 10:18:51 crc kubenswrapper[4971]: I1127 10:18:51.625285 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="f62928fb-a4ae-4668-9c85-b9dd0ec3da69" containerName="collect-profiles" Nov 27 10:18:51 crc kubenswrapper[4971]: E1127 10:18:51.625317 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dede177-0f43-453e-b75b-e5ac63add3da" containerName="tempest-tests-tempest-tests-runner" Nov 27 10:18:51 crc kubenswrapper[4971]: I1127 10:18:51.625326 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dede177-0f43-453e-b75b-e5ac63add3da" containerName="tempest-tests-tempest-tests-runner" Nov 27 10:18:51 crc kubenswrapper[4971]: I1127 10:18:51.625657 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="f62928fb-a4ae-4668-9c85-b9dd0ec3da69" containerName="collect-profiles" Nov 27 10:18:51 crc kubenswrapper[4971]: I1127 10:18:51.625690 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="5dede177-0f43-453e-b75b-e5ac63add3da" containerName="tempest-tests-tempest-tests-runner" Nov 27 10:18:51 crc kubenswrapper[4971]: I1127 10:18:51.626911 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Nov 27 10:18:51 crc kubenswrapper[4971]: I1127 10:18:51.637108 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Nov 27 10:18:51 crc kubenswrapper[4971]: I1127 10:18:51.661052 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-8nz9c" Nov 27 10:18:51 crc kubenswrapper[4971]: I1127 10:18:51.694677 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"43dadd9a-00b5-4496-9210-487a919f5955\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Nov 27 10:18:51 crc kubenswrapper[4971]: I1127 10:18:51.694799 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvqcq\" (UniqueName: \"kubernetes.io/projected/43dadd9a-00b5-4496-9210-487a919f5955-kube-api-access-gvqcq\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"43dadd9a-00b5-4496-9210-487a919f5955\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Nov 27 10:18:51 crc kubenswrapper[4971]: I1127 10:18:51.797436 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"43dadd9a-00b5-4496-9210-487a919f5955\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Nov 27 10:18:51 crc kubenswrapper[4971]: I1127 10:18:51.797563 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvqcq\" (UniqueName: \"kubernetes.io/projected/43dadd9a-00b5-4496-9210-487a919f5955-kube-api-access-gvqcq\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"43dadd9a-00b5-4496-9210-487a919f5955\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Nov 27 10:18:51 crc kubenswrapper[4971]: I1127 10:18:51.798605 4971 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"43dadd9a-00b5-4496-9210-487a919f5955\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Nov 27 10:18:51 crc kubenswrapper[4971]: I1127 10:18:51.824024 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvqcq\" (UniqueName: \"kubernetes.io/projected/43dadd9a-00b5-4496-9210-487a919f5955-kube-api-access-gvqcq\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"43dadd9a-00b5-4496-9210-487a919f5955\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Nov 27 10:18:51 crc kubenswrapper[4971]: I1127 10:18:51.841877 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"43dadd9a-00b5-4496-9210-487a919f5955\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Nov 27 10:18:51 crc 
kubenswrapper[4971]: I1127 10:18:51.999766 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Nov 27 10:18:52 crc kubenswrapper[4971]: I1127 10:18:52.483109 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Nov 27 10:18:52 crc kubenswrapper[4971]: I1127 10:18:52.487219 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 27 10:18:52 crc kubenswrapper[4971]: I1127 10:18:52.533806 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"43dadd9a-00b5-4496-9210-487a919f5955","Type":"ContainerStarted","Data":"57472524e80a64c18d33dec2863f0cd9a51c662962892b77abcd8511c9575acf"}
Nov 27 10:18:54 crc kubenswrapper[4971]: I1127 10:18:54.574453 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"43dadd9a-00b5-4496-9210-487a919f5955","Type":"ContainerStarted","Data":"97e5fda95d98114e25b24c6c69a6c88e287d4516946fe2a69db88711cb0d4e5d"}
Nov 27 10:18:54 crc kubenswrapper[4971]: I1127 10:18:54.605237 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.59795462 podStartE2EDuration="3.605208878s" podCreationTimestamp="2025-11-27 10:18:51 +0000 UTC" firstStartedPulling="2025-11-27 10:18:52.486868822 +0000 UTC m=+12370.678912740" lastFinishedPulling="2025-11-27 10:18:53.49412308 +0000 UTC m=+12371.686166998" observedRunningTime="2025-11-27 10:18:54.589559799 +0000 UTC m=+12372.781603727" watchObservedRunningTime="2025-11-27 10:18:54.605208878 +0000 UTC m=+12372.797252796"
Nov 27 10:18:56 crc kubenswrapper[4971]: I1127 10:18:56.413585 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 10:18:56 crc kubenswrapper[4971]: I1127 10:18:56.413662 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 27 10:18:56 crc kubenswrapper[4971]: I1127 10:18:56.413710 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h"
Nov 27 10:18:56 crc kubenswrapper[4971]: I1127 10:18:56.414837 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d9387d3b303ffc5dc8011c228415b19d694558cca04b8892441826aac2c78d22"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 27 10:18:56 crc kubenswrapper[4971]: I1127 10:18:56.414914 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://d9387d3b303ffc5dc8011c228415b19d694558cca04b8892441826aac2c78d22" gracePeriod=600
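The liveness failures above come from the kubelet's HTTP prober: it issues a GET against the container's configured health endpoint and counts any dial error (such as the "connection refused" seen here) or a status outside the 2xx/3xx range as a failed probe; with the default failureThreshold of 3, repeated failures trigger the "will be restarted" path logged next. A minimal Go sketch of that check, assuming the logged endpoint and an illustrative one-second timeout (the pod's real probe settings are not visible in this log):

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeOnce performs a single HTTP liveness check in the style of the
// kubelet prober: a dial error or a status outside 200-399 is a failure.
func probeOnce(url string, timeout time.Duration) error {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		return fmt.Errorf("probe failed: %w", err) // e.g. "connect: connection refused"
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("probe failed: HTTP %d", resp.StatusCode)
	}
	return nil
}

func main() {
	// Endpoint taken from the log entries above; the timeout is an assumption.
	if err := probeOnce("http://127.0.0.1:8798/health", time.Second); err != nil {
		fmt.Println(err)
	}
}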
containerID="cri-o://d9387d3b303ffc5dc8011c228415b19d694558cca04b8892441826aac2c78d22" gracePeriod=600 Nov 27 10:18:56 crc kubenswrapper[4971]: I1127 10:18:56.602455 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="d9387d3b303ffc5dc8011c228415b19d694558cca04b8892441826aac2c78d22" exitCode=0 Nov 27 10:18:56 crc kubenswrapper[4971]: I1127 10:18:56.602648 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"d9387d3b303ffc5dc8011c228415b19d694558cca04b8892441826aac2c78d22"} Nov 27 10:18:56 crc kubenswrapper[4971]: I1127 10:18:56.603370 4971 scope.go:117] "RemoveContainer" containerID="9fdd88e884f417697e53aaebe9b72e3b1bf740cd0fdca34e017b4d4d6723f1ce" Nov 27 10:18:57 crc kubenswrapper[4971]: I1127 10:18:57.620877 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa"} Nov 27 10:20:15 crc kubenswrapper[4971]: I1127 10:20:15.075804 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-8pdfd/must-gather-zl67k"] Nov 27 10:20:15 crc kubenswrapper[4971]: I1127 10:20:15.078343 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8pdfd/must-gather-zl67k" Nov 27 10:20:15 crc kubenswrapper[4971]: I1127 10:20:15.081965 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-8pdfd"/"openshift-service-ca.crt" Nov 27 10:20:15 crc kubenswrapper[4971]: I1127 10:20:15.082303 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-8pdfd"/"default-dockercfg-sb87m" Nov 27 10:20:15 crc kubenswrapper[4971]: I1127 10:20:15.082708 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-8pdfd"/"kube-root-ca.crt" Nov 27 10:20:15 crc kubenswrapper[4971]: I1127 10:20:15.106762 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-8pdfd/must-gather-zl67k"] Nov 27 10:20:15 crc kubenswrapper[4971]: I1127 10:20:15.216930 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/478390e2-ef28-432b-a0b5-be4a1ebb75cf-must-gather-output\") pod \"must-gather-zl67k\" (UID: \"478390e2-ef28-432b-a0b5-be4a1ebb75cf\") " pod="openshift-must-gather-8pdfd/must-gather-zl67k" Nov 27 10:20:15 crc kubenswrapper[4971]: I1127 10:20:15.217087 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87bq8\" (UniqueName: \"kubernetes.io/projected/478390e2-ef28-432b-a0b5-be4a1ebb75cf-kube-api-access-87bq8\") pod \"must-gather-zl67k\" (UID: \"478390e2-ef28-432b-a0b5-be4a1ebb75cf\") " pod="openshift-must-gather-8pdfd/must-gather-zl67k" Nov 27 10:20:15 crc kubenswrapper[4971]: I1127 10:20:15.319509 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/478390e2-ef28-432b-a0b5-be4a1ebb75cf-must-gather-output\") pod \"must-gather-zl67k\" (UID: \"478390e2-ef28-432b-a0b5-be4a1ebb75cf\") " pod="openshift-must-gather-8pdfd/must-gather-zl67k" Nov 27 10:20:15 crc 
Nov 27 10:20:15 crc kubenswrapper[4971]: I1127 10:20:15.319633 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87bq8\" (UniqueName: \"kubernetes.io/projected/478390e2-ef28-432b-a0b5-be4a1ebb75cf-kube-api-access-87bq8\") pod \"must-gather-zl67k\" (UID: \"478390e2-ef28-432b-a0b5-be4a1ebb75cf\") " pod="openshift-must-gather-8pdfd/must-gather-zl67k"
Nov 27 10:20:15 crc kubenswrapper[4971]: I1127 10:20:15.320515 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/478390e2-ef28-432b-a0b5-be4a1ebb75cf-must-gather-output\") pod \"must-gather-zl67k\" (UID: \"478390e2-ef28-432b-a0b5-be4a1ebb75cf\") " pod="openshift-must-gather-8pdfd/must-gather-zl67k"
Nov 27 10:20:15 crc kubenswrapper[4971]: I1127 10:20:15.342652 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87bq8\" (UniqueName: \"kubernetes.io/projected/478390e2-ef28-432b-a0b5-be4a1ebb75cf-kube-api-access-87bq8\") pod \"must-gather-zl67k\" (UID: \"478390e2-ef28-432b-a0b5-be4a1ebb75cf\") " pod="openshift-must-gather-8pdfd/must-gather-zl67k"
Nov 27 10:20:15 crc kubenswrapper[4971]: I1127 10:20:15.406205 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8pdfd/must-gather-zl67k"
Nov 27 10:20:15 crc kubenswrapper[4971]: I1127 10:20:15.943729 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-8pdfd/must-gather-zl67k"]
Nov 27 10:20:16 crc kubenswrapper[4971]: I1127 10:20:16.109873 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8pdfd/must-gather-zl67k" event={"ID":"478390e2-ef28-432b-a0b5-be4a1ebb75cf","Type":"ContainerStarted","Data":"8b69f0f621d262c251dc102e4758853997fcf38c47a3e9725bf2831f067ee6cd"}
Nov 27 10:20:23 crc kubenswrapper[4971]: I1127 10:20:23.249270 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8pdfd/must-gather-zl67k" event={"ID":"478390e2-ef28-432b-a0b5-be4a1ebb75cf","Type":"ContainerStarted","Data":"0aed6c0d32929f552acdc69181c94f29768d5cd8c1c98ad7e3c9d0f6e46b2780"}
Nov 27 10:20:24 crc kubenswrapper[4971]: I1127 10:20:24.264487 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8pdfd/must-gather-zl67k" event={"ID":"478390e2-ef28-432b-a0b5-be4a1ebb75cf","Type":"ContainerStarted","Data":"fef07161adb2a515623f89e36e39c9a59b35063aca8155cedc092ba9f47dcaeb"}
Nov 27 10:20:29 crc kubenswrapper[4971]: I1127 10:20:29.403317 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-8pdfd/must-gather-zl67k" podStartSLOduration=7.580182359 podStartE2EDuration="14.403298661s" podCreationTimestamp="2025-11-27 10:20:15 +0000 UTC" firstStartedPulling="2025-11-27 10:20:15.94625919 +0000 UTC m=+12454.138303108" lastFinishedPulling="2025-11-27 10:20:22.769375502 +0000 UTC m=+12460.961419410" observedRunningTime="2025-11-27 10:20:24.285796454 +0000 UTC m=+12462.477840422" watchObservedRunningTime="2025-11-27 10:20:29.403298661 +0000 UTC m=+12467.595342579"
Nov 27 10:20:29 crc kubenswrapper[4971]: I1127 10:20:29.409301 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-8pdfd/crc-debug-8kcf8"]
Nov 27 10:20:29 crc kubenswrapper[4971]: I1127 10:20:29.410623 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8pdfd/crc-debug-8kcf8"
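The two "Observed pod startup duration" figures are related by simple arithmetic: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration additionally excludes the image-pull window (lastFinishedPulling minus firstStartedPulling, which the kubelet measures on its monotonic m=+ clock). A sketch of that arithmetic in Go, using the wall-clock timestamps from the must-gather-zl67k entry above:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Timestamps copied from the "Observed pod startup duration" entry above.
	// Go's time.Parse accepts fractional seconds even without them in the layout.
	const layout = "2006-01-02 15:04:05 -0700 MST"
	created, _ := time.Parse(layout, "2025-11-27 10:20:15 +0000 UTC")
	running, _ := time.Parse(layout, "2025-11-27 10:20:29.403298661 +0000 UTC")
	pullStart, _ := time.Parse(layout, "2025-11-27 10:20:15.94625919 +0000 UTC")
	pullEnd, _ := time.Parse(layout, "2025-11-27 10:20:22.769375502 +0000 UTC")

	e2e := running.Sub(created)         // podStartE2EDuration: 14.403298661s
	slo := e2e - pullEnd.Sub(pullStart) // podStartSLOduration: pull time excluded

	// Prints ~7.58s for slo; the last digits differ slightly from the log
	// because the kubelet subtracts the monotonic m=+ offsets, not wall clocks.
	fmt.Println(e2e, slo)
}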
Need to start a new one" pod="openshift-must-gather-8pdfd/crc-debug-8kcf8" Nov 27 10:20:29 crc kubenswrapper[4971]: I1127 10:20:29.565388 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4472e96d-dbc3-4f62-80b1-323ebfcee75f-host\") pod \"crc-debug-8kcf8\" (UID: \"4472e96d-dbc3-4f62-80b1-323ebfcee75f\") " pod="openshift-must-gather-8pdfd/crc-debug-8kcf8" Nov 27 10:20:29 crc kubenswrapper[4971]: I1127 10:20:29.565773 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vz84q\" (UniqueName: \"kubernetes.io/projected/4472e96d-dbc3-4f62-80b1-323ebfcee75f-kube-api-access-vz84q\") pod \"crc-debug-8kcf8\" (UID: \"4472e96d-dbc3-4f62-80b1-323ebfcee75f\") " pod="openshift-must-gather-8pdfd/crc-debug-8kcf8" Nov 27 10:20:29 crc kubenswrapper[4971]: I1127 10:20:29.668034 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4472e96d-dbc3-4f62-80b1-323ebfcee75f-host\") pod \"crc-debug-8kcf8\" (UID: \"4472e96d-dbc3-4f62-80b1-323ebfcee75f\") " pod="openshift-must-gather-8pdfd/crc-debug-8kcf8" Nov 27 10:20:29 crc kubenswrapper[4971]: I1127 10:20:29.668498 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4472e96d-dbc3-4f62-80b1-323ebfcee75f-host\") pod \"crc-debug-8kcf8\" (UID: \"4472e96d-dbc3-4f62-80b1-323ebfcee75f\") " pod="openshift-must-gather-8pdfd/crc-debug-8kcf8" Nov 27 10:20:29 crc kubenswrapper[4971]: I1127 10:20:29.668694 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vz84q\" (UniqueName: \"kubernetes.io/projected/4472e96d-dbc3-4f62-80b1-323ebfcee75f-kube-api-access-vz84q\") pod \"crc-debug-8kcf8\" (UID: \"4472e96d-dbc3-4f62-80b1-323ebfcee75f\") " pod="openshift-must-gather-8pdfd/crc-debug-8kcf8" Nov 27 10:20:29 crc kubenswrapper[4971]: I1127 10:20:29.694881 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vz84q\" (UniqueName: \"kubernetes.io/projected/4472e96d-dbc3-4f62-80b1-323ebfcee75f-kube-api-access-vz84q\") pod \"crc-debug-8kcf8\" (UID: \"4472e96d-dbc3-4f62-80b1-323ebfcee75f\") " pod="openshift-must-gather-8pdfd/crc-debug-8kcf8" Nov 27 10:20:29 crc kubenswrapper[4971]: I1127 10:20:29.735221 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8pdfd/crc-debug-8kcf8" Nov 27 10:20:30 crc kubenswrapper[4971]: I1127 10:20:30.338464 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8pdfd/crc-debug-8kcf8" event={"ID":"4472e96d-dbc3-4f62-80b1-323ebfcee75f","Type":"ContainerStarted","Data":"acdf92fa404d7aa1ec4842ba450888f4a8a2a7a3a00e99ac599a576313a49c5d"} Nov 27 10:20:43 crc kubenswrapper[4971]: I1127 10:20:43.531263 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8pdfd/crc-debug-8kcf8" event={"ID":"4472e96d-dbc3-4f62-80b1-323ebfcee75f","Type":"ContainerStarted","Data":"3c899c7e11051e45089a78a25373c4f1306deb7cfc3d959f8939fec72d7f5ec7"} Nov 27 10:20:43 crc kubenswrapper[4971]: I1127 10:20:43.553258 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-8pdfd/crc-debug-8kcf8" podStartSLOduration=2.050609723 podStartE2EDuration="14.553230819s" podCreationTimestamp="2025-11-27 10:20:29 +0000 UTC" firstStartedPulling="2025-11-27 10:20:29.778776017 +0000 UTC m=+12467.970819955" lastFinishedPulling="2025-11-27 10:20:42.281397143 +0000 UTC m=+12480.473441051" observedRunningTime="2025-11-27 10:20:43.548061544 +0000 UTC m=+12481.740105472" watchObservedRunningTime="2025-11-27 10:20:43.553230819 +0000 UTC m=+12481.745274747" Nov 27 10:20:56 crc kubenswrapper[4971]: I1127 10:20:56.412909 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 10:20:56 crc kubenswrapper[4971]: I1127 10:20:56.413754 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 10:21:26 crc kubenswrapper[4971]: I1127 10:21:26.413191 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 10:21:26 crc kubenswrapper[4971]: I1127 10:21:26.413751 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 10:21:31 crc kubenswrapper[4971]: I1127 10:21:31.193930 4971 generic.go:334] "Generic (PLEG): container finished" podID="4472e96d-dbc3-4f62-80b1-323ebfcee75f" containerID="3c899c7e11051e45089a78a25373c4f1306deb7cfc3d959f8939fec72d7f5ec7" exitCode=0 Nov 27 10:21:31 crc kubenswrapper[4971]: I1127 10:21:31.194084 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8pdfd/crc-debug-8kcf8" event={"ID":"4472e96d-dbc3-4f62-80b1-323ebfcee75f","Type":"ContainerDied","Data":"3c899c7e11051e45089a78a25373c4f1306deb7cfc3d959f8939fec72d7f5ec7"} Nov 27 10:21:32 crc kubenswrapper[4971]: I1127 10:21:32.354790 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8pdfd/crc-debug-8kcf8" Nov 27 10:21:32 crc kubenswrapper[4971]: I1127 10:21:32.432859 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-8pdfd/crc-debug-8kcf8"] Nov 27 10:21:32 crc kubenswrapper[4971]: I1127 10:21:32.444889 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-8pdfd/crc-debug-8kcf8"] Nov 27 10:21:32 crc kubenswrapper[4971]: I1127 10:21:32.445926 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4472e96d-dbc3-4f62-80b1-323ebfcee75f-host\") pod \"4472e96d-dbc3-4f62-80b1-323ebfcee75f\" (UID: \"4472e96d-dbc3-4f62-80b1-323ebfcee75f\") " Nov 27 10:21:32 crc kubenswrapper[4971]: I1127 10:21:32.446253 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4472e96d-dbc3-4f62-80b1-323ebfcee75f-host" (OuterVolumeSpecName: "host") pod "4472e96d-dbc3-4f62-80b1-323ebfcee75f" (UID: "4472e96d-dbc3-4f62-80b1-323ebfcee75f"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 10:21:32 crc kubenswrapper[4971]: I1127 10:21:32.446299 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vz84q\" (UniqueName: \"kubernetes.io/projected/4472e96d-dbc3-4f62-80b1-323ebfcee75f-kube-api-access-vz84q\") pod \"4472e96d-dbc3-4f62-80b1-323ebfcee75f\" (UID: \"4472e96d-dbc3-4f62-80b1-323ebfcee75f\") " Nov 27 10:21:32 crc kubenswrapper[4971]: I1127 10:21:32.448158 4971 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4472e96d-dbc3-4f62-80b1-323ebfcee75f-host\") on node \"crc\" DevicePath \"\"" Nov 27 10:21:32 crc kubenswrapper[4971]: I1127 10:21:32.464761 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4472e96d-dbc3-4f62-80b1-323ebfcee75f-kube-api-access-vz84q" (OuterVolumeSpecName: "kube-api-access-vz84q") pod "4472e96d-dbc3-4f62-80b1-323ebfcee75f" (UID: "4472e96d-dbc3-4f62-80b1-323ebfcee75f"). InnerVolumeSpecName "kube-api-access-vz84q". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:21:32 crc kubenswrapper[4971]: I1127 10:21:32.550309 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vz84q\" (UniqueName: \"kubernetes.io/projected/4472e96d-dbc3-4f62-80b1-323ebfcee75f-kube-api-access-vz84q\") on node \"crc\" DevicePath \"\"" Nov 27 10:21:32 crc kubenswrapper[4971]: I1127 10:21:32.563480 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4472e96d-dbc3-4f62-80b1-323ebfcee75f" path="/var/lib/kubelet/pods/4472e96d-dbc3-4f62-80b1-323ebfcee75f/volumes" Nov 27 10:21:33 crc kubenswrapper[4971]: I1127 10:21:33.223582 4971 scope.go:117] "RemoveContainer" containerID="3c899c7e11051e45089a78a25373c4f1306deb7cfc3d959f8939fec72d7f5ec7" Nov 27 10:21:33 crc kubenswrapper[4971]: I1127 10:21:33.223673 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8pdfd/crc-debug-8kcf8" Nov 27 10:21:33 crc kubenswrapper[4971]: I1127 10:21:33.687209 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-8pdfd/crc-debug-gn5hs"] Nov 27 10:21:33 crc kubenswrapper[4971]: E1127 10:21:33.688189 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4472e96d-dbc3-4f62-80b1-323ebfcee75f" containerName="container-00" Nov 27 10:21:33 crc kubenswrapper[4971]: I1127 10:21:33.688205 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="4472e96d-dbc3-4f62-80b1-323ebfcee75f" containerName="container-00" Nov 27 10:21:33 crc kubenswrapper[4971]: I1127 10:21:33.688448 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="4472e96d-dbc3-4f62-80b1-323ebfcee75f" containerName="container-00" Nov 27 10:21:33 crc kubenswrapper[4971]: I1127 10:21:33.689366 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8pdfd/crc-debug-gn5hs" Nov 27 10:21:33 crc kubenswrapper[4971]: I1127 10:21:33.783647 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d-host\") pod \"crc-debug-gn5hs\" (UID: \"7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d\") " pod="openshift-must-gather-8pdfd/crc-debug-gn5hs" Nov 27 10:21:33 crc kubenswrapper[4971]: I1127 10:21:33.783817 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8wcv\" (UniqueName: \"kubernetes.io/projected/7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d-kube-api-access-h8wcv\") pod \"crc-debug-gn5hs\" (UID: \"7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d\") " pod="openshift-must-gather-8pdfd/crc-debug-gn5hs" Nov 27 10:21:33 crc kubenswrapper[4971]: I1127 10:21:33.886574 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d-host\") pod \"crc-debug-gn5hs\" (UID: \"7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d\") " pod="openshift-must-gather-8pdfd/crc-debug-gn5hs" Nov 27 10:21:33 crc kubenswrapper[4971]: I1127 10:21:33.886708 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d-host\") pod \"crc-debug-gn5hs\" (UID: \"7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d\") " pod="openshift-must-gather-8pdfd/crc-debug-gn5hs" Nov 27 10:21:33 crc kubenswrapper[4971]: I1127 10:21:33.886736 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8wcv\" (UniqueName: \"kubernetes.io/projected/7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d-kube-api-access-h8wcv\") pod \"crc-debug-gn5hs\" (UID: \"7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d\") " pod="openshift-must-gather-8pdfd/crc-debug-gn5hs" Nov 27 10:21:33 crc kubenswrapper[4971]: I1127 10:21:33.919520 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8wcv\" (UniqueName: \"kubernetes.io/projected/7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d-kube-api-access-h8wcv\") pod \"crc-debug-gn5hs\" (UID: \"7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d\") " pod="openshift-must-gather-8pdfd/crc-debug-gn5hs" Nov 27 10:21:34 crc kubenswrapper[4971]: I1127 10:21:34.012015 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8pdfd/crc-debug-gn5hs" Nov 27 10:21:34 crc kubenswrapper[4971]: I1127 10:21:34.238101 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8pdfd/crc-debug-gn5hs" event={"ID":"7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d","Type":"ContainerStarted","Data":"764b11231a01293e62e103c6506ef7a4156953d6e925db175d5f647d49041981"} Nov 27 10:21:35 crc kubenswrapper[4971]: I1127 10:21:35.254331 4971 generic.go:334] "Generic (PLEG): container finished" podID="7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d" containerID="093a5e3461c063890e99c864249a67c03f773ee28cacf137ac8fc09b76be22b4" exitCode=0 Nov 27 10:21:35 crc kubenswrapper[4971]: I1127 10:21:35.254572 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8pdfd/crc-debug-gn5hs" event={"ID":"7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d","Type":"ContainerDied","Data":"093a5e3461c063890e99c864249a67c03f773ee28cacf137ac8fc09b76be22b4"} Nov 27 10:21:35 crc kubenswrapper[4971]: I1127 10:21:35.772993 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-8pdfd/crc-debug-gn5hs"] Nov 27 10:21:35 crc kubenswrapper[4971]: I1127 10:21:35.788259 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-8pdfd/crc-debug-gn5hs"] Nov 27 10:21:36 crc kubenswrapper[4971]: I1127 10:21:36.403134 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8pdfd/crc-debug-gn5hs" Nov 27 10:21:36 crc kubenswrapper[4971]: I1127 10:21:36.457905 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8wcv\" (UniqueName: \"kubernetes.io/projected/7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d-kube-api-access-h8wcv\") pod \"7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d\" (UID: \"7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d\") " Nov 27 10:21:36 crc kubenswrapper[4971]: I1127 10:21:36.458216 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d-host\") pod \"7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d\" (UID: \"7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d\") " Nov 27 10:21:36 crc kubenswrapper[4971]: I1127 10:21:36.459585 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d-host" (OuterVolumeSpecName: "host") pod "7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d" (UID: "7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 10:21:36 crc kubenswrapper[4971]: I1127 10:21:36.467402 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d-kube-api-access-h8wcv" (OuterVolumeSpecName: "kube-api-access-h8wcv") pod "7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d" (UID: "7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d"). InnerVolumeSpecName "kube-api-access-h8wcv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:21:36 crc kubenswrapper[4971]: I1127 10:21:36.561634 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8wcv\" (UniqueName: \"kubernetes.io/projected/7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d-kube-api-access-h8wcv\") on node \"crc\" DevicePath \"\"" Nov 27 10:21:36 crc kubenswrapper[4971]: I1127 10:21:36.561677 4971 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d-host\") on node \"crc\" DevicePath \"\"" Nov 27 10:21:36 crc kubenswrapper[4971]: I1127 10:21:36.566243 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d" path="/var/lib/kubelet/pods/7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d/volumes" Nov 27 10:21:36 crc kubenswrapper[4971]: I1127 10:21:36.964808 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-8pdfd/crc-debug-jgfkb"] Nov 27 10:21:36 crc kubenswrapper[4971]: E1127 10:21:36.965756 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d" containerName="container-00" Nov 27 10:21:36 crc kubenswrapper[4971]: I1127 10:21:36.965780 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d" containerName="container-00" Nov 27 10:21:36 crc kubenswrapper[4971]: I1127 10:21:36.966089 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e2605b4-a048-4b8f-b6cd-5fbf02d48f8d" containerName="container-00" Nov 27 10:21:36 crc kubenswrapper[4971]: I1127 10:21:36.967009 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8pdfd/crc-debug-jgfkb" Nov 27 10:21:37 crc kubenswrapper[4971]: I1127 10:21:37.074417 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1dd42aed-7e00-4fc5-9e46-95953fe4c15b-host\") pod \"crc-debug-jgfkb\" (UID: \"1dd42aed-7e00-4fc5-9e46-95953fe4c15b\") " pod="openshift-must-gather-8pdfd/crc-debug-jgfkb" Nov 27 10:21:37 crc kubenswrapper[4971]: I1127 10:21:37.074920 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjnzs\" (UniqueName: \"kubernetes.io/projected/1dd42aed-7e00-4fc5-9e46-95953fe4c15b-kube-api-access-tjnzs\") pod \"crc-debug-jgfkb\" (UID: \"1dd42aed-7e00-4fc5-9e46-95953fe4c15b\") " pod="openshift-must-gather-8pdfd/crc-debug-jgfkb" Nov 27 10:21:37 crc kubenswrapper[4971]: I1127 10:21:37.177164 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1dd42aed-7e00-4fc5-9e46-95953fe4c15b-host\") pod \"crc-debug-jgfkb\" (UID: \"1dd42aed-7e00-4fc5-9e46-95953fe4c15b\") " pod="openshift-must-gather-8pdfd/crc-debug-jgfkb" Nov 27 10:21:37 crc kubenswrapper[4971]: I1127 10:21:37.177291 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjnzs\" (UniqueName: \"kubernetes.io/projected/1dd42aed-7e00-4fc5-9e46-95953fe4c15b-kube-api-access-tjnzs\") pod \"crc-debug-jgfkb\" (UID: \"1dd42aed-7e00-4fc5-9e46-95953fe4c15b\") " pod="openshift-must-gather-8pdfd/crc-debug-jgfkb" Nov 27 10:21:37 crc kubenswrapper[4971]: I1127 10:21:37.177390 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: 
\"kubernetes.io/host-path/1dd42aed-7e00-4fc5-9e46-95953fe4c15b-host\") pod \"crc-debug-jgfkb\" (UID: \"1dd42aed-7e00-4fc5-9e46-95953fe4c15b\") " pod="openshift-must-gather-8pdfd/crc-debug-jgfkb" Nov 27 10:21:37 crc kubenswrapper[4971]: I1127 10:21:37.198372 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjnzs\" (UniqueName: \"kubernetes.io/projected/1dd42aed-7e00-4fc5-9e46-95953fe4c15b-kube-api-access-tjnzs\") pod \"crc-debug-jgfkb\" (UID: \"1dd42aed-7e00-4fc5-9e46-95953fe4c15b\") " pod="openshift-must-gather-8pdfd/crc-debug-jgfkb" Nov 27 10:21:37 crc kubenswrapper[4971]: I1127 10:21:37.287994 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8pdfd/crc-debug-jgfkb" Nov 27 10:21:37 crc kubenswrapper[4971]: I1127 10:21:37.289206 4971 scope.go:117] "RemoveContainer" containerID="093a5e3461c063890e99c864249a67c03f773ee28cacf137ac8fc09b76be22b4" Nov 27 10:21:37 crc kubenswrapper[4971]: I1127 10:21:37.289269 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8pdfd/crc-debug-gn5hs" Nov 27 10:21:38 crc kubenswrapper[4971]: I1127 10:21:38.305609 4971 generic.go:334] "Generic (PLEG): container finished" podID="1dd42aed-7e00-4fc5-9e46-95953fe4c15b" containerID="bab44e10b58b9bf2eaaaa7ba3082a4b9bc3ac6abef6ab7d443724ecd29efc341" exitCode=0 Nov 27 10:21:38 crc kubenswrapper[4971]: I1127 10:21:38.305661 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8pdfd/crc-debug-jgfkb" event={"ID":"1dd42aed-7e00-4fc5-9e46-95953fe4c15b","Type":"ContainerDied","Data":"bab44e10b58b9bf2eaaaa7ba3082a4b9bc3ac6abef6ab7d443724ecd29efc341"} Nov 27 10:21:38 crc kubenswrapper[4971]: I1127 10:21:38.305929 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8pdfd/crc-debug-jgfkb" event={"ID":"1dd42aed-7e00-4fc5-9e46-95953fe4c15b","Type":"ContainerStarted","Data":"e7ce2865fa0945d3b1a0572751877f7e851f8b097612b472e0d28af1f2d073ff"} Nov 27 10:21:38 crc kubenswrapper[4971]: I1127 10:21:38.373557 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-8pdfd/crc-debug-jgfkb"] Nov 27 10:21:38 crc kubenswrapper[4971]: I1127 10:21:38.392219 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-8pdfd/crc-debug-jgfkb"] Nov 27 10:21:39 crc kubenswrapper[4971]: I1127 10:21:39.435297 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8pdfd/crc-debug-jgfkb" Nov 27 10:21:39 crc kubenswrapper[4971]: I1127 10:21:39.534092 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjnzs\" (UniqueName: \"kubernetes.io/projected/1dd42aed-7e00-4fc5-9e46-95953fe4c15b-kube-api-access-tjnzs\") pod \"1dd42aed-7e00-4fc5-9e46-95953fe4c15b\" (UID: \"1dd42aed-7e00-4fc5-9e46-95953fe4c15b\") " Nov 27 10:21:39 crc kubenswrapper[4971]: I1127 10:21:39.534508 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1dd42aed-7e00-4fc5-9e46-95953fe4c15b-host\") pod \"1dd42aed-7e00-4fc5-9e46-95953fe4c15b\" (UID: \"1dd42aed-7e00-4fc5-9e46-95953fe4c15b\") " Nov 27 10:21:39 crc kubenswrapper[4971]: I1127 10:21:39.535077 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1dd42aed-7e00-4fc5-9e46-95953fe4c15b-host" (OuterVolumeSpecName: "host") pod "1dd42aed-7e00-4fc5-9e46-95953fe4c15b" (UID: "1dd42aed-7e00-4fc5-9e46-95953fe4c15b"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 27 10:21:39 crc kubenswrapper[4971]: I1127 10:21:39.545071 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1dd42aed-7e00-4fc5-9e46-95953fe4c15b-kube-api-access-tjnzs" (OuterVolumeSpecName: "kube-api-access-tjnzs") pod "1dd42aed-7e00-4fc5-9e46-95953fe4c15b" (UID: "1dd42aed-7e00-4fc5-9e46-95953fe4c15b"). InnerVolumeSpecName "kube-api-access-tjnzs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:21:39 crc kubenswrapper[4971]: I1127 10:21:39.637339 4971 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1dd42aed-7e00-4fc5-9e46-95953fe4c15b-host\") on node \"crc\" DevicePath \"\"" Nov 27 10:21:39 crc kubenswrapper[4971]: I1127 10:21:39.637376 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjnzs\" (UniqueName: \"kubernetes.io/projected/1dd42aed-7e00-4fc5-9e46-95953fe4c15b-kube-api-access-tjnzs\") on node \"crc\" DevicePath \"\"" Nov 27 10:21:40 crc kubenswrapper[4971]: I1127 10:21:40.334132 4971 scope.go:117] "RemoveContainer" containerID="bab44e10b58b9bf2eaaaa7ba3082a4b9bc3ac6abef6ab7d443724ecd29efc341" Nov 27 10:21:40 crc kubenswrapper[4971]: I1127 10:21:40.334682 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8pdfd/crc-debug-jgfkb" Nov 27 10:21:40 crc kubenswrapper[4971]: I1127 10:21:40.568746 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1dd42aed-7e00-4fc5-9e46-95953fe4c15b" path="/var/lib/kubelet/pods/1dd42aed-7e00-4fc5-9e46-95953fe4c15b/volumes" Nov 27 10:21:56 crc kubenswrapper[4971]: I1127 10:21:56.412934 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 10:21:56 crc kubenswrapper[4971]: I1127 10:21:56.413492 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 10:21:56 crc kubenswrapper[4971]: I1127 10:21:56.413588 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 10:21:56 crc kubenswrapper[4971]: I1127 10:21:56.414744 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 10:21:56 crc kubenswrapper[4971]: I1127 10:21:56.414836 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" gracePeriod=600 Nov 27 10:21:56 crc kubenswrapper[4971]: E1127 10:21:56.563474 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:21:56 crc kubenswrapper[4971]: I1127 10:21:56.566399 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" exitCode=0 Nov 27 10:21:56 crc kubenswrapper[4971]: I1127 10:21:56.569871 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa"} Nov 27 10:21:56 crc kubenswrapper[4971]: I1127 10:21:56.569956 4971 scope.go:117] "RemoveContainer" containerID="d9387d3b303ffc5dc8011c228415b19d694558cca04b8892441826aac2c78d22" Nov 27 10:21:56 crc kubenswrapper[4971]: E1127 10:21:56.624011 4971 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ab8c2ef_d82b_4396_919d_8550cc2e24d7.slice/crio-116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ab8c2ef_d82b_4396_919d_8550cc2e24d7.slice/crio-conmon-116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa.scope\": RecentStats: unable to find data in memory cache]" Nov 27 10:21:57 crc kubenswrapper[4971]: I1127 10:21:57.584726 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" Nov 27 10:21:57 crc kubenswrapper[4971]: E1127 10:21:57.585382 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:22:12 crc kubenswrapper[4971]: I1127 10:22:12.561571 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" Nov 27 10:22:12 crc kubenswrapper[4971]: E1127 10:22:12.562434 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:22:15 crc kubenswrapper[4971]: I1127 10:22:15.137516 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2j8tb"] Nov 27 10:22:15 crc kubenswrapper[4971]: E1127 10:22:15.140139 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dd42aed-7e00-4fc5-9e46-95953fe4c15b" containerName="container-00" Nov 27 10:22:15 crc kubenswrapper[4971]: I1127 10:22:15.140279 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dd42aed-7e00-4fc5-9e46-95953fe4c15b" containerName="container-00" Nov 27 10:22:15 crc kubenswrapper[4971]: I1127 10:22:15.140760 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="1dd42aed-7e00-4fc5-9e46-95953fe4c15b" containerName="container-00" Nov 27 10:22:15 crc kubenswrapper[4971]: I1127 10:22:15.143187 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2j8tb" Nov 27 10:22:15 crc kubenswrapper[4971]: I1127 10:22:15.166199 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2j8tb"] Nov 27 10:22:15 crc kubenswrapper[4971]: I1127 10:22:15.274624 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7272175-0ae5-4c98-b3f4-72b4a719e7fa-catalog-content\") pod \"redhat-marketplace-2j8tb\" (UID: \"b7272175-0ae5-4c98-b3f4-72b4a719e7fa\") " pod="openshift-marketplace/redhat-marketplace-2j8tb" Nov 27 10:22:15 crc kubenswrapper[4971]: I1127 10:22:15.274800 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kzz8\" (UniqueName: \"kubernetes.io/projected/b7272175-0ae5-4c98-b3f4-72b4a719e7fa-kube-api-access-7kzz8\") pod \"redhat-marketplace-2j8tb\" (UID: \"b7272175-0ae5-4c98-b3f4-72b4a719e7fa\") " pod="openshift-marketplace/redhat-marketplace-2j8tb" Nov 27 10:22:15 crc kubenswrapper[4971]: I1127 10:22:15.275183 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7272175-0ae5-4c98-b3f4-72b4a719e7fa-utilities\") pod \"redhat-marketplace-2j8tb\" (UID: \"b7272175-0ae5-4c98-b3f4-72b4a719e7fa\") " pod="openshift-marketplace/redhat-marketplace-2j8tb" Nov 27 10:22:15 crc kubenswrapper[4971]: I1127 10:22:15.378423 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7272175-0ae5-4c98-b3f4-72b4a719e7fa-utilities\") pod \"redhat-marketplace-2j8tb\" (UID: \"b7272175-0ae5-4c98-b3f4-72b4a719e7fa\") " pod="openshift-marketplace/redhat-marketplace-2j8tb" Nov 27 10:22:15 crc kubenswrapper[4971]: I1127 10:22:15.378918 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7272175-0ae5-4c98-b3f4-72b4a719e7fa-catalog-content\") pod \"redhat-marketplace-2j8tb\" (UID: \"b7272175-0ae5-4c98-b3f4-72b4a719e7fa\") " pod="openshift-marketplace/redhat-marketplace-2j8tb" Nov 27 10:22:15 crc kubenswrapper[4971]: I1127 10:22:15.379053 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7272175-0ae5-4c98-b3f4-72b4a719e7fa-utilities\") pod \"redhat-marketplace-2j8tb\" (UID: \"b7272175-0ae5-4c98-b3f4-72b4a719e7fa\") " pod="openshift-marketplace/redhat-marketplace-2j8tb" Nov 27 10:22:15 crc kubenswrapper[4971]: I1127 10:22:15.379498 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7272175-0ae5-4c98-b3f4-72b4a719e7fa-catalog-content\") pod \"redhat-marketplace-2j8tb\" (UID: \"b7272175-0ae5-4c98-b3f4-72b4a719e7fa\") " pod="openshift-marketplace/redhat-marketplace-2j8tb" Nov 27 10:22:15 crc kubenswrapper[4971]: I1127 10:22:15.380043 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kzz8\" (UniqueName: \"kubernetes.io/projected/b7272175-0ae5-4c98-b3f4-72b4a719e7fa-kube-api-access-7kzz8\") pod \"redhat-marketplace-2j8tb\" (UID: \"b7272175-0ae5-4c98-b3f4-72b4a719e7fa\") " pod="openshift-marketplace/redhat-marketplace-2j8tb" Nov 27 10:22:15 crc kubenswrapper[4971]: I1127 10:22:15.401037 4971 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-7kzz8\" (UniqueName: \"kubernetes.io/projected/b7272175-0ae5-4c98-b3f4-72b4a719e7fa-kube-api-access-7kzz8\") pod \"redhat-marketplace-2j8tb\" (UID: \"b7272175-0ae5-4c98-b3f4-72b4a719e7fa\") " pod="openshift-marketplace/redhat-marketplace-2j8tb" Nov 27 10:22:15 crc kubenswrapper[4971]: I1127 10:22:15.483038 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2j8tb" Nov 27 10:22:16 crc kubenswrapper[4971]: I1127 10:22:16.083771 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2j8tb"] Nov 27 10:22:16 crc kubenswrapper[4971]: I1127 10:22:16.883564 4971 generic.go:334] "Generic (PLEG): container finished" podID="b7272175-0ae5-4c98-b3f4-72b4a719e7fa" containerID="4c54dff41aa73ce18f9165721ccad59959e5abb98557880f995e303b94aa220d" exitCode=0 Nov 27 10:22:16 crc kubenswrapper[4971]: I1127 10:22:16.883898 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2j8tb" event={"ID":"b7272175-0ae5-4c98-b3f4-72b4a719e7fa","Type":"ContainerDied","Data":"4c54dff41aa73ce18f9165721ccad59959e5abb98557880f995e303b94aa220d"} Nov 27 10:22:16 crc kubenswrapper[4971]: I1127 10:22:16.883936 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2j8tb" event={"ID":"b7272175-0ae5-4c98-b3f4-72b4a719e7fa","Type":"ContainerStarted","Data":"851ddc8e6aacd4fb16ee428a0cdc70f0267de48d1ddfc7ee7a9def269512fd55"} Nov 27 10:22:18 crc kubenswrapper[4971]: I1127 10:22:18.910862 4971 generic.go:334] "Generic (PLEG): container finished" podID="b7272175-0ae5-4c98-b3f4-72b4a719e7fa" containerID="488fa929615ddf9d7995de19de56cdc7e3f39a16b59fbf5018f8f32db8d6a69f" exitCode=0 Nov 27 10:22:18 crc kubenswrapper[4971]: I1127 10:22:18.910921 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2j8tb" event={"ID":"b7272175-0ae5-4c98-b3f4-72b4a719e7fa","Type":"ContainerDied","Data":"488fa929615ddf9d7995de19de56cdc7e3f39a16b59fbf5018f8f32db8d6a69f"} Nov 27 10:22:19 crc kubenswrapper[4971]: I1127 10:22:19.926250 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2j8tb" event={"ID":"b7272175-0ae5-4c98-b3f4-72b4a719e7fa","Type":"ContainerStarted","Data":"c94d1f6f9f6d51c8b1d801f7a0a3540437c0ebcb68540c1b30e85e3a7c370d98"} Nov 27 10:22:19 crc kubenswrapper[4971]: I1127 10:22:19.954362 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2j8tb" podStartSLOduration=2.360535958 podStartE2EDuration="4.954340313s" podCreationTimestamp="2025-11-27 10:22:15 +0000 UTC" firstStartedPulling="2025-11-27 10:22:16.88619339 +0000 UTC m=+12575.078237318" lastFinishedPulling="2025-11-27 10:22:19.479997755 +0000 UTC m=+12577.672041673" observedRunningTime="2025-11-27 10:22:19.952048129 +0000 UTC m=+12578.144092067" watchObservedRunningTime="2025-11-27 10:22:19.954340313 +0000 UTC m=+12578.146384231" Nov 27 10:22:24 crc kubenswrapper[4971]: I1127 10:22:24.550780 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" Nov 27 10:22:24 crc kubenswrapper[4971]: E1127 10:22:24.551834 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Nov 27 10:22:25 crc kubenswrapper[4971]: I1127 10:22:25.483949 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2j8tb"
Nov 27 10:22:25 crc kubenswrapper[4971]: I1127 10:22:25.484380 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2j8tb"
Nov 27 10:22:25 crc kubenswrapper[4971]: I1127 10:22:25.557739 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2j8tb"
Nov 27 10:22:26 crc kubenswrapper[4971]: I1127 10:22:26.057032 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2j8tb"
Nov 27 10:22:26 crc kubenswrapper[4971]: I1127 10:22:26.114113 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2j8tb"]
Nov 27 10:22:28 crc kubenswrapper[4971]: I1127 10:22:28.021009 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2j8tb" podUID="b7272175-0ae5-4c98-b3f4-72b4a719e7fa" containerName="registry-server" containerID="cri-o://c94d1f6f9f6d51c8b1d801f7a0a3540437c0ebcb68540c1b30e85e3a7c370d98" gracePeriod=2
Nov 27 10:22:28 crc kubenswrapper[4971]: I1127 10:22:28.542134 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2j8tb"
Nov 27 10:22:28 crc kubenswrapper[4971]: I1127 10:22:28.640556 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7272175-0ae5-4c98-b3f4-72b4a719e7fa-utilities\") pod \"b7272175-0ae5-4c98-b3f4-72b4a719e7fa\" (UID: \"b7272175-0ae5-4c98-b3f4-72b4a719e7fa\") "
Nov 27 10:22:28 crc kubenswrapper[4971]: I1127 10:22:28.640690 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7kzz8\" (UniqueName: \"kubernetes.io/projected/b7272175-0ae5-4c98-b3f4-72b4a719e7fa-kube-api-access-7kzz8\") pod \"b7272175-0ae5-4c98-b3f4-72b4a719e7fa\" (UID: \"b7272175-0ae5-4c98-b3f4-72b4a719e7fa\") "
Nov 27 10:22:28 crc kubenswrapper[4971]: I1127 10:22:28.640884 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7272175-0ae5-4c98-b3f4-72b4a719e7fa-catalog-content\") pod \"b7272175-0ae5-4c98-b3f4-72b4a719e7fa\" (UID: \"b7272175-0ae5-4c98-b3f4-72b4a719e7fa\") "
Nov 27 10:22:28 crc kubenswrapper[4971]: I1127 10:22:28.642055 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7272175-0ae5-4c98-b3f4-72b4a719e7fa-utilities" (OuterVolumeSpecName: "utilities") pod "b7272175-0ae5-4c98-b3f4-72b4a719e7fa" (UID: "b7272175-0ae5-4c98-b3f4-72b4a719e7fa"). InnerVolumeSpecName "utilities".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:22:28 crc kubenswrapper[4971]: I1127 10:22:28.648857 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7272175-0ae5-4c98-b3f4-72b4a719e7fa-kube-api-access-7kzz8" (OuterVolumeSpecName: "kube-api-access-7kzz8") pod "b7272175-0ae5-4c98-b3f4-72b4a719e7fa" (UID: "b7272175-0ae5-4c98-b3f4-72b4a719e7fa"). InnerVolumeSpecName "kube-api-access-7kzz8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:22:28 crc kubenswrapper[4971]: I1127 10:22:28.671134 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7272175-0ae5-4c98-b3f4-72b4a719e7fa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b7272175-0ae5-4c98-b3f4-72b4a719e7fa" (UID: "b7272175-0ae5-4c98-b3f4-72b4a719e7fa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:22:28 crc kubenswrapper[4971]: I1127 10:22:28.744492 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7272175-0ae5-4c98-b3f4-72b4a719e7fa-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 10:22:28 crc kubenswrapper[4971]: I1127 10:22:28.744903 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7kzz8\" (UniqueName: \"kubernetes.io/projected/b7272175-0ae5-4c98-b3f4-72b4a719e7fa-kube-api-access-7kzz8\") on node \"crc\" DevicePath \"\"" Nov 27 10:22:28 crc kubenswrapper[4971]: I1127 10:22:28.744922 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7272175-0ae5-4c98-b3f4-72b4a719e7fa-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 10:22:29 crc kubenswrapper[4971]: I1127 10:22:29.034039 4971 generic.go:334] "Generic (PLEG): container finished" podID="b7272175-0ae5-4c98-b3f4-72b4a719e7fa" containerID="c94d1f6f9f6d51c8b1d801f7a0a3540437c0ebcb68540c1b30e85e3a7c370d98" exitCode=0 Nov 27 10:22:29 crc kubenswrapper[4971]: I1127 10:22:29.034095 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2j8tb" event={"ID":"b7272175-0ae5-4c98-b3f4-72b4a719e7fa","Type":"ContainerDied","Data":"c94d1f6f9f6d51c8b1d801f7a0a3540437c0ebcb68540c1b30e85e3a7c370d98"} Nov 27 10:22:29 crc kubenswrapper[4971]: I1127 10:22:29.034142 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2j8tb" event={"ID":"b7272175-0ae5-4c98-b3f4-72b4a719e7fa","Type":"ContainerDied","Data":"851ddc8e6aacd4fb16ee428a0cdc70f0267de48d1ddfc7ee7a9def269512fd55"} Nov 27 10:22:29 crc kubenswrapper[4971]: I1127 10:22:29.034166 4971 scope.go:117] "RemoveContainer" containerID="c94d1f6f9f6d51c8b1d801f7a0a3540437c0ebcb68540c1b30e85e3a7c370d98" Nov 27 10:22:29 crc kubenswrapper[4971]: I1127 10:22:29.034369 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2j8tb" Nov 27 10:22:29 crc kubenswrapper[4971]: I1127 10:22:29.062501 4971 scope.go:117] "RemoveContainer" containerID="488fa929615ddf9d7995de19de56cdc7e3f39a16b59fbf5018f8f32db8d6a69f" Nov 27 10:22:29 crc kubenswrapper[4971]: I1127 10:22:29.095597 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2j8tb"] Nov 27 10:22:29 crc kubenswrapper[4971]: I1127 10:22:29.117369 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2j8tb"] Nov 27 10:22:29 crc kubenswrapper[4971]: I1127 10:22:29.122179 4971 scope.go:117] "RemoveContainer" containerID="4c54dff41aa73ce18f9165721ccad59959e5abb98557880f995e303b94aa220d" Nov 27 10:22:29 crc kubenswrapper[4971]: I1127 10:22:29.212826 4971 scope.go:117] "RemoveContainer" containerID="c94d1f6f9f6d51c8b1d801f7a0a3540437c0ebcb68540c1b30e85e3a7c370d98" Nov 27 10:22:29 crc kubenswrapper[4971]: E1127 10:22:29.216723 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c94d1f6f9f6d51c8b1d801f7a0a3540437c0ebcb68540c1b30e85e3a7c370d98\": container with ID starting with c94d1f6f9f6d51c8b1d801f7a0a3540437c0ebcb68540c1b30e85e3a7c370d98 not found: ID does not exist" containerID="c94d1f6f9f6d51c8b1d801f7a0a3540437c0ebcb68540c1b30e85e3a7c370d98" Nov 27 10:22:29 crc kubenswrapper[4971]: I1127 10:22:29.216784 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c94d1f6f9f6d51c8b1d801f7a0a3540437c0ebcb68540c1b30e85e3a7c370d98"} err="failed to get container status \"c94d1f6f9f6d51c8b1d801f7a0a3540437c0ebcb68540c1b30e85e3a7c370d98\": rpc error: code = NotFound desc = could not find container \"c94d1f6f9f6d51c8b1d801f7a0a3540437c0ebcb68540c1b30e85e3a7c370d98\": container with ID starting with c94d1f6f9f6d51c8b1d801f7a0a3540437c0ebcb68540c1b30e85e3a7c370d98 not found: ID does not exist" Nov 27 10:22:29 crc kubenswrapper[4971]: I1127 10:22:29.216819 4971 scope.go:117] "RemoveContainer" containerID="488fa929615ddf9d7995de19de56cdc7e3f39a16b59fbf5018f8f32db8d6a69f" Nov 27 10:22:29 crc kubenswrapper[4971]: E1127 10:22:29.218156 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"488fa929615ddf9d7995de19de56cdc7e3f39a16b59fbf5018f8f32db8d6a69f\": container with ID starting with 488fa929615ddf9d7995de19de56cdc7e3f39a16b59fbf5018f8f32db8d6a69f not found: ID does not exist" containerID="488fa929615ddf9d7995de19de56cdc7e3f39a16b59fbf5018f8f32db8d6a69f" Nov 27 10:22:29 crc kubenswrapper[4971]: I1127 10:22:29.218203 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"488fa929615ddf9d7995de19de56cdc7e3f39a16b59fbf5018f8f32db8d6a69f"} err="failed to get container status \"488fa929615ddf9d7995de19de56cdc7e3f39a16b59fbf5018f8f32db8d6a69f\": rpc error: code = NotFound desc = could not find container \"488fa929615ddf9d7995de19de56cdc7e3f39a16b59fbf5018f8f32db8d6a69f\": container with ID starting with 488fa929615ddf9d7995de19de56cdc7e3f39a16b59fbf5018f8f32db8d6a69f not found: ID does not exist" Nov 27 10:22:29 crc kubenswrapper[4971]: I1127 10:22:29.218221 4971 scope.go:117] "RemoveContainer" containerID="4c54dff41aa73ce18f9165721ccad59959e5abb98557880f995e303b94aa220d" Nov 27 10:22:29 crc kubenswrapper[4971]: E1127 10:22:29.223099 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"4c54dff41aa73ce18f9165721ccad59959e5abb98557880f995e303b94aa220d\": container with ID starting with 4c54dff41aa73ce18f9165721ccad59959e5abb98557880f995e303b94aa220d not found: ID does not exist" containerID="4c54dff41aa73ce18f9165721ccad59959e5abb98557880f995e303b94aa220d" Nov 27 10:22:29 crc kubenswrapper[4971]: I1127 10:22:29.223161 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c54dff41aa73ce18f9165721ccad59959e5abb98557880f995e303b94aa220d"} err="failed to get container status \"4c54dff41aa73ce18f9165721ccad59959e5abb98557880f995e303b94aa220d\": rpc error: code = NotFound desc = could not find container \"4c54dff41aa73ce18f9165721ccad59959e5abb98557880f995e303b94aa220d\": container with ID starting with 4c54dff41aa73ce18f9165721ccad59959e5abb98557880f995e303b94aa220d not found: ID does not exist" Nov 27 10:22:30 crc kubenswrapper[4971]: I1127 10:22:30.564872 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7272175-0ae5-4c98-b3f4-72b4a719e7fa" path="/var/lib/kubelet/pods/b7272175-0ae5-4c98-b3f4-72b4a719e7fa/volumes" Nov 27 10:22:36 crc kubenswrapper[4971]: I1127 10:22:36.551297 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" Nov 27 10:22:36 crc kubenswrapper[4971]: E1127 10:22:36.552500 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:22:49 crc kubenswrapper[4971]: I1127 10:22:49.551839 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" Nov 27 10:22:49 crc kubenswrapper[4971]: E1127 10:22:49.552685 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.303479 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6kqjs"] Nov 27 10:22:53 crc kubenswrapper[4971]: E1127 10:22:53.304698 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7272175-0ae5-4c98-b3f4-72b4a719e7fa" containerName="registry-server" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.304716 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7272175-0ae5-4c98-b3f4-72b4a719e7fa" containerName="registry-server" Nov 27 10:22:53 crc kubenswrapper[4971]: E1127 10:22:53.304744 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7272175-0ae5-4c98-b3f4-72b4a719e7fa" containerName="extract-utilities" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.304754 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7272175-0ae5-4c98-b3f4-72b4a719e7fa" containerName="extract-utilities" Nov 27 10:22:53 crc kubenswrapper[4971]: E1127 
10:22:53.304769 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7272175-0ae5-4c98-b3f4-72b4a719e7fa" containerName="extract-content" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.304775 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7272175-0ae5-4c98-b3f4-72b4a719e7fa" containerName="extract-content" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.305057 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7272175-0ae5-4c98-b3f4-72b4a719e7fa" containerName="registry-server" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.307035 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6kqjs" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.323590 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6kqjs"] Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.442469 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f28lr\" (UniqueName: \"kubernetes.io/projected/fecd00ab-7896-44cd-b47f-ec35f3574d12-kube-api-access-f28lr\") pod \"redhat-operators-6kqjs\" (UID: \"fecd00ab-7896-44cd-b47f-ec35f3574d12\") " pod="openshift-marketplace/redhat-operators-6kqjs" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.442810 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fecd00ab-7896-44cd-b47f-ec35f3574d12-catalog-content\") pod \"redhat-operators-6kqjs\" (UID: \"fecd00ab-7896-44cd-b47f-ec35f3574d12\") " pod="openshift-marketplace/redhat-operators-6kqjs" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.442859 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fecd00ab-7896-44cd-b47f-ec35f3574d12-utilities\") pod \"redhat-operators-6kqjs\" (UID: \"fecd00ab-7896-44cd-b47f-ec35f3574d12\") " pod="openshift-marketplace/redhat-operators-6kqjs" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.545924 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f28lr\" (UniqueName: \"kubernetes.io/projected/fecd00ab-7896-44cd-b47f-ec35f3574d12-kube-api-access-f28lr\") pod \"redhat-operators-6kqjs\" (UID: \"fecd00ab-7896-44cd-b47f-ec35f3574d12\") " pod="openshift-marketplace/redhat-operators-6kqjs" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.546078 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fecd00ab-7896-44cd-b47f-ec35f3574d12-catalog-content\") pod \"redhat-operators-6kqjs\" (UID: \"fecd00ab-7896-44cd-b47f-ec35f3574d12\") " pod="openshift-marketplace/redhat-operators-6kqjs" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.546113 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fecd00ab-7896-44cd-b47f-ec35f3574d12-utilities\") pod \"redhat-operators-6kqjs\" (UID: \"fecd00ab-7896-44cd-b47f-ec35f3574d12\") " pod="openshift-marketplace/redhat-operators-6kqjs" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.546720 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fecd00ab-7896-44cd-b47f-ec35f3574d12-utilities\") 
pod \"redhat-operators-6kqjs\" (UID: \"fecd00ab-7896-44cd-b47f-ec35f3574d12\") " pod="openshift-marketplace/redhat-operators-6kqjs" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.546787 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fecd00ab-7896-44cd-b47f-ec35f3574d12-catalog-content\") pod \"redhat-operators-6kqjs\" (UID: \"fecd00ab-7896-44cd-b47f-ec35f3574d12\") " pod="openshift-marketplace/redhat-operators-6kqjs" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.568439 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f28lr\" (UniqueName: \"kubernetes.io/projected/fecd00ab-7896-44cd-b47f-ec35f3574d12-kube-api-access-f28lr\") pod \"redhat-operators-6kqjs\" (UID: \"fecd00ab-7896-44cd-b47f-ec35f3574d12\") " pod="openshift-marketplace/redhat-operators-6kqjs" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.648629 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6kqjs" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.701130 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5xppf"] Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.703818 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5xppf" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.733775 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5xppf"] Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.870198 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157d6b21-f537-476c-8a56-f3ff290a4f2d-catalog-content\") pod \"certified-operators-5xppf\" (UID: \"157d6b21-f537-476c-8a56-f3ff290a4f2d\") " pod="openshift-marketplace/certified-operators-5xppf" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.870961 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qbrw\" (UniqueName: \"kubernetes.io/projected/157d6b21-f537-476c-8a56-f3ff290a4f2d-kube-api-access-2qbrw\") pod \"certified-operators-5xppf\" (UID: \"157d6b21-f537-476c-8a56-f3ff290a4f2d\") " pod="openshift-marketplace/certified-operators-5xppf" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.871336 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157d6b21-f537-476c-8a56-f3ff290a4f2d-utilities\") pod \"certified-operators-5xppf\" (UID: \"157d6b21-f537-476c-8a56-f3ff290a4f2d\") " pod="openshift-marketplace/certified-operators-5xppf" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.977278 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qbrw\" (UniqueName: \"kubernetes.io/projected/157d6b21-f537-476c-8a56-f3ff290a4f2d-kube-api-access-2qbrw\") pod \"certified-operators-5xppf\" (UID: \"157d6b21-f537-476c-8a56-f3ff290a4f2d\") " pod="openshift-marketplace/certified-operators-5xppf" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.977392 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157d6b21-f537-476c-8a56-f3ff290a4f2d-utilities\") pod 
\"certified-operators-5xppf\" (UID: \"157d6b21-f537-476c-8a56-f3ff290a4f2d\") " pod="openshift-marketplace/certified-operators-5xppf" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.977505 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157d6b21-f537-476c-8a56-f3ff290a4f2d-catalog-content\") pod \"certified-operators-5xppf\" (UID: \"157d6b21-f537-476c-8a56-f3ff290a4f2d\") " pod="openshift-marketplace/certified-operators-5xppf" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.978154 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157d6b21-f537-476c-8a56-f3ff290a4f2d-catalog-content\") pod \"certified-operators-5xppf\" (UID: \"157d6b21-f537-476c-8a56-f3ff290a4f2d\") " pod="openshift-marketplace/certified-operators-5xppf" Nov 27 10:22:53 crc kubenswrapper[4971]: I1127 10:22:53.978161 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157d6b21-f537-476c-8a56-f3ff290a4f2d-utilities\") pod \"certified-operators-5xppf\" (UID: \"157d6b21-f537-476c-8a56-f3ff290a4f2d\") " pod="openshift-marketplace/certified-operators-5xppf" Nov 27 10:22:54 crc kubenswrapper[4971]: I1127 10:22:54.009496 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qbrw\" (UniqueName: \"kubernetes.io/projected/157d6b21-f537-476c-8a56-f3ff290a4f2d-kube-api-access-2qbrw\") pod \"certified-operators-5xppf\" (UID: \"157d6b21-f537-476c-8a56-f3ff290a4f2d\") " pod="openshift-marketplace/certified-operators-5xppf" Nov 27 10:22:54 crc kubenswrapper[4971]: I1127 10:22:54.105290 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5xppf" Nov 27 10:22:54 crc kubenswrapper[4971]: I1127 10:22:54.270212 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6kqjs"] Nov 27 10:22:54 crc kubenswrapper[4971]: I1127 10:22:54.422387 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6kqjs" event={"ID":"fecd00ab-7896-44cd-b47f-ec35f3574d12","Type":"ContainerStarted","Data":"2c45e732ce1ed25cb40fd062319458dc611959137bf4e67e6f8ce707101a0a15"} Nov 27 10:22:54 crc kubenswrapper[4971]: I1127 10:22:54.756000 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5xppf"] Nov 27 10:22:54 crc kubenswrapper[4971]: W1127 10:22:54.757406 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod157d6b21_f537_476c_8a56_f3ff290a4f2d.slice/crio-cdb2371eb29833ffce2753eb13fae1d47e96b71e0889a66a506ce4cf9af0e039 WatchSource:0}: Error finding container cdb2371eb29833ffce2753eb13fae1d47e96b71e0889a66a506ce4cf9af0e039: Status 404 returned error can't find the container with id cdb2371eb29833ffce2753eb13fae1d47e96b71e0889a66a506ce4cf9af0e039 Nov 27 10:22:55 crc kubenswrapper[4971]: I1127 10:22:55.436134 4971 generic.go:334] "Generic (PLEG): container finished" podID="fecd00ab-7896-44cd-b47f-ec35f3574d12" containerID="75b6ae9168bd50ba122e61c0324a4ee1e4fb1edab804ddd15f363d06b37ac583" exitCode=0 Nov 27 10:22:55 crc kubenswrapper[4971]: I1127 10:22:55.436305 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6kqjs" event={"ID":"fecd00ab-7896-44cd-b47f-ec35f3574d12","Type":"ContainerDied","Data":"75b6ae9168bd50ba122e61c0324a4ee1e4fb1edab804ddd15f363d06b37ac583"} Nov 27 10:22:55 crc kubenswrapper[4971]: I1127 10:22:55.439698 4971 generic.go:334] "Generic (PLEG): container finished" podID="157d6b21-f537-476c-8a56-f3ff290a4f2d" containerID="6824740103b48dbc816e5320498a9c3150532d04cdd12e40f37215566b54fd66" exitCode=0 Nov 27 10:22:55 crc kubenswrapper[4971]: I1127 10:22:55.439765 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xppf" event={"ID":"157d6b21-f537-476c-8a56-f3ff290a4f2d","Type":"ContainerDied","Data":"6824740103b48dbc816e5320498a9c3150532d04cdd12e40f37215566b54fd66"} Nov 27 10:22:55 crc kubenswrapper[4971]: I1127 10:22:55.439932 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xppf" event={"ID":"157d6b21-f537-476c-8a56-f3ff290a4f2d","Type":"ContainerStarted","Data":"cdb2371eb29833ffce2753eb13fae1d47e96b71e0889a66a506ce4cf9af0e039"} Nov 27 10:22:56 crc kubenswrapper[4971]: I1127 10:22:56.453721 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6kqjs" event={"ID":"fecd00ab-7896-44cd-b47f-ec35f3574d12","Type":"ContainerStarted","Data":"618fc4de45f42118d2a953d1b6145726d08fc0e023a8e98c5534e545f4f34d86"} Nov 27 10:22:56 crc kubenswrapper[4971]: I1127 10:22:56.499486 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ffcdm"] Nov 27 10:22:56 crc kubenswrapper[4971]: I1127 10:22:56.503024 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ffcdm" Nov 27 10:22:56 crc kubenswrapper[4971]: I1127 10:22:56.529628 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ffcdm"] Nov 27 10:22:56 crc kubenswrapper[4971]: I1127 10:22:56.658858 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86f013ee-f7bd-4c78-87d7-01840e15985e-utilities\") pod \"community-operators-ffcdm\" (UID: \"86f013ee-f7bd-4c78-87d7-01840e15985e\") " pod="openshift-marketplace/community-operators-ffcdm" Nov 27 10:22:56 crc kubenswrapper[4971]: I1127 10:22:56.658969 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86f013ee-f7bd-4c78-87d7-01840e15985e-catalog-content\") pod \"community-operators-ffcdm\" (UID: \"86f013ee-f7bd-4c78-87d7-01840e15985e\") " pod="openshift-marketplace/community-operators-ffcdm" Nov 27 10:22:56 crc kubenswrapper[4971]: I1127 10:22:56.659088 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2vsc\" (UniqueName: \"kubernetes.io/projected/86f013ee-f7bd-4c78-87d7-01840e15985e-kube-api-access-z2vsc\") pod \"community-operators-ffcdm\" (UID: \"86f013ee-f7bd-4c78-87d7-01840e15985e\") " pod="openshift-marketplace/community-operators-ffcdm" Nov 27 10:22:56 crc kubenswrapper[4971]: I1127 10:22:56.761352 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86f013ee-f7bd-4c78-87d7-01840e15985e-utilities\") pod \"community-operators-ffcdm\" (UID: \"86f013ee-f7bd-4c78-87d7-01840e15985e\") " pod="openshift-marketplace/community-operators-ffcdm" Nov 27 10:22:56 crc kubenswrapper[4971]: I1127 10:22:56.761458 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86f013ee-f7bd-4c78-87d7-01840e15985e-catalog-content\") pod \"community-operators-ffcdm\" (UID: \"86f013ee-f7bd-4c78-87d7-01840e15985e\") " pod="openshift-marketplace/community-operators-ffcdm" Nov 27 10:22:56 crc kubenswrapper[4971]: I1127 10:22:56.761701 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2vsc\" (UniqueName: \"kubernetes.io/projected/86f013ee-f7bd-4c78-87d7-01840e15985e-kube-api-access-z2vsc\") pod \"community-operators-ffcdm\" (UID: \"86f013ee-f7bd-4c78-87d7-01840e15985e\") " pod="openshift-marketplace/community-operators-ffcdm" Nov 27 10:22:56 crc kubenswrapper[4971]: I1127 10:22:56.762151 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86f013ee-f7bd-4c78-87d7-01840e15985e-utilities\") pod \"community-operators-ffcdm\" (UID: \"86f013ee-f7bd-4c78-87d7-01840e15985e\") " pod="openshift-marketplace/community-operators-ffcdm" Nov 27 10:22:56 crc kubenswrapper[4971]: I1127 10:22:56.762265 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86f013ee-f7bd-4c78-87d7-01840e15985e-catalog-content\") pod \"community-operators-ffcdm\" (UID: \"86f013ee-f7bd-4c78-87d7-01840e15985e\") " pod="openshift-marketplace/community-operators-ffcdm" Nov 27 10:22:56 crc kubenswrapper[4971]: I1127 10:22:56.783951 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-z2vsc\" (UniqueName: \"kubernetes.io/projected/86f013ee-f7bd-4c78-87d7-01840e15985e-kube-api-access-z2vsc\") pod \"community-operators-ffcdm\" (UID: \"86f013ee-f7bd-4c78-87d7-01840e15985e\") " pod="openshift-marketplace/community-operators-ffcdm" Nov 27 10:22:56 crc kubenswrapper[4971]: I1127 10:22:56.896341 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ffcdm" Nov 27 10:22:57 crc kubenswrapper[4971]: I1127 10:22:57.470156 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xppf" event={"ID":"157d6b21-f537-476c-8a56-f3ff290a4f2d","Type":"ContainerStarted","Data":"56f983d7c2d03577d4eeed6eaeab4ea10917990d00d0c5a32b579d609e82296a"} Nov 27 10:22:57 crc kubenswrapper[4971]: I1127 10:22:57.533817 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ffcdm"] Nov 27 10:22:57 crc kubenswrapper[4971]: W1127 10:22:57.538714 4971 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod86f013ee_f7bd_4c78_87d7_01840e15985e.slice/crio-a88637e7fb0c8110f696e421c62bc5f0e2708735c45a3965eb74f7c9fd8a8880 WatchSource:0}: Error finding container a88637e7fb0c8110f696e421c62bc5f0e2708735c45a3965eb74f7c9fd8a8880: Status 404 returned error can't find the container with id a88637e7fb0c8110f696e421c62bc5f0e2708735c45a3965eb74f7c9fd8a8880 Nov 27 10:22:58 crc kubenswrapper[4971]: I1127 10:22:58.485228 4971 generic.go:334] "Generic (PLEG): container finished" podID="86f013ee-f7bd-4c78-87d7-01840e15985e" containerID="2a4576a994e8050344b2f674449dc96f2fea0c99799bb3318cd9361158c1c90c" exitCode=0 Nov 27 10:22:58 crc kubenswrapper[4971]: I1127 10:22:58.486664 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ffcdm" event={"ID":"86f013ee-f7bd-4c78-87d7-01840e15985e","Type":"ContainerDied","Data":"2a4576a994e8050344b2f674449dc96f2fea0c99799bb3318cd9361158c1c90c"} Nov 27 10:22:58 crc kubenswrapper[4971]: I1127 10:22:58.486726 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ffcdm" event={"ID":"86f013ee-f7bd-4c78-87d7-01840e15985e","Type":"ContainerStarted","Data":"a88637e7fb0c8110f696e421c62bc5f0e2708735c45a3965eb74f7c9fd8a8880"} Nov 27 10:23:00 crc kubenswrapper[4971]: I1127 10:23:00.512856 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ffcdm" event={"ID":"86f013ee-f7bd-4c78-87d7-01840e15985e","Type":"ContainerStarted","Data":"f4e5d32699fde22ffee5e914a784cda8addeb6737e1c513c484c11f8ac9594a7"} Nov 27 10:23:00 crc kubenswrapper[4971]: I1127 10:23:00.551116 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" Nov 27 10:23:00 crc kubenswrapper[4971]: E1127 10:23:00.551480 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:23:01 crc kubenswrapper[4971]: I1127 10:23:01.542926 4971 generic.go:334] "Generic (PLEG): container finished" 
podID="157d6b21-f537-476c-8a56-f3ff290a4f2d" containerID="56f983d7c2d03577d4eeed6eaeab4ea10917990d00d0c5a32b579d609e82296a" exitCode=0 Nov 27 10:23:01 crc kubenswrapper[4971]: I1127 10:23:01.544629 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xppf" event={"ID":"157d6b21-f537-476c-8a56-f3ff290a4f2d","Type":"ContainerDied","Data":"56f983d7c2d03577d4eeed6eaeab4ea10917990d00d0c5a32b579d609e82296a"} Nov 27 10:23:03 crc kubenswrapper[4971]: I1127 10:23:03.573254 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xppf" event={"ID":"157d6b21-f537-476c-8a56-f3ff290a4f2d","Type":"ContainerStarted","Data":"231870db48bb41d91e2ffa2a23b1b38c9cb200b1dd3446aa479f3fa395e83047"} Nov 27 10:23:03 crc kubenswrapper[4971]: I1127 10:23:03.616897 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5xppf" podStartSLOduration=3.423795515 podStartE2EDuration="10.61687251s" podCreationTimestamp="2025-11-27 10:22:53 +0000 UTC" firstStartedPulling="2025-11-27 10:22:55.441095576 +0000 UTC m=+12613.633139494" lastFinishedPulling="2025-11-27 10:23:02.634172571 +0000 UTC m=+12620.826216489" observedRunningTime="2025-11-27 10:23:03.599264077 +0000 UTC m=+12621.791308015" watchObservedRunningTime="2025-11-27 10:23:03.61687251 +0000 UTC m=+12621.808916428" Nov 27 10:23:04 crc kubenswrapper[4971]: I1127 10:23:04.106301 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5xppf" Nov 27 10:23:04 crc kubenswrapper[4971]: I1127 10:23:04.106720 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5xppf" Nov 27 10:23:04 crc kubenswrapper[4971]: I1127 10:23:04.597606 4971 generic.go:334] "Generic (PLEG): container finished" podID="fecd00ab-7896-44cd-b47f-ec35f3574d12" containerID="618fc4de45f42118d2a953d1b6145726d08fc0e023a8e98c5534e545f4f34d86" exitCode=0 Nov 27 10:23:04 crc kubenswrapper[4971]: I1127 10:23:04.597727 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6kqjs" event={"ID":"fecd00ab-7896-44cd-b47f-ec35f3574d12","Type":"ContainerDied","Data":"618fc4de45f42118d2a953d1b6145726d08fc0e023a8e98c5534e545f4f34d86"} Nov 27 10:23:04 crc kubenswrapper[4971]: I1127 10:23:04.601331 4971 generic.go:334] "Generic (PLEG): container finished" podID="86f013ee-f7bd-4c78-87d7-01840e15985e" containerID="f4e5d32699fde22ffee5e914a784cda8addeb6737e1c513c484c11f8ac9594a7" exitCode=0 Nov 27 10:23:04 crc kubenswrapper[4971]: I1127 10:23:04.603359 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ffcdm" event={"ID":"86f013ee-f7bd-4c78-87d7-01840e15985e","Type":"ContainerDied","Data":"f4e5d32699fde22ffee5e914a784cda8addeb6737e1c513c484c11f8ac9594a7"} Nov 27 10:23:05 crc kubenswrapper[4971]: I1127 10:23:05.167092 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-5xppf" podUID="157d6b21-f537-476c-8a56-f3ff290a4f2d" containerName="registry-server" probeResult="failure" output=< Nov 27 10:23:05 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 10:23:05 crc kubenswrapper[4971]: > Nov 27 10:23:05 crc kubenswrapper[4971]: I1127 10:23:05.619988 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ffcdm" 
event={"ID":"86f013ee-f7bd-4c78-87d7-01840e15985e","Type":"ContainerStarted","Data":"d9c9e3d4405e46fd1b55beaf5a6dc31fa37fccdb2c5c202c18916ce1b319eaa2"} Nov 27 10:23:05 crc kubenswrapper[4971]: I1127 10:23:05.655479 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ffcdm" podStartSLOduration=3.088127678 podStartE2EDuration="9.65545225s" podCreationTimestamp="2025-11-27 10:22:56 +0000 UTC" firstStartedPulling="2025-11-27 10:22:58.48963254 +0000 UTC m=+12616.681676458" lastFinishedPulling="2025-11-27 10:23:05.056957112 +0000 UTC m=+12623.249001030" observedRunningTime="2025-11-27 10:23:05.640958724 +0000 UTC m=+12623.833002682" watchObservedRunningTime="2025-11-27 10:23:05.65545225 +0000 UTC m=+12623.847496178" Nov 27 10:23:06 crc kubenswrapper[4971]: I1127 10:23:06.634263 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6kqjs" event={"ID":"fecd00ab-7896-44cd-b47f-ec35f3574d12","Type":"ContainerStarted","Data":"930256dd37b87b5b8ef4406ebe43521f99563cbe60755aaf62715a4494ad4368"} Nov 27 10:23:06 crc kubenswrapper[4971]: I1127 10:23:06.666917 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6kqjs" podStartSLOduration=3.49169699 podStartE2EDuration="13.666892606s" podCreationTimestamp="2025-11-27 10:22:53 +0000 UTC" firstStartedPulling="2025-11-27 10:22:55.439980625 +0000 UTC m=+12613.632024543" lastFinishedPulling="2025-11-27 10:23:05.615176231 +0000 UTC m=+12623.807220159" observedRunningTime="2025-11-27 10:23:06.654165349 +0000 UTC m=+12624.846209277" watchObservedRunningTime="2025-11-27 10:23:06.666892606 +0000 UTC m=+12624.858936524" Nov 27 10:23:06 crc kubenswrapper[4971]: I1127 10:23:06.897290 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ffcdm" Nov 27 10:23:06 crc kubenswrapper[4971]: I1127 10:23:06.897373 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ffcdm" Nov 27 10:23:07 crc kubenswrapper[4971]: I1127 10:23:07.960472 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-ffcdm" podUID="86f013ee-f7bd-4c78-87d7-01840e15985e" containerName="registry-server" probeResult="failure" output=< Nov 27 10:23:07 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 10:23:07 crc kubenswrapper[4971]: > Nov 27 10:23:13 crc kubenswrapper[4971]: I1127 10:23:13.551687 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" Nov 27 10:23:13 crc kubenswrapper[4971]: E1127 10:23:13.552505 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:23:13 crc kubenswrapper[4971]: I1127 10:23:13.648862 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6kqjs" Nov 27 10:23:13 crc kubenswrapper[4971]: I1127 10:23:13.649577 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-6kqjs" Nov 27 10:23:14 crc kubenswrapper[4971]: I1127 10:23:14.713573 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6kqjs" podUID="fecd00ab-7896-44cd-b47f-ec35f3574d12" containerName="registry-server" probeResult="failure" output=< Nov 27 10:23:14 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 10:23:14 crc kubenswrapper[4971]: > Nov 27 10:23:15 crc kubenswrapper[4971]: I1127 10:23:15.160337 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-5xppf" podUID="157d6b21-f537-476c-8a56-f3ff290a4f2d" containerName="registry-server" probeResult="failure" output=< Nov 27 10:23:15 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 10:23:15 crc kubenswrapper[4971]: > Nov 27 10:23:16 crc kubenswrapper[4971]: I1127 10:23:16.981178 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ffcdm" Nov 27 10:23:17 crc kubenswrapper[4971]: I1127 10:23:17.045820 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ffcdm" Nov 27 10:23:17 crc kubenswrapper[4971]: I1127 10:23:17.231040 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ffcdm"] Nov 27 10:23:18 crc kubenswrapper[4971]: I1127 10:23:18.831894 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ffcdm" podUID="86f013ee-f7bd-4c78-87d7-01840e15985e" containerName="registry-server" containerID="cri-o://d9c9e3d4405e46fd1b55beaf5a6dc31fa37fccdb2c5c202c18916ce1b319eaa2" gracePeriod=2 Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.383976 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ffcdm" Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.486494 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86f013ee-f7bd-4c78-87d7-01840e15985e-catalog-content\") pod \"86f013ee-f7bd-4c78-87d7-01840e15985e\" (UID: \"86f013ee-f7bd-4c78-87d7-01840e15985e\") " Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.486621 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2vsc\" (UniqueName: \"kubernetes.io/projected/86f013ee-f7bd-4c78-87d7-01840e15985e-kube-api-access-z2vsc\") pod \"86f013ee-f7bd-4c78-87d7-01840e15985e\" (UID: \"86f013ee-f7bd-4c78-87d7-01840e15985e\") " Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.486943 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86f013ee-f7bd-4c78-87d7-01840e15985e-utilities\") pod \"86f013ee-f7bd-4c78-87d7-01840e15985e\" (UID: \"86f013ee-f7bd-4c78-87d7-01840e15985e\") " Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.487542 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86f013ee-f7bd-4c78-87d7-01840e15985e-utilities" (OuterVolumeSpecName: "utilities") pod "86f013ee-f7bd-4c78-87d7-01840e15985e" (UID: "86f013ee-f7bd-4c78-87d7-01840e15985e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.487871 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86f013ee-f7bd-4c78-87d7-01840e15985e-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.503798 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86f013ee-f7bd-4c78-87d7-01840e15985e-kube-api-access-z2vsc" (OuterVolumeSpecName: "kube-api-access-z2vsc") pod "86f013ee-f7bd-4c78-87d7-01840e15985e" (UID: "86f013ee-f7bd-4c78-87d7-01840e15985e"). InnerVolumeSpecName "kube-api-access-z2vsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.542592 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86f013ee-f7bd-4c78-87d7-01840e15985e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "86f013ee-f7bd-4c78-87d7-01840e15985e" (UID: "86f013ee-f7bd-4c78-87d7-01840e15985e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.590559 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2vsc\" (UniqueName: \"kubernetes.io/projected/86f013ee-f7bd-4c78-87d7-01840e15985e-kube-api-access-z2vsc\") on node \"crc\" DevicePath \"\"" Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.590594 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86f013ee-f7bd-4c78-87d7-01840e15985e-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.853634 4971 generic.go:334] "Generic (PLEG): container finished" podID="86f013ee-f7bd-4c78-87d7-01840e15985e" containerID="d9c9e3d4405e46fd1b55beaf5a6dc31fa37fccdb2c5c202c18916ce1b319eaa2" exitCode=0 Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.853690 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ffcdm" event={"ID":"86f013ee-f7bd-4c78-87d7-01840e15985e","Type":"ContainerDied","Data":"d9c9e3d4405e46fd1b55beaf5a6dc31fa37fccdb2c5c202c18916ce1b319eaa2"} Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.853727 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ffcdm" event={"ID":"86f013ee-f7bd-4c78-87d7-01840e15985e","Type":"ContainerDied","Data":"a88637e7fb0c8110f696e421c62bc5f0e2708735c45a3965eb74f7c9fd8a8880"} Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.853746 4971 scope.go:117] "RemoveContainer" containerID="d9c9e3d4405e46fd1b55beaf5a6dc31fa37fccdb2c5c202c18916ce1b319eaa2" Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.853804 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ffcdm" Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.892657 4971 scope.go:117] "RemoveContainer" containerID="f4e5d32699fde22ffee5e914a784cda8addeb6737e1c513c484c11f8ac9594a7" Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.916757 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ffcdm"] Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.924778 4971 scope.go:117] "RemoveContainer" containerID="2a4576a994e8050344b2f674449dc96f2fea0c99799bb3318cd9361158c1c90c" Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.932197 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ffcdm"] Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.986341 4971 scope.go:117] "RemoveContainer" containerID="d9c9e3d4405e46fd1b55beaf5a6dc31fa37fccdb2c5c202c18916ce1b319eaa2" Nov 27 10:23:19 crc kubenswrapper[4971]: E1127 10:23:19.986908 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9c9e3d4405e46fd1b55beaf5a6dc31fa37fccdb2c5c202c18916ce1b319eaa2\": container with ID starting with d9c9e3d4405e46fd1b55beaf5a6dc31fa37fccdb2c5c202c18916ce1b319eaa2 not found: ID does not exist" containerID="d9c9e3d4405e46fd1b55beaf5a6dc31fa37fccdb2c5c202c18916ce1b319eaa2" Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.986964 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9c9e3d4405e46fd1b55beaf5a6dc31fa37fccdb2c5c202c18916ce1b319eaa2"} err="failed to get container status \"d9c9e3d4405e46fd1b55beaf5a6dc31fa37fccdb2c5c202c18916ce1b319eaa2\": rpc error: code = NotFound desc = could not find container \"d9c9e3d4405e46fd1b55beaf5a6dc31fa37fccdb2c5c202c18916ce1b319eaa2\": container with ID starting with d9c9e3d4405e46fd1b55beaf5a6dc31fa37fccdb2c5c202c18916ce1b319eaa2 not found: ID does not exist" Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.986994 4971 scope.go:117] "RemoveContainer" containerID="f4e5d32699fde22ffee5e914a784cda8addeb6737e1c513c484c11f8ac9594a7" Nov 27 10:23:19 crc kubenswrapper[4971]: E1127 10:23:19.989612 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4e5d32699fde22ffee5e914a784cda8addeb6737e1c513c484c11f8ac9594a7\": container with ID starting with f4e5d32699fde22ffee5e914a784cda8addeb6737e1c513c484c11f8ac9594a7 not found: ID does not exist" containerID="f4e5d32699fde22ffee5e914a784cda8addeb6737e1c513c484c11f8ac9594a7" Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.989648 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4e5d32699fde22ffee5e914a784cda8addeb6737e1c513c484c11f8ac9594a7"} err="failed to get container status \"f4e5d32699fde22ffee5e914a784cda8addeb6737e1c513c484c11f8ac9594a7\": rpc error: code = NotFound desc = could not find container \"f4e5d32699fde22ffee5e914a784cda8addeb6737e1c513c484c11f8ac9594a7\": container with ID starting with f4e5d32699fde22ffee5e914a784cda8addeb6737e1c513c484c11f8ac9594a7 not found: ID does not exist" Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.989681 4971 scope.go:117] "RemoveContainer" containerID="2a4576a994e8050344b2f674449dc96f2fea0c99799bb3318cd9361158c1c90c" Nov 27 10:23:19 crc kubenswrapper[4971]: E1127 10:23:19.990052 4971 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2a4576a994e8050344b2f674449dc96f2fea0c99799bb3318cd9361158c1c90c\": container with ID starting with 2a4576a994e8050344b2f674449dc96f2fea0c99799bb3318cd9361158c1c90c not found: ID does not exist" containerID="2a4576a994e8050344b2f674449dc96f2fea0c99799bb3318cd9361158c1c90c" Nov 27 10:23:19 crc kubenswrapper[4971]: I1127 10:23:19.990112 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a4576a994e8050344b2f674449dc96f2fea0c99799bb3318cd9361158c1c90c"} err="failed to get container status \"2a4576a994e8050344b2f674449dc96f2fea0c99799bb3318cd9361158c1c90c\": rpc error: code = NotFound desc = could not find container \"2a4576a994e8050344b2f674449dc96f2fea0c99799bb3318cd9361158c1c90c\": container with ID starting with 2a4576a994e8050344b2f674449dc96f2fea0c99799bb3318cd9361158c1c90c not found: ID does not exist" Nov 27 10:23:20 crc kubenswrapper[4971]: I1127 10:23:20.565336 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86f013ee-f7bd-4c78-87d7-01840e15985e" path="/var/lib/kubelet/pods/86f013ee-f7bd-4c78-87d7-01840e15985e/volumes" Nov 27 10:23:24 crc kubenswrapper[4971]: I1127 10:23:24.160928 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5xppf" Nov 27 10:23:24 crc kubenswrapper[4971]: I1127 10:23:24.234103 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5xppf" Nov 27 10:23:24 crc kubenswrapper[4971]: I1127 10:23:24.626038 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5xppf"] Nov 27 10:23:24 crc kubenswrapper[4971]: I1127 10:23:24.700351 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6kqjs" podUID="fecd00ab-7896-44cd-b47f-ec35f3574d12" containerName="registry-server" probeResult="failure" output=< Nov 27 10:23:24 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 10:23:24 crc kubenswrapper[4971]: > Nov 27 10:23:25 crc kubenswrapper[4971]: I1127 10:23:25.929899 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5xppf" podUID="157d6b21-f537-476c-8a56-f3ff290a4f2d" containerName="registry-server" containerID="cri-o://231870db48bb41d91e2ffa2a23b1b38c9cb200b1dd3446aa479f3fa395e83047" gracePeriod=2 Nov 27 10:23:26 crc kubenswrapper[4971]: I1127 10:23:26.467559 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5xppf" Nov 27 10:23:26 crc kubenswrapper[4971]: I1127 10:23:26.551844 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" Nov 27 10:23:26 crc kubenswrapper[4971]: E1127 10:23:26.552167 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:23:26 crc kubenswrapper[4971]: I1127 10:23:26.580448 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157d6b21-f537-476c-8a56-f3ff290a4f2d-utilities\") pod \"157d6b21-f537-476c-8a56-f3ff290a4f2d\" (UID: \"157d6b21-f537-476c-8a56-f3ff290a4f2d\") " Nov 27 10:23:26 crc kubenswrapper[4971]: I1127 10:23:26.580822 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157d6b21-f537-476c-8a56-f3ff290a4f2d-catalog-content\") pod \"157d6b21-f537-476c-8a56-f3ff290a4f2d\" (UID: \"157d6b21-f537-476c-8a56-f3ff290a4f2d\") " Nov 27 10:23:26 crc kubenswrapper[4971]: I1127 10:23:26.580987 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qbrw\" (UniqueName: \"kubernetes.io/projected/157d6b21-f537-476c-8a56-f3ff290a4f2d-kube-api-access-2qbrw\") pod \"157d6b21-f537-476c-8a56-f3ff290a4f2d\" (UID: \"157d6b21-f537-476c-8a56-f3ff290a4f2d\") " Nov 27 10:23:26 crc kubenswrapper[4971]: I1127 10:23:26.581302 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/157d6b21-f537-476c-8a56-f3ff290a4f2d-utilities" (OuterVolumeSpecName: "utilities") pod "157d6b21-f537-476c-8a56-f3ff290a4f2d" (UID: "157d6b21-f537-476c-8a56-f3ff290a4f2d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:23:26 crc kubenswrapper[4971]: I1127 10:23:26.583409 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/157d6b21-f537-476c-8a56-f3ff290a4f2d-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 10:23:26 crc kubenswrapper[4971]: I1127 10:23:26.590007 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/157d6b21-f537-476c-8a56-f3ff290a4f2d-kube-api-access-2qbrw" (OuterVolumeSpecName: "kube-api-access-2qbrw") pod "157d6b21-f537-476c-8a56-f3ff290a4f2d" (UID: "157d6b21-f537-476c-8a56-f3ff290a4f2d"). InnerVolumeSpecName "kube-api-access-2qbrw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:23:26 crc kubenswrapper[4971]: I1127 10:23:26.647580 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/157d6b21-f537-476c-8a56-f3ff290a4f2d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "157d6b21-f537-476c-8a56-f3ff290a4f2d" (UID: "157d6b21-f537-476c-8a56-f3ff290a4f2d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:23:26 crc kubenswrapper[4971]: I1127 10:23:26.686907 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qbrw\" (UniqueName: \"kubernetes.io/projected/157d6b21-f537-476c-8a56-f3ff290a4f2d-kube-api-access-2qbrw\") on node \"crc\" DevicePath \"\"" Nov 27 10:23:26 crc kubenswrapper[4971]: I1127 10:23:26.686955 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/157d6b21-f537-476c-8a56-f3ff290a4f2d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 10:23:26 crc kubenswrapper[4971]: I1127 10:23:26.943787 4971 generic.go:334] "Generic (PLEG): container finished" podID="157d6b21-f537-476c-8a56-f3ff290a4f2d" containerID="231870db48bb41d91e2ffa2a23b1b38c9cb200b1dd3446aa479f3fa395e83047" exitCode=0 Nov 27 10:23:26 crc kubenswrapper[4971]: I1127 10:23:26.943844 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xppf" event={"ID":"157d6b21-f537-476c-8a56-f3ff290a4f2d","Type":"ContainerDied","Data":"231870db48bb41d91e2ffa2a23b1b38c9cb200b1dd3446aa479f3fa395e83047"} Nov 27 10:23:26 crc kubenswrapper[4971]: I1127 10:23:26.943871 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5xppf" Nov 27 10:23:26 crc kubenswrapper[4971]: I1127 10:23:26.943885 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xppf" event={"ID":"157d6b21-f537-476c-8a56-f3ff290a4f2d","Type":"ContainerDied","Data":"cdb2371eb29833ffce2753eb13fae1d47e96b71e0889a66a506ce4cf9af0e039"} Nov 27 10:23:26 crc kubenswrapper[4971]: I1127 10:23:26.944098 4971 scope.go:117] "RemoveContainer" containerID="231870db48bb41d91e2ffa2a23b1b38c9cb200b1dd3446aa479f3fa395e83047" Nov 27 10:23:26 crc kubenswrapper[4971]: I1127 10:23:26.966635 4971 scope.go:117] "RemoveContainer" containerID="56f983d7c2d03577d4eeed6eaeab4ea10917990d00d0c5a32b579d609e82296a" Nov 27 10:23:27 crc kubenswrapper[4971]: I1127 10:23:27.005561 4971 scope.go:117] "RemoveContainer" containerID="6824740103b48dbc816e5320498a9c3150532d04cdd12e40f37215566b54fd66" Nov 27 10:23:27 crc kubenswrapper[4971]: I1127 10:23:27.014313 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5xppf"] Nov 27 10:23:27 crc kubenswrapper[4971]: I1127 10:23:27.042160 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5xppf"] Nov 27 10:23:27 crc kubenswrapper[4971]: I1127 10:23:27.059943 4971 scope.go:117] "RemoveContainer" containerID="231870db48bb41d91e2ffa2a23b1b38c9cb200b1dd3446aa479f3fa395e83047" Nov 27 10:23:27 crc kubenswrapper[4971]: E1127 10:23:27.060674 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"231870db48bb41d91e2ffa2a23b1b38c9cb200b1dd3446aa479f3fa395e83047\": container with ID starting with 231870db48bb41d91e2ffa2a23b1b38c9cb200b1dd3446aa479f3fa395e83047 not found: ID does not exist" containerID="231870db48bb41d91e2ffa2a23b1b38c9cb200b1dd3446aa479f3fa395e83047" Nov 27 10:23:27 crc kubenswrapper[4971]: I1127 10:23:27.060740 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"231870db48bb41d91e2ffa2a23b1b38c9cb200b1dd3446aa479f3fa395e83047"} err="failed to get container status 
\"231870db48bb41d91e2ffa2a23b1b38c9cb200b1dd3446aa479f3fa395e83047\": rpc error: code = NotFound desc = could not find container \"231870db48bb41d91e2ffa2a23b1b38c9cb200b1dd3446aa479f3fa395e83047\": container with ID starting with 231870db48bb41d91e2ffa2a23b1b38c9cb200b1dd3446aa479f3fa395e83047 not found: ID does not exist" Nov 27 10:23:27 crc kubenswrapper[4971]: I1127 10:23:27.060787 4971 scope.go:117] "RemoveContainer" containerID="56f983d7c2d03577d4eeed6eaeab4ea10917990d00d0c5a32b579d609e82296a" Nov 27 10:23:27 crc kubenswrapper[4971]: E1127 10:23:27.061313 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56f983d7c2d03577d4eeed6eaeab4ea10917990d00d0c5a32b579d609e82296a\": container with ID starting with 56f983d7c2d03577d4eeed6eaeab4ea10917990d00d0c5a32b579d609e82296a not found: ID does not exist" containerID="56f983d7c2d03577d4eeed6eaeab4ea10917990d00d0c5a32b579d609e82296a" Nov 27 10:23:27 crc kubenswrapper[4971]: I1127 10:23:27.061349 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56f983d7c2d03577d4eeed6eaeab4ea10917990d00d0c5a32b579d609e82296a"} err="failed to get container status \"56f983d7c2d03577d4eeed6eaeab4ea10917990d00d0c5a32b579d609e82296a\": rpc error: code = NotFound desc = could not find container \"56f983d7c2d03577d4eeed6eaeab4ea10917990d00d0c5a32b579d609e82296a\": container with ID starting with 56f983d7c2d03577d4eeed6eaeab4ea10917990d00d0c5a32b579d609e82296a not found: ID does not exist" Nov 27 10:23:27 crc kubenswrapper[4971]: I1127 10:23:27.061379 4971 scope.go:117] "RemoveContainer" containerID="6824740103b48dbc816e5320498a9c3150532d04cdd12e40f37215566b54fd66" Nov 27 10:23:27 crc kubenswrapper[4971]: E1127 10:23:27.061912 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6824740103b48dbc816e5320498a9c3150532d04cdd12e40f37215566b54fd66\": container with ID starting with 6824740103b48dbc816e5320498a9c3150532d04cdd12e40f37215566b54fd66 not found: ID does not exist" containerID="6824740103b48dbc816e5320498a9c3150532d04cdd12e40f37215566b54fd66" Nov 27 10:23:27 crc kubenswrapper[4971]: I1127 10:23:27.061938 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6824740103b48dbc816e5320498a9c3150532d04cdd12e40f37215566b54fd66"} err="failed to get container status \"6824740103b48dbc816e5320498a9c3150532d04cdd12e40f37215566b54fd66\": rpc error: code = NotFound desc = could not find container \"6824740103b48dbc816e5320498a9c3150532d04cdd12e40f37215566b54fd66\": container with ID starting with 6824740103b48dbc816e5320498a9c3150532d04cdd12e40f37215566b54fd66 not found: ID does not exist" Nov 27 10:23:28 crc kubenswrapper[4971]: I1127 10:23:28.565199 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="157d6b21-f537-476c-8a56-f3ff290a4f2d" path="/var/lib/kubelet/pods/157d6b21-f537-476c-8a56-f3ff290a4f2d/volumes" Nov 27 10:23:33 crc kubenswrapper[4971]: I1127 10:23:33.706851 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6kqjs" Nov 27 10:23:33 crc kubenswrapper[4971]: I1127 10:23:33.765576 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6kqjs" Nov 27 10:23:33 crc kubenswrapper[4971]: I1127 10:23:33.952146 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-operators-6kqjs"] Nov 27 10:23:35 crc kubenswrapper[4971]: I1127 10:23:35.059897 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6kqjs" podUID="fecd00ab-7896-44cd-b47f-ec35f3574d12" containerName="registry-server" containerID="cri-o://930256dd37b87b5b8ef4406ebe43521f99563cbe60755aaf62715a4494ad4368" gracePeriod=2 Nov 27 10:23:35 crc kubenswrapper[4971]: E1127 10:23:35.428963 4971 kubelet_node_status.go:756] "Failed to set some node status fields" err="failed to validate nodeIP: route ip+net: no such network interface" node="crc" Nov 27 10:23:35 crc kubenswrapper[4971]: I1127 10:23:35.654783 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6kqjs" Nov 27 10:23:35 crc kubenswrapper[4971]: I1127 10:23:35.734903 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fecd00ab-7896-44cd-b47f-ec35f3574d12-utilities\") pod \"fecd00ab-7896-44cd-b47f-ec35f3574d12\" (UID: \"fecd00ab-7896-44cd-b47f-ec35f3574d12\") " Nov 27 10:23:35 crc kubenswrapper[4971]: I1127 10:23:35.735058 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f28lr\" (UniqueName: \"kubernetes.io/projected/fecd00ab-7896-44cd-b47f-ec35f3574d12-kube-api-access-f28lr\") pod \"fecd00ab-7896-44cd-b47f-ec35f3574d12\" (UID: \"fecd00ab-7896-44cd-b47f-ec35f3574d12\") " Nov 27 10:23:35 crc kubenswrapper[4971]: I1127 10:23:35.735319 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fecd00ab-7896-44cd-b47f-ec35f3574d12-catalog-content\") pod \"fecd00ab-7896-44cd-b47f-ec35f3574d12\" (UID: \"fecd00ab-7896-44cd-b47f-ec35f3574d12\") " Nov 27 10:23:35 crc kubenswrapper[4971]: I1127 10:23:35.735709 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fecd00ab-7896-44cd-b47f-ec35f3574d12-utilities" (OuterVolumeSpecName: "utilities") pod "fecd00ab-7896-44cd-b47f-ec35f3574d12" (UID: "fecd00ab-7896-44cd-b47f-ec35f3574d12"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:23:35 crc kubenswrapper[4971]: I1127 10:23:35.736492 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fecd00ab-7896-44cd-b47f-ec35f3574d12-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 10:23:35 crc kubenswrapper[4971]: I1127 10:23:35.741273 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fecd00ab-7896-44cd-b47f-ec35f3574d12-kube-api-access-f28lr" (OuterVolumeSpecName: "kube-api-access-f28lr") pod "fecd00ab-7896-44cd-b47f-ec35f3574d12" (UID: "fecd00ab-7896-44cd-b47f-ec35f3574d12"). InnerVolumeSpecName "kube-api-access-f28lr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:23:35 crc kubenswrapper[4971]: I1127 10:23:35.838167 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fecd00ab-7896-44cd-b47f-ec35f3574d12-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fecd00ab-7896-44cd-b47f-ec35f3574d12" (UID: "fecd00ab-7896-44cd-b47f-ec35f3574d12"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:23:35 crc kubenswrapper[4971]: I1127 10:23:35.838798 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fecd00ab-7896-44cd-b47f-ec35f3574d12-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 10:23:35 crc kubenswrapper[4971]: I1127 10:23:35.838907 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f28lr\" (UniqueName: \"kubernetes.io/projected/fecd00ab-7896-44cd-b47f-ec35f3574d12-kube-api-access-f28lr\") on node \"crc\" DevicePath \"\"" Nov 27 10:23:36 crc kubenswrapper[4971]: I1127 10:23:36.077027 4971 generic.go:334] "Generic (PLEG): container finished" podID="fecd00ab-7896-44cd-b47f-ec35f3574d12" containerID="930256dd37b87b5b8ef4406ebe43521f99563cbe60755aaf62715a4494ad4368" exitCode=0 Nov 27 10:23:36 crc kubenswrapper[4971]: I1127 10:23:36.077106 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6kqjs" event={"ID":"fecd00ab-7896-44cd-b47f-ec35f3574d12","Type":"ContainerDied","Data":"930256dd37b87b5b8ef4406ebe43521f99563cbe60755aaf62715a4494ad4368"} Nov 27 10:23:36 crc kubenswrapper[4971]: I1127 10:23:36.077154 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6kqjs" Nov 27 10:23:36 crc kubenswrapper[4971]: I1127 10:23:36.077185 4971 scope.go:117] "RemoveContainer" containerID="930256dd37b87b5b8ef4406ebe43521f99563cbe60755aaf62715a4494ad4368" Nov 27 10:23:36 crc kubenswrapper[4971]: I1127 10:23:36.077162 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6kqjs" event={"ID":"fecd00ab-7896-44cd-b47f-ec35f3574d12","Type":"ContainerDied","Data":"2c45e732ce1ed25cb40fd062319458dc611959137bf4e67e6f8ce707101a0a15"} Nov 27 10:23:36 crc kubenswrapper[4971]: I1127 10:23:36.107249 4971 scope.go:117] "RemoveContainer" containerID="618fc4de45f42118d2a953d1b6145726d08fc0e023a8e98c5534e545f4f34d86" Nov 27 10:23:36 crc kubenswrapper[4971]: I1127 10:23:36.131776 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6kqjs"] Nov 27 10:23:36 crc kubenswrapper[4971]: I1127 10:23:36.139275 4971 scope.go:117] "RemoveContainer" containerID="75b6ae9168bd50ba122e61c0324a4ee1e4fb1edab804ddd15f363d06b37ac583" Nov 27 10:23:36 crc kubenswrapper[4971]: I1127 10:23:36.156181 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6kqjs"] Nov 27 10:23:36 crc kubenswrapper[4971]: I1127 10:23:36.198051 4971 scope.go:117] "RemoveContainer" containerID="930256dd37b87b5b8ef4406ebe43521f99563cbe60755aaf62715a4494ad4368" Nov 27 10:23:36 crc kubenswrapper[4971]: E1127 10:23:36.198927 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"930256dd37b87b5b8ef4406ebe43521f99563cbe60755aaf62715a4494ad4368\": container with ID starting with 930256dd37b87b5b8ef4406ebe43521f99563cbe60755aaf62715a4494ad4368 not found: ID does not exist" containerID="930256dd37b87b5b8ef4406ebe43521f99563cbe60755aaf62715a4494ad4368" Nov 27 10:23:36 crc kubenswrapper[4971]: I1127 10:23:36.198972 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"930256dd37b87b5b8ef4406ebe43521f99563cbe60755aaf62715a4494ad4368"} err="failed to get container status \"930256dd37b87b5b8ef4406ebe43521f99563cbe60755aaf62715a4494ad4368\": 
rpc error: code = NotFound desc = could not find container \"930256dd37b87b5b8ef4406ebe43521f99563cbe60755aaf62715a4494ad4368\": container with ID starting with 930256dd37b87b5b8ef4406ebe43521f99563cbe60755aaf62715a4494ad4368 not found: ID does not exist" Nov 27 10:23:36 crc kubenswrapper[4971]: I1127 10:23:36.198997 4971 scope.go:117] "RemoveContainer" containerID="618fc4de45f42118d2a953d1b6145726d08fc0e023a8e98c5534e545f4f34d86" Nov 27 10:23:36 crc kubenswrapper[4971]: E1127 10:23:36.199261 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"618fc4de45f42118d2a953d1b6145726d08fc0e023a8e98c5534e545f4f34d86\": container with ID starting with 618fc4de45f42118d2a953d1b6145726d08fc0e023a8e98c5534e545f4f34d86 not found: ID does not exist" containerID="618fc4de45f42118d2a953d1b6145726d08fc0e023a8e98c5534e545f4f34d86" Nov 27 10:23:36 crc kubenswrapper[4971]: I1127 10:23:36.199298 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"618fc4de45f42118d2a953d1b6145726d08fc0e023a8e98c5534e545f4f34d86"} err="failed to get container status \"618fc4de45f42118d2a953d1b6145726d08fc0e023a8e98c5534e545f4f34d86\": rpc error: code = NotFound desc = could not find container \"618fc4de45f42118d2a953d1b6145726d08fc0e023a8e98c5534e545f4f34d86\": container with ID starting with 618fc4de45f42118d2a953d1b6145726d08fc0e023a8e98c5534e545f4f34d86 not found: ID does not exist" Nov 27 10:23:36 crc kubenswrapper[4971]: I1127 10:23:36.199324 4971 scope.go:117] "RemoveContainer" containerID="75b6ae9168bd50ba122e61c0324a4ee1e4fb1edab804ddd15f363d06b37ac583" Nov 27 10:23:36 crc kubenswrapper[4971]: E1127 10:23:36.199674 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75b6ae9168bd50ba122e61c0324a4ee1e4fb1edab804ddd15f363d06b37ac583\": container with ID starting with 75b6ae9168bd50ba122e61c0324a4ee1e4fb1edab804ddd15f363d06b37ac583 not found: ID does not exist" containerID="75b6ae9168bd50ba122e61c0324a4ee1e4fb1edab804ddd15f363d06b37ac583" Nov 27 10:23:36 crc kubenswrapper[4971]: I1127 10:23:36.199702 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75b6ae9168bd50ba122e61c0324a4ee1e4fb1edab804ddd15f363d06b37ac583"} err="failed to get container status \"75b6ae9168bd50ba122e61c0324a4ee1e4fb1edab804ddd15f363d06b37ac583\": rpc error: code = NotFound desc = could not find container \"75b6ae9168bd50ba122e61c0324a4ee1e4fb1edab804ddd15f363d06b37ac583\": container with ID starting with 75b6ae9168bd50ba122e61c0324a4ee1e4fb1edab804ddd15f363d06b37ac583 not found: ID does not exist" Nov 27 10:23:36 crc kubenswrapper[4971]: I1127 10:23:36.566918 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fecd00ab-7896-44cd-b47f-ec35f3574d12" path="/var/lib/kubelet/pods/fecd00ab-7896-44cd-b47f-ec35f3574d12/volumes" Nov 27 10:23:39 crc kubenswrapper[4971]: I1127 10:23:39.550452 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" Nov 27 10:23:39 crc kubenswrapper[4971]: E1127 10:23:39.551011 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:23:53 crc kubenswrapper[4971]: I1127 10:23:53.551329 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" Nov 27 10:23:53 crc kubenswrapper[4971]: E1127 10:23:53.552664 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:24:08 crc kubenswrapper[4971]: I1127 10:24:08.552163 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" Nov 27 10:24:08 crc kubenswrapper[4971]: E1127 10:24:08.553441 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:24:20 crc kubenswrapper[4971]: I1127 10:24:20.551317 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" Nov 27 10:24:20 crc kubenswrapper[4971]: E1127 10:24:20.552852 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:24:35 crc kubenswrapper[4971]: I1127 10:24:35.551050 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" Nov 27 10:24:35 crc kubenswrapper[4971]: E1127 10:24:35.551839 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:24:50 crc kubenswrapper[4971]: I1127 10:24:50.550939 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" Nov 27 10:24:50 crc kubenswrapper[4971]: E1127 10:24:50.552319 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:25:00 crc kubenswrapper[4971]: I1127 10:25:00.621583 4971 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_5448dcaf-a243-4941-8c62-6bc48cc0f32e/init-config-reloader/0.log" Nov 27 10:25:00 crc kubenswrapper[4971]: I1127 10:25:00.835416 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_5448dcaf-a243-4941-8c62-6bc48cc0f32e/init-config-reloader/0.log" Nov 27 10:25:00 crc kubenswrapper[4971]: I1127 10:25:00.856060 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_5448dcaf-a243-4941-8c62-6bc48cc0f32e/alertmanager/0.log" Nov 27 10:25:00 crc kubenswrapper[4971]: I1127 10:25:00.911781 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_5448dcaf-a243-4941-8c62-6bc48cc0f32e/config-reloader/0.log" Nov 27 10:25:01 crc kubenswrapper[4971]: I1127 10:25:01.088773 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_f88d1fcd-36f3-489a-a844-4d2208822b41/aodh-api/0.log" Nov 27 10:25:01 crc kubenswrapper[4971]: I1127 10:25:01.088932 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_f88d1fcd-36f3-489a-a844-4d2208822b41/aodh-evaluator/0.log" Nov 27 10:25:01 crc kubenswrapper[4971]: I1127 10:25:01.193975 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_f88d1fcd-36f3-489a-a844-4d2208822b41/aodh-listener/0.log" Nov 27 10:25:01 crc kubenswrapper[4971]: I1127 10:25:01.285015 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_f88d1fcd-36f3-489a-a844-4d2208822b41/aodh-notifier/0.log" Nov 27 10:25:01 crc kubenswrapper[4971]: I1127 10:25:01.378413 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7f9867f5f8-5v9cv_b5931b59-b508-45f2-9c53-428808f072cb/barbican-api/0.log" Nov 27 10:25:01 crc kubenswrapper[4971]: I1127 10:25:01.642348 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-86bc479d88-stpvp_5e91e8ba-b5c3-437f-9406-d76e697a9036/barbican-keystone-listener/0.log" Nov 27 10:25:02 crc kubenswrapper[4971]: I1127 10:25:02.110293 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-579556d9cc-tzpkz_3289cb6f-6334-47db-a85b-8fcc20a12295/barbican-worker/0.log" Nov 27 10:25:02 crc kubenswrapper[4971]: I1127 10:25:02.120791 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-579556d9cc-tzpkz_3289cb6f-6334-47db-a85b-8fcc20a12295/barbican-worker-log/0.log" Nov 27 10:25:02 crc kubenswrapper[4971]: I1127 10:25:02.125406 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7f9867f5f8-5v9cv_b5931b59-b508-45f2-9c53-428808f072cb/barbican-api-log/0.log" Nov 27 10:25:02 crc kubenswrapper[4971]: I1127 10:25:02.535097 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-openstack-openstack-cell1-qbnqs_978c7516-1e2e-4d58-a4da-8d85ec47dcd9/bootstrap-openstack-openstack-cell1/0.log" Nov 27 10:25:02 crc kubenswrapper[4971]: I1127 10:25:02.847983 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-openstack-openstack-networker-l9thw_370dcf34-1b59-47fb-9dbc-92e31c352718/bootstrap-openstack-openstack-networker/0.log" Nov 27 10:25:02 crc kubenswrapper[4971]: I1127 10:25:02.895347 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84/ceilometer-central-agent/0.log" 
Nov 27 10:25:02 crc kubenswrapper[4971]: I1127 10:25:02.947569 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-86bc479d88-stpvp_5e91e8ba-b5c3-437f-9406-d76e697a9036/barbican-keystone-listener-log/0.log"
Nov 27 10:25:03 crc kubenswrapper[4971]: I1127 10:25:03.126056 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84/ceilometer-notification-agent/0.log"
Nov 27 10:25:03 crc kubenswrapper[4971]: I1127 10:25:03.128247 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84/proxy-httpd/0.log"
Nov 27 10:25:03 crc kubenswrapper[4971]: I1127 10:25:03.162384 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a7c50396-fa1e-4ec4-8835-6fd6e1bc9b84/sg-core/0.log"
Nov 27 10:25:03 crc kubenswrapper[4971]: I1127 10:25:03.371419 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-openstack-openstack-cell1-7snx6_7101fdfd-d9ee-4e8b-8971-82acd2b5b391/ceph-client-openstack-openstack-cell1/0.log"
Nov 27 10:25:03 crc kubenswrapper[4971]: I1127 10:25:03.719945 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_b9382e66-cf98-4705-8a0d-475e3f29ddea/cinder-api/0.log"
Nov 27 10:25:03 crc kubenswrapper[4971]: I1127 10:25:03.744882 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_b9382e66-cf98-4705-8a0d-475e3f29ddea/cinder-api-log/0.log"
Nov 27 10:25:04 crc kubenswrapper[4971]: I1127 10:25:04.256648 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6/probe/0.log"
Nov 27 10:25:04 crc kubenswrapper[4971]: I1127 10:25:04.334696 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_de667c8e-c3dc-48f7-9f4a-4330a5a4416d/cinder-scheduler/0.log"
Nov 27 10:25:04 crc kubenswrapper[4971]: I1127 10:25:04.553897 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa"
Nov 27 10:25:04 crc kubenswrapper[4971]: E1127 10:25:04.554217 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 10:25:04 crc kubenswrapper[4971]: I1127 10:25:04.680850 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_de667c8e-c3dc-48f7-9f4a-4330a5a4416d/probe/0.log"
Nov 27 10:25:04 crc kubenswrapper[4971]: I1127 10:25:04.927701 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_25a18d82-9dba-46e3-b37d-da25ab109122/probe/0.log"
Nov 27 10:25:04 crc kubenswrapper[4971]: I1127 10:25:04.993701 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-openstack-openstack-cell1-ftnrt_8a65697f-7b70-42e9-bc6b-12f32a73cbb7/configure-network-openstack-openstack-cell1/0.log"
Nov 27 10:25:05 crc kubenswrapper[4971]: I1127 10:25:05.234684 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-openstack-openstack-networker-m6k84_df91b509-08ef-4038-aaf4-b200f15daad2/configure-network-openstack-openstack-networker/0.log"
Nov 27 10:25:05 crc kubenswrapper[4971]: I1127 10:25:05.529301 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-openstack-openstack-cell1-58vhc_811b1337-2a00-4b0a-a398-163af3d76d79/configure-os-openstack-openstack-cell1/0.log"
Nov 27 10:25:05 crc kubenswrapper[4971]: I1127 10:25:05.554301 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-openstack-openstack-networker-jzpb4_0f83c73e-5e55-47a9-9df5-32c19df3372f/configure-os-openstack-openstack-networker/0.log"
Nov 27 10:25:05 crc kubenswrapper[4971]: I1127 10:25:05.826564 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79c966c8f9-zl9w6_d3464217-b299-4e96-bab3-c00569d46839/init/0.log"
Nov 27 10:25:06 crc kubenswrapper[4971]: I1127 10:25:06.102370 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79c966c8f9-zl9w6_d3464217-b299-4e96-bab3-c00569d46839/init/0.log"
Nov 27 10:25:06 crc kubenswrapper[4971]: I1127 10:25:06.227044 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_3da9f3bb-8ff4-46ec-a7af-d5aeb79120a6/cinder-backup/0.log"
Nov 27 10:25:06 crc kubenswrapper[4971]: I1127 10:25:06.318403 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79c966c8f9-zl9w6_d3464217-b299-4e96-bab3-c00569d46839/dnsmasq-dns/0.log"
Nov 27 10:25:06 crc kubenswrapper[4971]: I1127 10:25:06.365219 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-openstack-openstack-cell1-n7bzc_48953229-16fe-403a-a919-a6aa7ed6a6bb/download-cache-openstack-openstack-cell1/0.log"
Nov 27 10:25:06 crc kubenswrapper[4971]: I1127 10:25:06.527782 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-openstack-openstack-networker-pbgpj_6c999c8f-a99f-49ec-9899-602fb664ef82/download-cache-openstack-openstack-networker/0.log"
Nov 27 10:25:06 crc kubenswrapper[4971]: I1127 10:25:06.681742 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_9b1eda3c-bd39-46d3-9e13-c330b23c286b/glance-httpd/0.log"
Nov 27 10:25:06 crc kubenswrapper[4971]: I1127 10:25:06.754554 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_9b1eda3c-bd39-46d3-9e13-c330b23c286b/glance-log/0.log"
Nov 27 10:25:06 crc kubenswrapper[4971]: I1127 10:25:06.978629 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_e554a98e-ca6f-407c-be33-a634a8057430/glance-httpd/0.log"
Nov 27 10:25:07 crc kubenswrapper[4971]: I1127 10:25:07.101378 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_e554a98e-ca6f-407c-be33-a634a8057430/glance-log/0.log"
Nov 27 10:25:07 crc kubenswrapper[4971]: I1127 10:25:07.371500 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-7bc5c74f54-7k5pr_1d195723-5d1e-4cad-9893-627476bcf980/heat-api/0.log"
Nov 27 10:25:07 crc kubenswrapper[4971]: I1127 10:25:07.480622 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_25a18d82-9dba-46e3-b37d-da25ab109122/cinder-volume/0.log"
Nov 27 10:25:07 crc kubenswrapper[4971]: I1127 10:25:07.529987 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-558f5f68c6-gwcv5_b974245a-b10d-460c-a512-783707368cfb/heat-cfnapi/0.log"
Nov 27 10:25:07 crc kubenswrapper[4971]: I1127 10:25:07.571215 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-59c8bcfc86-ndvzb_4a891ed3-1a18-48f2-8a36-f0cacf4cda34/heat-engine/0.log"
Nov 27 10:25:07 crc kubenswrapper[4971]: I1127 10:25:07.741387 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-fd46b4db5-sp4l9_e03d8c76-8c6c-4945-8fd3-5b3709e91963/horizon/0.log"
Nov 27 10:25:07 crc kubenswrapper[4971]: I1127 10:25:07.851836 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-fd46b4db5-sp4l9_e03d8c76-8c6c-4945-8fd3-5b3709e91963/horizon-log/0.log"
Nov 27 10:25:07 crc kubenswrapper[4971]: I1127 10:25:07.896764 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-openstack-openstack-cell1-m6f7c_65820bcd-c13b-4c2d-a475-e98ec977d33d/install-certs-openstack-openstack-cell1/0.log"
Nov 27 10:25:08 crc kubenswrapper[4971]: I1127 10:25:08.010942 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-openstack-openstack-networker-zs7h5_ca45dc1d-d9c7-435a-bbf7-a5bb15f244cd/install-certs-openstack-openstack-networker/0.log"
Nov 27 10:25:08 crc kubenswrapper[4971]: I1127 10:25:08.096716 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-openstack-openstack-cell1-d9wmt_2973f319-d645-472b-b9b5-b3708dd37782/install-os-openstack-openstack-cell1/0.log"
Nov 27 10:25:08 crc kubenswrapper[4971]: I1127 10:25:08.558240 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-openstack-openstack-networker-2dgqc_326f81f2-90ba-46d9-a77e-fdefdce61bea/install-os-openstack-openstack-networker/0.log"
Nov 27 10:25:08 crc kubenswrapper[4971]: I1127 10:25:08.824719 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29403901-xc2hf_3a9eaaae-c63a-4924-8833-138a78aacd29/keystone-cron/0.log"
Nov 27 10:25:09 crc kubenswrapper[4971]: I1127 10:25:09.055611 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29403961-2r79p_28af66d8-2264-4581-b51a-dfab3e4a7456/keystone-cron/0.log"
Nov 27 10:25:09 crc kubenswrapper[4971]: I1127 10:25:09.238213 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_98285765-b7fa-435c-a83f-545e4f7bb7bb/kube-state-metrics/0.log"
Nov 27 10:25:09 crc kubenswrapper[4971]: I1127 10:25:09.434626 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-openstack-openstack-cell1-hhpn7_780772cd-49b3-471d-b7a6-cdbde5c28c7a/libvirt-openstack-openstack-cell1/0.log"
Nov 27 10:25:09 crc kubenswrapper[4971]: I1127 10:25:09.521285 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6d6f88957b-tm5kw_dd725c35-d118-42cd-b18e-52de53fd7fc0/keystone-api/0.log"
Nov 27 10:25:10 crc kubenswrapper[4971]: I1127 10:25:10.037419 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_f53839dc-4c3a-4d71-ba66-897cc991de94/manila-scheduler/0.log"
Nov 27 10:25:10 crc kubenswrapper[4971]: I1127 10:25:10.098154 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_3285b5d5-f19d-4f37-912a-8d3fde19a794/manila-api/0.log"
Nov 27 10:25:10 crc kubenswrapper[4971]: I1127 10:25:10.102395 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_f53839dc-4c3a-4d71-ba66-897cc991de94/probe/0.log"
Nov 27 10:25:10 crc kubenswrapper[4971]: I1127 10:25:10.194568 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_3285b5d5-f19d-4f37-912a-8d3fde19a794/manila-api-log/0.log"
Nov 27 10:25:10 crc kubenswrapper[4971]: I1127 10:25:10.337020 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_2ca40967-dc15-4bb4-8f05-24a4eb302605/probe/0.log"
Nov 27 10:25:10 crc kubenswrapper[4971]: I1127 10:25:10.380933 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_2ca40967-dc15-4bb4-8f05-24a4eb302605/manila-share/0.log"
Nov 27 10:25:10 crc kubenswrapper[4971]: I1127 10:25:10.865359 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-dhcp-openstack-openstack-cell1-hw9g8_34e270a8-c0d0-4ace-a1f8-b7ba36869a70/neutron-dhcp-openstack-openstack-cell1/0.log"
Nov 27 10:25:11 crc kubenswrapper[4971]: I1127 10:25:11.099087 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5fd8b7cd95-lc5lp_289e2425-5bea-432f-bb53-37fbd0e796b5/neutron-httpd/0.log"
Nov 27 10:25:11 crc kubenswrapper[4971]: I1127 10:25:11.191964 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-openstack-openstack-cell1-mkwjr_3f8fa824-e9d3-469b-8dec-9edc6aa63271/neutron-metadata-openstack-openstack-cell1/0.log"
Nov 27 10:25:11 crc kubenswrapper[4971]: I1127 10:25:11.600496 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5fd8b7cd95-lc5lp_289e2425-5bea-432f-bb53-37fbd0e796b5/neutron-api/0.log"
Nov 27 10:25:11 crc kubenswrapper[4971]: I1127 10:25:11.696019 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-sriov-openstack-openstack-cell1-ztg25_3e206d9e-acba-4218-9b29-370e5c829461/neutron-sriov-openstack-openstack-cell1/0.log"
Nov 27 10:25:11 crc kubenswrapper[4971]: I1127 10:25:11.703941 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-openstack-openstack-networker-fqnz2_69e0000b-dec5-4186-85ac-b65c6091f4d8/neutron-metadata-openstack-openstack-networker/0.log"
Nov 27 10:25:12 crc kubenswrapper[4971]: I1127 10:25:12.073912 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84/nova-api-api/0.log"
Nov 27 10:25:12 crc kubenswrapper[4971]: I1127 10:25:12.250216 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_30642a33-6ef3-4ef7-9598-06fad84e16b1/nova-cell0-conductor-conductor/0.log"
Nov 27 10:25:12 crc kubenswrapper[4971]: I1127 10:25:12.459378 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_6d70faa6-5591-4e41-9020-2d81ece79575/nova-cell1-conductor-conductor/0.log"
Nov 27 10:25:12 crc kubenswrapper[4971]: I1127 10:25:12.472913 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_edd3cc22-05c2-4f9d-a2ef-df5f9fcdac84/nova-api-log/0.log"
Nov 27 10:25:13 crc kubenswrapper[4971]: I1127 10:25:13.098205 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_43d941be-9782-4b3b-b9ff-b01aa66e612b/nova-cell1-novncproxy-novncproxy/0.log"
Nov 27 10:25:13 crc kubenswrapper[4971]: I1127 10:25:13.209652 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-nova-compute-ffu-cell1-openstack-celldn86c_9cbe8f92-e455-4fc6-b875-1895f7dee6fd/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1/0.log"
Nov 27 10:25:13 crc kubenswrapper[4971]: I1127 10:25:13.489301 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-openstack-cell1-vmfjt_a8268ae2-f26c-4829-9552-75e2645d7ffa/nova-cell1-openstack-openstack-cell1/0.log"
Nov 27 10:25:13 crc kubenswrapper[4971]: I1127 10:25:13.582727 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_2ba702d1-dbdd-4396-9105-0f258581ec30/nova-metadata-log/0.log"
Nov 27 10:25:13 crc kubenswrapper[4971]: I1127 10:25:13.743618 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_2ba702d1-dbdd-4396-9105-0f258581ec30/nova-metadata-metadata/0.log"
Nov 27 10:25:13 crc kubenswrapper[4971]: I1127 10:25:13.898235 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_c7f9a94d-de60-4ba6-b2b4-7d7de72108ac/nova-scheduler-scheduler/0.log"
Nov 27 10:25:14 crc kubenswrapper[4971]: I1127 10:25:14.032042 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_30e1cda1-3bb1-4df3-9a75-998e7a88cab1/mysql-bootstrap/0.log"
Nov 27 10:25:14 crc kubenswrapper[4971]: I1127 10:25:14.351117 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0/mysql-bootstrap/0.log"
Nov 27 10:25:14 crc kubenswrapper[4971]: I1127 10:25:14.359406 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_30e1cda1-3bb1-4df3-9a75-998e7a88cab1/galera/0.log"
Nov 27 10:25:14 crc kubenswrapper[4971]: I1127 10:25:14.366253 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_30e1cda1-3bb1-4df3-9a75-998e7a88cab1/mysql-bootstrap/0.log"
Nov 27 10:25:14 crc kubenswrapper[4971]: I1127 10:25:14.656198 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_307e1bff-561c-4b41-8ac4-40a403f1936c/openstackclient/0.log"
Nov 27 10:25:14 crc kubenswrapper[4971]: I1127 10:25:14.659091 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0/mysql-bootstrap/0.log"
Nov 27 10:25:14 crc kubenswrapper[4971]: I1127 10:25:14.861964 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_bbeec80f-deb5-4f4c-adb2-36eb45f4c7e0/galera/0.log"
Nov 27 10:25:14 crc kubenswrapper[4971]: I1127 10:25:14.918649 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_68d03f53-0125-471d-af31-02b3f026ec33/openstack-network-exporter/0.log"
Nov 27 10:25:15 crc kubenswrapper[4971]: I1127 10:25:15.058099 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_68d03f53-0125-471d-af31-02b3f026ec33/ovn-northd/0.log"
Nov 27 10:25:15 crc kubenswrapper[4971]: I1127 10:25:15.336124 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-openstack-openstack-cell1-mpk56_7d24cd5b-2f66-48ea-a1c1-7c53ae6962ba/ovn-openstack-openstack-cell1/0.log"
Nov 27 10:25:15 crc kubenswrapper[4971]: I1127 10:25:15.549622 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-openstack-openstack-networker-6lml5_8e0f0dfd-4c80-47f9-b334-dfeac0f423d6/ovn-openstack-openstack-networker/0.log"
Nov 27 10:25:15 crc kubenswrapper[4971]: I1127 10:25:15.631133 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_27511fdf-56f4-42be-8f6f-bb0cfd874e6b/ovsdbserver-nb/0.log" Nov 27 10:25:15 crc kubenswrapper[4971]: I1127 10:25:15.705554 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_27511fdf-56f4-42be-8f6f-bb0cfd874e6b/openstack-network-exporter/0.log" Nov 27 10:25:15 crc kubenswrapper[4971]: I1127 10:25:15.857750 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_7ad86fb8-ef7c-4f30-87d0-e7d49757ec00/openstack-network-exporter/0.log" Nov 27 10:25:15 crc kubenswrapper[4971]: I1127 10:25:15.923373 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_7ad86fb8-ef7c-4f30-87d0-e7d49757ec00/ovsdbserver-nb/0.log" Nov 27 10:25:16 crc kubenswrapper[4971]: I1127 10:25:16.089880 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_d33d13ea-43c9-4a85-a638-026123faf7e8/openstack-network-exporter/0.log" Nov 27 10:25:16 crc kubenswrapper[4971]: I1127 10:25:16.144974 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_d33d13ea-43c9-4a85-a638-026123faf7e8/ovsdbserver-nb/0.log" Nov 27 10:25:16 crc kubenswrapper[4971]: I1127 10:25:16.440657 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_5fb7afd5-f25d-4315-92e5-89c9f46b6eae/openstack-network-exporter/0.log" Nov 27 10:25:16 crc kubenswrapper[4971]: I1127 10:25:16.810805 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_5fb7afd5-f25d-4315-92e5-89c9f46b6eae/ovsdbserver-sb/0.log" Nov 27 10:25:16 crc kubenswrapper[4971]: I1127 10:25:16.886867 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_3d3c4964-3f5f-40eb-a660-63b41ed0a3bc/openstack-network-exporter/0.log" Nov 27 10:25:16 crc kubenswrapper[4971]: I1127 10:25:16.956375 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_3d3c4964-3f5f-40eb-a660-63b41ed0a3bc/ovsdbserver-sb/0.log" Nov 27 10:25:17 crc kubenswrapper[4971]: I1127 10:25:17.061319 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_d85a4597-b37a-4ddb-943e-1c2c7fb9693d/openstack-network-exporter/0.log" Nov 27 10:25:17 crc kubenswrapper[4971]: I1127 10:25:17.127409 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_d85a4597-b37a-4ddb-943e-1c2c7fb9693d/ovsdbserver-sb/0.log" Nov 27 10:25:17 crc kubenswrapper[4971]: I1127 10:25:17.550421 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" Nov 27 10:25:17 crc kubenswrapper[4971]: E1127 10:25:17.550753 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:25:17 crc kubenswrapper[4971]: I1127 10:25:17.598842 4971 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_pre-adoption-validation-openstack-pre-adoption-openstack-ch7c89_1dbb8cb6-a78a-4bdd-8508-aa1439f484a9/pre-adoption-validation-openstack-pre-adoption-openstack-cell1/0.log" Nov 27 10:25:17 crc kubenswrapper[4971]: I1127 10:25:17.634165 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-74fbdbb586-65qb7_aa5026c6-2f7f-4590-a883-d2599ec5a57a/placement-api/0.log" Nov 27 10:25:17 crc kubenswrapper[4971]: I1127 10:25:17.774168 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-74fbdbb586-65qb7_aa5026c6-2f7f-4590-a883-d2599ec5a57a/placement-log/0.log" Nov 27 10:25:17 crc kubenswrapper[4971]: I1127 10:25:17.860372 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_pre-adoption-validation-openstack-pre-adoption-openstack-nb7djh_2da7d353-1007-422a-aebb-e80704a87fba/pre-adoption-validation-openstack-pre-adoption-openstack-networ/0.log" Nov 27 10:25:18 crc kubenswrapper[4971]: I1127 10:25:18.045060 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_15d3c3db-5df1-4825-8fc0-3e9a10dfb943/init-config-reloader/0.log" Nov 27 10:25:18 crc kubenswrapper[4971]: I1127 10:25:18.287087 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_15d3c3db-5df1-4825-8fc0-3e9a10dfb943/config-reloader/0.log" Nov 27 10:25:18 crc kubenswrapper[4971]: I1127 10:25:18.322495 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_15d3c3db-5df1-4825-8fc0-3e9a10dfb943/thanos-sidecar/0.log" Nov 27 10:25:18 crc kubenswrapper[4971]: I1127 10:25:18.333032 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_15d3c3db-5df1-4825-8fc0-3e9a10dfb943/prometheus/0.log" Nov 27 10:25:18 crc kubenswrapper[4971]: I1127 10:25:18.348950 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_15d3c3db-5df1-4825-8fc0-3e9a10dfb943/init-config-reloader/0.log" Nov 27 10:25:18 crc kubenswrapper[4971]: I1127 10:25:18.597031 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_15383cc2-2672-447c-8556-62f1b78de474/setup-container/0.log" Nov 27 10:25:18 crc kubenswrapper[4971]: I1127 10:25:18.783861 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_15383cc2-2672-447c-8556-62f1b78de474/setup-container/0.log" Nov 27 10:25:18 crc kubenswrapper[4971]: I1127 10:25:18.833497 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_15383cc2-2672-447c-8556-62f1b78de474/rabbitmq/0.log" Nov 27 10:25:18 crc kubenswrapper[4971]: I1127 10:25:18.898663 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_90837c23-3be4-4046-8b88-b328e3ddd9e9/setup-container/0.log" Nov 27 10:25:19 crc kubenswrapper[4971]: I1127 10:25:19.170695 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-openstack-openstack-cell1-2d4qd_363453ac-740f-4603-a65c-71a4bb442fe6/reboot-os-openstack-openstack-cell1/0.log" Nov 27 10:25:19 crc kubenswrapper[4971]: I1127 10:25:19.222431 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_90837c23-3be4-4046-8b88-b328e3ddd9e9/setup-container/0.log" Nov 27 10:25:19 crc kubenswrapper[4971]: I1127 10:25:19.409301 4971 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_reboot-os-openstack-openstack-networker-hnrlz_3dc9cc91-a665-4d22-a5dc-e8014b1d1925/reboot-os-openstack-openstack-networker/0.log" Nov 27 10:25:19 crc kubenswrapper[4971]: I1127 10:25:19.635461 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-openstack-openstack-cell1-mn94j_46fe2d18-3744-489f-93f7-d771ebb2d6a8/run-os-openstack-openstack-cell1/0.log" Nov 27 10:25:19 crc kubenswrapper[4971]: I1127 10:25:19.820123 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-openstack-openstack-networker-jdddp_98fa796b-6f0e-4968-aea3-bbea8edc617c/run-os-openstack-openstack-networker/0.log" Nov 27 10:25:19 crc kubenswrapper[4971]: I1127 10:25:19.980332 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-openstack-ncmnj_0f7dd6cb-a9ab-43b2-acfd-352d29e9dc14/ssh-known-hosts-openstack/0.log" Nov 27 10:25:20 crc kubenswrapper[4971]: I1127 10:25:20.360054 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-openstack-openstack-cell1-xbpj6_35017a9d-382f-4e37-9652-77e0a5aec05e/telemetry-openstack-openstack-cell1/0.log" Nov 27 10:25:20 crc kubenswrapper[4971]: I1127 10:25:20.509115 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_5dede177-0f43-453e-b75b-e5ac63add3da/tempest-tests-tempest-tests-runner/0.log" Nov 27 10:25:20 crc kubenswrapper[4971]: I1127 10:25:20.608737 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_43dadd9a-00b5-4496-9210-487a919f5955/test-operator-logs-container/0.log" Nov 27 10:25:20 crc kubenswrapper[4971]: I1127 10:25:20.847986 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tripleo-cleanup-tripleo-cleanup-openstack-cell1-qlcpd_ea8298a5-6819-4be7-a1d9-1af470a81021/tripleo-cleanup-tripleo-cleanup-openstack-cell1/0.log" Nov 27 10:25:21 crc kubenswrapper[4971]: I1127 10:25:21.105190 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tripleo-cleanup-tripleo-cleanup-openstack-networker-6j8mr_f3bbe788-f3b6-4502-b497-d9aa6937ae74/tripleo-cleanup-tripleo-cleanup-openstack-networker/0.log" Nov 27 10:25:21 crc kubenswrapper[4971]: I1127 10:25:21.124661 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-openstack-openstack-cell1-4hhx2_ef7cbaeb-f90b-4ff4-855c-917a445abf20/validate-network-openstack-openstack-cell1/0.log" Nov 27 10:25:21 crc kubenswrapper[4971]: I1127 10:25:21.365242 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-openstack-openstack-networker-kz2cj_9657dfc5-381a-4d1b-adb3-c5e6d0676d88/validate-network-openstack-openstack-networker/0.log" Nov 27 10:25:21 crc kubenswrapper[4971]: I1127 10:25:21.889401 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_90837c23-3be4-4046-8b88-b328e3ddd9e9/rabbitmq/0.log" Nov 27 10:25:31 crc kubenswrapper[4971]: I1127 10:25:31.552082 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" Nov 27 10:25:31 crc kubenswrapper[4971]: E1127 10:25:31.552840 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:25:34 crc kubenswrapper[4971]: I1127 10:25:34.405181 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_55c82719-efe6-4d3a-b9c4-1def29b2c252/memcached/0.log" Nov 27 10:25:45 crc kubenswrapper[4971]: I1127 10:25:45.551498 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" Nov 27 10:25:45 crc kubenswrapper[4971]: E1127 10:25:45.552704 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:25:49 crc kubenswrapper[4971]: I1127 10:25:49.626046 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg_5c497b31-bddd-4d0a-bbf8-89dcd999ff3c/util/0.log" Nov 27 10:25:49 crc kubenswrapper[4971]: I1127 10:25:49.852979 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg_5c497b31-bddd-4d0a-bbf8-89dcd999ff3c/util/0.log" Nov 27 10:25:49 crc kubenswrapper[4971]: I1127 10:25:49.853877 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg_5c497b31-bddd-4d0a-bbf8-89dcd999ff3c/pull/0.log" Nov 27 10:25:49 crc kubenswrapper[4971]: I1127 10:25:49.854009 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg_5c497b31-bddd-4d0a-bbf8-89dcd999ff3c/pull/0.log" Nov 27 10:25:50 crc kubenswrapper[4971]: I1127 10:25:50.046396 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg_5c497b31-bddd-4d0a-bbf8-89dcd999ff3c/util/0.log" Nov 27 10:25:50 crc kubenswrapper[4971]: I1127 10:25:50.077069 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg_5c497b31-bddd-4d0a-bbf8-89dcd999ff3c/pull/0.log" Nov 27 10:25:50 crc kubenswrapper[4971]: I1127 10:25:50.110898 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_b45623fa79426ee6d52ccc2f61ed894b37aa2fb70e5ce0cf390950ffberxxdg_5c497b31-bddd-4d0a-bbf8-89dcd999ff3c/extract/0.log" Nov 27 10:25:50 crc kubenswrapper[4971]: I1127 10:25:50.294666 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7b64f4fb85-bp29n_9be13ef4-b6e0-434f-aea4-a0b374facb97/kube-rbac-proxy/0.log" Nov 27 10:25:50 crc kubenswrapper[4971]: I1127 10:25:50.436716 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7b64f4fb85-bp29n_9be13ef4-b6e0-434f-aea4-a0b374facb97/manager/0.log" Nov 27 10:25:50 crc kubenswrapper[4971]: I1127 10:25:50.459200 4971 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6b7f75547b-p5tg5_f9f71029-8412-4967-acf0-ad2d2e7c31f4/kube-rbac-proxy/0.log" Nov 27 10:25:50 crc kubenswrapper[4971]: I1127 10:25:50.599808 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6b7f75547b-p5tg5_f9f71029-8412-4967-acf0-ad2d2e7c31f4/manager/0.log" Nov 27 10:25:50 crc kubenswrapper[4971]: I1127 10:25:50.748579 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-955677c94-rm5cw_d4125c07-9503-4a93-a635-5863fdf26632/kube-rbac-proxy/0.log" Nov 27 10:25:50 crc kubenswrapper[4971]: I1127 10:25:50.757754 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-955677c94-rm5cw_d4125c07-9503-4a93-a635-5863fdf26632/manager/0.log" Nov 27 10:25:50 crc kubenswrapper[4971]: I1127 10:25:50.861684 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-589cbd6b5b-mg4bw_e9ec811b-a5ea-4068-abe7-fdfbdaba39b3/kube-rbac-proxy/0.log" Nov 27 10:25:51 crc kubenswrapper[4971]: I1127 10:25:51.094393 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5b77f656f-5m76m_2e4648d4-38e3-4e9b-a68d-7db912f67470/kube-rbac-proxy/0.log" Nov 27 10:25:51 crc kubenswrapper[4971]: I1127 10:25:51.151896 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-589cbd6b5b-mg4bw_e9ec811b-a5ea-4068-abe7-fdfbdaba39b3/manager/0.log" Nov 27 10:25:51 crc kubenswrapper[4971]: I1127 10:25:51.177254 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5b77f656f-5m76m_2e4648d4-38e3-4e9b-a68d-7db912f67470/manager/0.log" Nov 27 10:25:51 crc kubenswrapper[4971]: I1127 10:25:51.338814 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5d494799bf-q7w5w_784d6231-82a4-4f58-8de7-0a3f6378e2b0/kube-rbac-proxy/0.log" Nov 27 10:25:51 crc kubenswrapper[4971]: I1127 10:25:51.381744 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5d494799bf-q7w5w_784d6231-82a4-4f58-8de7-0a3f6378e2b0/manager/0.log" Nov 27 10:25:51 crc kubenswrapper[4971]: I1127 10:25:51.611554 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-zdjzp_eee345d4-5e02-4a96-a204-383fd410e564/kube-rbac-proxy/0.log" Nov 27 10:25:51 crc kubenswrapper[4971]: I1127 10:25:51.751614 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-67cb4dc6d4-7c6m7_bc2fe762-04b6-4c5a-bf42-ce67dd839609/kube-rbac-proxy/0.log" Nov 27 10:25:51 crc kubenswrapper[4971]: I1127 10:25:51.903860 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-zdjzp_eee345d4-5e02-4a96-a204-383fd410e564/manager/0.log" Nov 27 10:25:51 crc kubenswrapper[4971]: I1127 10:25:51.922123 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-67cb4dc6d4-7c6m7_bc2fe762-04b6-4c5a-bf42-ce67dd839609/manager/0.log" Nov 27 10:25:51 crc kubenswrapper[4971]: I1127 10:25:51.980868 4971 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7b4567c7cf-gn2tw_6488635b-4233-4898-b540-8c7433b25d0f/kube-rbac-proxy/0.log" Nov 27 10:25:52 crc kubenswrapper[4971]: I1127 10:25:52.222487 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7b4567c7cf-gn2tw_6488635b-4233-4898-b540-8c7433b25d0f/manager/0.log" Nov 27 10:25:52 crc kubenswrapper[4971]: I1127 10:25:52.239845 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5d499bf58b-t8rr8_a5e429ee-33ed-40b1-878c-c4cf36ffd5ff/kube-rbac-proxy/0.log" Nov 27 10:25:52 crc kubenswrapper[4971]: I1127 10:25:52.357764 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5d499bf58b-t8rr8_a5e429ee-33ed-40b1-878c-c4cf36ffd5ff/manager/0.log" Nov 27 10:25:52 crc kubenswrapper[4971]: I1127 10:25:52.458241 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-66f4dd4bc7-xscln_ccd00428-c0de-472f-a9a3-19ec6524d4c8/kube-rbac-proxy/0.log" Nov 27 10:25:52 crc kubenswrapper[4971]: I1127 10:25:52.495087 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-66f4dd4bc7-xscln_ccd00428-c0de-472f-a9a3-19ec6524d4c8/manager/0.log" Nov 27 10:25:52 crc kubenswrapper[4971]: I1127 10:25:52.639421 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6fdcddb789-q47ql_8e16feb0-cfc3-43ba-9411-00788a5c42dd/kube-rbac-proxy/0.log" Nov 27 10:25:52 crc kubenswrapper[4971]: I1127 10:25:52.770841 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6fdcddb789-q47ql_8e16feb0-cfc3-43ba-9411-00788a5c42dd/manager/0.log" Nov 27 10:25:52 crc kubenswrapper[4971]: I1127 10:25:52.898273 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79556f57fc-2zx72_f3763945-b889-4f5b-bd9c-2f5bfa93e6d6/kube-rbac-proxy/0.log" Nov 27 10:25:53 crc kubenswrapper[4971]: I1127 10:25:53.010572 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-64cdc6ff96-7928r_4f041cb0-6235-484c-829e-3bf4b6a3e5e6/kube-rbac-proxy/0.log" Nov 27 10:25:53 crc kubenswrapper[4971]: I1127 10:25:53.135354 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79556f57fc-2zx72_f3763945-b889-4f5b-bd9c-2f5bfa93e6d6/manager/0.log" Nov 27 10:25:53 crc kubenswrapper[4971]: I1127 10:25:53.161168 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-64cdc6ff96-7928r_4f041cb0-6235-484c-829e-3bf4b6a3e5e6/manager/0.log" Nov 27 10:25:53 crc kubenswrapper[4971]: I1127 10:25:53.238939 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5d9f9695db7tlww_d1299562-0e0b-4dc5-916a-a72c2a79993d/kube-rbac-proxy/0.log" Nov 27 10:25:53 crc kubenswrapper[4971]: I1127 10:25:53.351467 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5d9f9695db7tlww_d1299562-0e0b-4dc5-916a-a72c2a79993d/manager/0.log" Nov 27 10:25:53 crc kubenswrapper[4971]: 
I1127 10:25:53.739261 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-67d8f6cc56-69xws_577bd944-782b-4fb1-aff2-2c7b564be7a1/operator/0.log"
Nov 27 10:25:53 crc kubenswrapper[4971]: I1127 10:25:53.911341 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-56897c768d-2zcq7_bd9bf1a7-8218-4ea2-b68a-4f5961835bf3/kube-rbac-proxy/0.log"
Nov 27 10:25:54 crc kubenswrapper[4971]: I1127 10:25:54.001485 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-r5brv_90063128-e4a8-4b8a-b830-44c061b9f533/registry-server/0.log"
Nov 27 10:25:54 crc kubenswrapper[4971]: I1127 10:25:54.099823 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-56897c768d-2zcq7_bd9bf1a7-8218-4ea2-b68a-4f5961835bf3/manager/0.log"
Nov 27 10:25:54 crc kubenswrapper[4971]: I1127 10:25:54.178576 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-57988cc5b5-kglvq_5a3f31c6-891f-4fc3-9979-ada42facf791/kube-rbac-proxy/0.log"
Nov 27 10:25:54 crc kubenswrapper[4971]: I1127 10:25:54.251791 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-57988cc5b5-kglvq_5a3f31c6-891f-4fc3-9979-ada42facf791/manager/0.log"
Nov 27 10:25:54 crc kubenswrapper[4971]: I1127 10:25:54.432547 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-ssqrc_e539f873-39f9-493b-be0b-af897894466e/operator/0.log"
Nov 27 10:25:54 crc kubenswrapper[4971]: I1127 10:25:54.531686 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-d77b94747-zkgrb_8f49b4e8-d2ef-4a8b-8751-f480782f970f/kube-rbac-proxy/0.log"
Nov 27 10:25:54 crc kubenswrapper[4971]: I1127 10:25:54.601319 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-d77b94747-zkgrb_8f49b4e8-d2ef-4a8b-8751-f480782f970f/manager/0.log"
Nov 27 10:25:54 crc kubenswrapper[4971]: I1127 10:25:54.865605 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-p4brb_13bda70c-c880-4a51-aee8-6de35bcf8a35/kube-rbac-proxy/0.log"
Nov 27 10:25:54 crc kubenswrapper[4971]: I1127 10:25:54.934302 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd6c7f4c8-jl2db_defd6a4c-7400-4f6b-8258-1c70426bcb85/kube-rbac-proxy/0.log"
Nov 27 10:25:55 crc kubenswrapper[4971]: I1127 10:25:55.044262 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-p4brb_13bda70c-c880-4a51-aee8-6de35bcf8a35/manager/0.log"
Nov 27 10:25:55 crc kubenswrapper[4971]: I1127 10:25:55.185682 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd6c7f4c8-jl2db_defd6a4c-7400-4f6b-8258-1c70426bcb85/manager/0.log"
Nov 27 10:25:55 crc kubenswrapper[4971]: I1127 10:25:55.200149 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-656dcb59d4-ctgpd_f6b1aca7-dda9-4f85-b305-ecc1f7e40c36/kube-rbac-proxy/0.log"
Nov 27 10:25:55 crc kubenswrapper[4971]: I1127 10:25:55.364083 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-656dcb59d4-ctgpd_f6b1aca7-dda9-4f85-b305-ecc1f7e40c36/manager/0.log"
Nov 27 10:25:56 crc kubenswrapper[4971]: I1127 10:25:56.147541 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-66f75ddbcc-fpsdk_03c0f055-04c8-42da-b1bf-a35ba598aeb3/manager/0.log"
Nov 27 10:26:00 crc kubenswrapper[4971]: I1127 10:26:00.550853 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa"
Nov 27 10:26:00 crc kubenswrapper[4971]: E1127 10:26:00.552078 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 10:26:15 crc kubenswrapper[4971]: I1127 10:26:15.551012 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa"
Nov 27 10:26:15 crc kubenswrapper[4971]: E1127 10:26:15.552033 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 10:26:18 crc kubenswrapper[4971]: I1127 10:26:18.001016 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-ht55m_85f4323a-d5cf-45bb-b3df-57434dfe4d53/control-plane-machine-set-operator/0.log"
Nov 27 10:26:18 crc kubenswrapper[4971]: I1127 10:26:18.187568 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9d7hw_fda2f244-1f2f-4a07-915c-586bd4136c02/machine-api-operator/0.log"
Nov 27 10:26:18 crc kubenswrapper[4971]: I1127 10:26:18.212587 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9d7hw_fda2f244-1f2f-4a07-915c-586bd4136c02/kube-rbac-proxy/0.log"
Nov 27 10:26:27 crc kubenswrapper[4971]: I1127 10:26:27.550284 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa"
Nov 27 10:26:27 crc kubenswrapper[4971]: E1127 10:26:27.551617 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 10:26:35 crc kubenswrapper[4971]: I1127 10:26:35.282278 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-rdkl2_3184f0b5-107c-4f9b-a3aa-e5d2f7b30fba/cert-manager-controller/0.log"
Nov 27 10:26:35 crc kubenswrapper[4971]: I1127 10:26:35.465586 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-nmpm9_ed50780c-b184-42e4-9551-656f8abb0d17/cert-manager-cainjector/0.log"
Nov 27 10:26:35 crc kubenswrapper[4971]: I1127 10:26:35.521938 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-4c2cw_427779b7-166e-4422-9db7-b9a2ab5db7f2/cert-manager-webhook/0.log"
Nov 27 10:26:42 crc kubenswrapper[4971]: I1127 10:26:42.564257 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa"
Nov 27 10:26:42 crc kubenswrapper[4971]: E1127 10:26:42.565370 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7"
Nov 27 10:26:51 crc kubenswrapper[4971]: I1127 10:26:51.175642 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-8l7v6_1c7d7b3d-899a-4feb-b8a2-9a906b0878cd/nmstate-console-plugin/0.log"
Nov 27 10:26:51 crc kubenswrapper[4971]: I1127 10:26:51.415813 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-9kfld_c63c6fa0-dedb-4b00-b001-da4dcfcd8058/nmstate-handler/0.log"
Nov 27 10:26:51 crc kubenswrapper[4971]: I1127 10:26:51.771950 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-npfl8_d83969e1-c2e8-40a2-b5a6-2226f3876a85/kube-rbac-proxy/0.log"
Nov 27 10:26:51 crc kubenswrapper[4971]: I1127 10:26:51.871109 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-npfl8_d83969e1-c2e8-40a2-b5a6-2226f3876a85/nmstate-metrics/0.log"
Nov 27 10:26:51 crc kubenswrapper[4971]: I1127 10:26:51.998845 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-2xtbl_b8cc8b1e-4eaf-40de-8385-0b5bec5db1ba/nmstate-operator/0.log"
Nov 27 10:26:52 crc kubenswrapper[4971]: I1127 10:26:52.135374 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-ttwlt_89b39e22-730b-4f81-a85e-b80ec9e7f19a/nmstate-webhook/0.log"
Nov 27 10:26:56 crc kubenswrapper[4971]: I1127 10:26:56.550705 4971 scope.go:117] "RemoveContainer" containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa"
Nov 27 10:26:56 crc kubenswrapper[4971]: I1127 10:26:56.825225 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"40651520d9a16833ec5da7f3b31faf04fb3ad0c60cd1a3ec6e12318c1dc0944c"}
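[Editor's note] The repeated "back-off 5m0s restarting failed container" errors above are the kubelet's crash-loop backoff: the restart delay roughly doubles from a 10-second base up to a five-minute cap, which is why the pod sits in CrashLoopBackOff for minutes before the 10:26:56 restart succeeds. A minimal sketch of that schedule in Python, assuming the default constants (an illustration, not kubelet's actual code):

# Approximate kubelet crash-loop backoff: 10s base, doubling per crash,
# capped at 5m ("back-off 5m0s" in the log above). Assumed defaults.
BASE, CAP = 10, 300  # seconds

def backoff(restart_count: int) -> int:
    return min(BASE * 2 ** restart_count, CAP)

print([backoff(n) for n in range(8)])  # [10, 20, 40, 80, 160, 300, 300, 300]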
Nov 27 10:27:12 crc kubenswrapper[4971]: I1127 10:27:12.458813 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-2tmkr_6f64f6f7-7b67-4928-b73e-198643169a45/kube-rbac-proxy/0.log"
Nov 27 10:27:12 crc kubenswrapper[4971]: I1127 10:27:12.822522 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-2tmkr_6f64f6f7-7b67-4928-b73e-198643169a45/controller/0.log"
Nov 27 10:27:12 crc kubenswrapper[4971]: I1127 10:27:12.850654 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bpn6l_cce86d74-11b3-43c7-bdae-eb87fbeb457a/cp-frr-files/0.log"
Nov 27 10:27:13 crc kubenswrapper[4971]: I1127 10:27:13.417279 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bpn6l_cce86d74-11b3-43c7-bdae-eb87fbeb457a/cp-frr-files/0.log"
Nov 27 10:27:13 crc kubenswrapper[4971]: I1127 10:27:13.432599 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bpn6l_cce86d74-11b3-43c7-bdae-eb87fbeb457a/cp-reloader/0.log"
Nov 27 10:27:13 crc kubenswrapper[4971]: I1127 10:27:13.495462 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bpn6l_cce86d74-11b3-43c7-bdae-eb87fbeb457a/cp-metrics/0.log"
Nov 27 10:27:13 crc kubenswrapper[4971]: I1127 10:27:13.533283 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bpn6l_cce86d74-11b3-43c7-bdae-eb87fbeb457a/cp-reloader/0.log"
Nov 27 10:27:13 crc kubenswrapper[4971]: I1127 10:27:13.645942 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bpn6l_cce86d74-11b3-43c7-bdae-eb87fbeb457a/cp-frr-files/0.log"
Nov 27 10:27:13 crc kubenswrapper[4971]: I1127 10:27:13.651988 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bpn6l_cce86d74-11b3-43c7-bdae-eb87fbeb457a/cp-reloader/0.log"
Nov 27 10:27:13 crc kubenswrapper[4971]: I1127 10:27:13.745564 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bpn6l_cce86d74-11b3-43c7-bdae-eb87fbeb457a/cp-metrics/0.log"
Nov 27 10:27:13 crc kubenswrapper[4971]: I1127 10:27:13.764116 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bpn6l_cce86d74-11b3-43c7-bdae-eb87fbeb457a/cp-metrics/0.log"
Nov 27 10:27:14 crc kubenswrapper[4971]: I1127 10:27:14.007350 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bpn6l_cce86d74-11b3-43c7-bdae-eb87fbeb457a/cp-metrics/0.log"
Nov 27 10:27:14 crc kubenswrapper[4971]: I1127 10:27:14.015966 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bpn6l_cce86d74-11b3-43c7-bdae-eb87fbeb457a/cp-frr-files/0.log"
Nov 27 10:27:14 crc kubenswrapper[4971]: I1127 10:27:14.054494 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bpn6l_cce86d74-11b3-43c7-bdae-eb87fbeb457a/cp-reloader/0.log"
Nov 27 10:27:14 crc kubenswrapper[4971]: I1127 10:27:14.060834 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bpn6l_cce86d74-11b3-43c7-bdae-eb87fbeb457a/controller/0.log"
Nov 27 10:27:14 crc kubenswrapper[4971]: I1127 10:27:14.277843 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bpn6l_cce86d74-11b3-43c7-bdae-eb87fbeb457a/frr-metrics/0.log"
Nov 27 10:27:14 crc kubenswrapper[4971]: I1127 10:27:14.319093 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bpn6l_cce86d74-11b3-43c7-bdae-eb87fbeb457a/kube-rbac-proxy-frr/0.log"
Nov 27 10:27:14 crc kubenswrapper[4971]: I1127 10:27:14.367785 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bpn6l_cce86d74-11b3-43c7-bdae-eb87fbeb457a/kube-rbac-proxy/0.log"
Nov 27 10:27:14 crc kubenswrapper[4971]: I1127 10:27:14.573705 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bpn6l_cce86d74-11b3-43c7-bdae-eb87fbeb457a/reloader/0.log"
Nov 27 10:27:14 crc kubenswrapper[4971]: I1127 10:27:14.686072 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-8pnfh_b677a99e-e567-4d2f-8c52-a92ec72d82a6/frr-k8s-webhook-server/0.log"
Nov 27 10:27:15 crc kubenswrapper[4971]: I1127 10:27:15.016656 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-69f6b6f45b-zlxzt_58f5b77d-eff3-48fe-b104-c7d62bb97327/manager/0.log"
Nov 27 10:27:15 crc kubenswrapper[4971]: I1127 10:27:15.127893 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-74448477b5-m75vr_1f8d9f92-08c5-4ae0-bcbb-8d78b73dd6b7/webhook-server/0.log"
Nov 27 10:27:15 crc kubenswrapper[4971]: I1127 10:27:15.273615 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-rpws9_d39b11b5-58d7-4eb3-aea7-50e2f7f40c89/kube-rbac-proxy/0.log"
Nov 27 10:27:16 crc kubenswrapper[4971]: I1127 10:27:16.219408 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-rpws9_d39b11b5-58d7-4eb3-aea7-50e2f7f40c89/speaker/0.log"
Nov 27 10:27:17 crc kubenswrapper[4971]: I1127 10:27:17.654331 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bpn6l_cce86d74-11b3-43c7-bdae-eb87fbeb457a/frr/0.log"
Nov 27 10:27:31 crc kubenswrapper[4971]: I1127 10:27:31.721026 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm_f3242efe-2323-432b-bcb2-5301d60426eb/util/0.log"
Nov 27 10:27:32 crc kubenswrapper[4971]: I1127 10:27:32.013234 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm_f3242efe-2323-432b-bcb2-5301d60426eb/util/0.log"
Nov 27 10:27:32 crc kubenswrapper[4971]: I1127 10:27:32.029316 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm_f3242efe-2323-432b-bcb2-5301d60426eb/pull/0.log"
Nov 27 10:27:32 crc kubenswrapper[4971]: I1127 10:27:32.055188 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm_f3242efe-2323-432b-bcb2-5301d60426eb/pull/0.log"
Nov 27 10:27:32 crc kubenswrapper[4971]: I1127 10:27:32.462701 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm_f3242efe-2323-432b-bcb2-5301d60426eb/util/0.log"
Nov 27 10:27:32 crc kubenswrapper[4971]: I1127 10:27:32.509100 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm_f3242efe-2323-432b-bcb2-5301d60426eb/pull/0.log"
Nov 27 10:27:32 crc kubenswrapper[4971]: I1127 10:27:32.528775 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931axk2wm_f3242efe-2323-432b-bcb2-5301d60426eb/extract/0.log"
Nov 27 10:27:32 crc kubenswrapper[4971]: I1127 10:27:32.665818 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9_dd894e14-f465-47e2-aaed-d2060077c852/util/0.log"
Nov 27 10:27:32 crc kubenswrapper[4971]: I1127 10:27:32.945635 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9_dd894e14-f465-47e2-aaed-d2060077c852/pull/0.log"
Nov 27 10:27:32 crc kubenswrapper[4971]: I1127 10:27:32.971987 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9_dd894e14-f465-47e2-aaed-d2060077c852/pull/0.log"
Nov 27 10:27:32 crc kubenswrapper[4971]: I1127 10:27:32.982273 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9_dd894e14-f465-47e2-aaed-d2060077c852/util/0.log"
Nov 27 10:27:33 crc kubenswrapper[4971]: I1127 10:27:33.241321 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9_dd894e14-f465-47e2-aaed-d2060077c852/util/0.log"
Nov 27 10:27:33 crc kubenswrapper[4971]: I1127 10:27:33.272956 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9_dd894e14-f465-47e2-aaed-d2060077c852/pull/0.log"
Nov 27 10:27:33 crc kubenswrapper[4971]: I1127 10:27:33.288882 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f8jcx9_dd894e14-f465-47e2-aaed-d2060077c852/extract/0.log"
Nov 27 10:27:33 crc kubenswrapper[4971]: I1127 10:27:33.468399 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp_ada68182-ff97-4a6f-83a5-10dba9af7324/util/0.log"
Nov 27 10:27:33 crc kubenswrapper[4971]: I1127 10:27:33.662720 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp_ada68182-ff97-4a6f-83a5-10dba9af7324/util/0.log"
Nov 27 10:27:33 crc kubenswrapper[4971]: I1127 10:27:33.664336 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp_ada68182-ff97-4a6f-83a5-10dba9af7324/pull/0.log"
Nov 27 10:27:33 crc kubenswrapper[4971]: I1127 10:27:33.703282 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp_ada68182-ff97-4a6f-83a5-10dba9af7324/pull/0.log"
Nov 27 10:27:33 crc kubenswrapper[4971]: I1127 10:27:33.944790 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp_ada68182-ff97-4a6f-83a5-10dba9af7324/util/0.log"
Nov 27 10:27:33 crc kubenswrapper[4971]: I1127 10:27:33.956004 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp_ada68182-ff97-4a6f-83a5-10dba9af7324/extract/0.log"
Nov 27 10:27:33 crc kubenswrapper[4971]: I1127 10:27:33.987002 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105tpqp_ada68182-ff97-4a6f-83a5-10dba9af7324/pull/0.log"
Nov 27 10:27:34 crc kubenswrapper[4971]: I1127 10:27:34.136566 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm_39c338ba-f061-448c-8f8d-0699d5ea38ac/util/0.log"
Nov 27 10:27:34 crc kubenswrapper[4971]: I1127 10:27:34.319042 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm_39c338ba-f061-448c-8f8d-0699d5ea38ac/util/0.log"
Nov 27 10:27:34 crc kubenswrapper[4971]: I1127 10:27:34.336811 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm_39c338ba-f061-448c-8f8d-0699d5ea38ac/pull/0.log"
Nov 27 10:27:34 crc kubenswrapper[4971]: I1127 10:27:34.352918 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm_39c338ba-f061-448c-8f8d-0699d5ea38ac/pull/0.log"
Nov 27 10:27:34 crc kubenswrapper[4971]: I1127 10:27:34.620748 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm_39c338ba-f061-448c-8f8d-0699d5ea38ac/util/0.log"
Nov 27 10:27:34 crc kubenswrapper[4971]: I1127 10:27:34.654814 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm_39c338ba-f061-448c-8f8d-0699d5ea38ac/pull/0.log"
Nov 27 10:27:34 crc kubenswrapper[4971]: I1127 10:27:34.669340 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f834hblm_39c338ba-f061-448c-8f8d-0699d5ea38ac/extract/0.log"
Nov 27 10:27:34 crc kubenswrapper[4971]: I1127 10:27:34.803841 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2lx5q_8835b0f2-9588-4250-b6bb-355e55445496/extract-utilities/0.log"
Nov 27 10:27:35 crc kubenswrapper[4971]: I1127 10:27:35.094086 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2lx5q_8835b0f2-9588-4250-b6bb-355e55445496/extract-content/0.log"
Nov 27 10:27:35 crc kubenswrapper[4971]: I1127 10:27:35.147876 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2lx5q_8835b0f2-9588-4250-b6bb-355e55445496/extract-utilities/0.log"
Nov 27 10:27:35 crc kubenswrapper[4971]: I1127 10:27:35.160229 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2lx5q_8835b0f2-9588-4250-b6bb-355e55445496/extract-content/0.log"
Nov 27 10:27:35 crc kubenswrapper[4971]: I1127 10:27:35.346996 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2lx5q_8835b0f2-9588-4250-b6bb-355e55445496/extract-content/0.log"
Nov 27 10:27:35 crc kubenswrapper[4971]: I1127 10:27:35.395682 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2lx5q_8835b0f2-9588-4250-b6bb-355e55445496/extract-utilities/0.log"
Nov 27 10:27:35 crc kubenswrapper[4971]: I1127 10:27:35.668431 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8l8gh_2677e996-721b-4cff-8b1d-fb927f04e0e6/extract-utilities/0.log"
Nov 27 10:27:35 crc kubenswrapper[4971]: I1127 10:27:35.838281 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-2lx5q_8835b0f2-9588-4250-b6bb-355e55445496/registry-server/0.log"
Nov 27 10:27:35 crc kubenswrapper[4971]: I1127 10:27:35.855356 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8l8gh_2677e996-721b-4cff-8b1d-fb927f04e0e6/extract-utilities/0.log"
Nov 27 10:27:35 crc kubenswrapper[4971]: I1127 10:27:35.900941 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8l8gh_2677e996-721b-4cff-8b1d-fb927f04e0e6/extract-content/0.log"
Nov 27 10:27:35 crc kubenswrapper[4971]: I1127 10:27:35.920178 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8l8gh_2677e996-721b-4cff-8b1d-fb927f04e0e6/extract-content/0.log"
Nov 27 10:27:36 crc kubenswrapper[4971]: I1127 10:27:36.141392 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8l8gh_2677e996-721b-4cff-8b1d-fb927f04e0e6/extract-utilities/0.log"
Nov 27 10:27:36 crc kubenswrapper[4971]: I1127 10:27:36.167981 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8l8gh_2677e996-721b-4cff-8b1d-fb927f04e0e6/extract-content/0.log"
Nov 27 10:27:36 crc kubenswrapper[4971]: I1127 10:27:36.216491 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-ctzhf_9304f55c-4ec0-4828-bfe6-1024a5ec719e/marketplace-operator/0.log"
Nov 27 10:27:36 crc kubenswrapper[4971]: I1127 10:27:36.408302 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-s6lp8_8c727031-a0df-4bca-be8b-e8211a220302/extract-utilities/0.log"
Nov 27 10:27:36 crc kubenswrapper[4971]: I1127 10:27:36.687211 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-s6lp8_8c727031-a0df-4bca-be8b-e8211a220302/extract-content/0.log"
Nov 27 10:27:36 crc kubenswrapper[4971]: I1127 10:27:36.699895 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-s6lp8_8c727031-a0df-4bca-be8b-e8211a220302/extract-utilities/0.log"
Nov 27 10:27:36 crc kubenswrapper[4971]: I1127 10:27:36.788615 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-s6lp8_8c727031-a0df-4bca-be8b-e8211a220302/extract-content/0.log"
Nov 27 10:27:37 crc kubenswrapper[4971]: I1127 10:27:37.371894 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-s6lp8_8c727031-a0df-4bca-be8b-e8211a220302/extract-content/0.log"
Nov 27 10:27:37 crc kubenswrapper[4971]: I1127 10:27:37.460181 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-s6lp8_8c727031-a0df-4bca-be8b-e8211a220302/extract-utilities/0.log"
Nov 27 10:27:37 crc kubenswrapper[4971]: I1127 10:27:37.565232 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-j7mvd_6f79433c-d33b-4b19-9582-2900de4137a1/extract-utilities/0.log"
Nov 27 10:27:37 crc kubenswrapper[4971]: I1127 10:27:37.792281 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-j7mvd_6f79433c-d33b-4b19-9582-2900de4137a1/extract-utilities/0.log"
Nov 27 10:27:37 crc kubenswrapper[4971]: I1127 10:27:37.792381 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-j7mvd_6f79433c-d33b-4b19-9582-2900de4137a1/extract-content/0.log"
Nov 27 10:27:37 crc kubenswrapper[4971]: I1127 10:27:37.857176 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-j7mvd_6f79433c-d33b-4b19-9582-2900de4137a1/extract-content/0.log"
Nov 27 10:27:38 crc kubenswrapper[4971]: I1127 10:27:38.120938 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-s6lp8_8c727031-a0df-4bca-be8b-e8211a220302/registry-server/0.log"
Nov 27 10:27:38 crc kubenswrapper[4971]: I1127 10:27:38.171246 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-j7mvd_6f79433c-d33b-4b19-9582-2900de4137a1/extract-content/0.log"
Nov 27 10:27:38 crc kubenswrapper[4971]: I1127 10:27:38.185617 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-j7mvd_6f79433c-d33b-4b19-9582-2900de4137a1/extract-utilities/0.log"
Nov 27 10:27:38 crc kubenswrapper[4971]: I1127 10:27:38.576598 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8l8gh_2677e996-721b-4cff-8b1d-fb927f04e0e6/registry-server/0.log"
Nov 27 10:27:39 crc kubenswrapper[4971]: I1127 10:27:39.808713 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-j7mvd_6f79433c-d33b-4b19-9582-2900de4137a1/registry-server/0.log"
Nov 27 10:27:53 crc kubenswrapper[4971]: I1127 10:27:53.850504 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-shr67_d80dae31-1743-4758-a865-83748b028b5d/prometheus-operator/0.log"
Nov 27 10:27:54 crc kubenswrapper[4971]: I1127 10:27:54.031041 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-75dd79885d-5mct7_074b15e8-70dd-4d14-beee-731393461a46/prometheus-operator-admission-webhook/0.log"
Nov 27 10:27:54 crc kubenswrapper[4971]: I1127 10:27:54.069119 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-75dd79885d-87mfz_25a6f6c7-3cbb-42a4-a2a3-ba9268fac935/prometheus-operator-admission-webhook/0.log"
Nov 27 10:27:54 crc kubenswrapper[4971]: I1127 10:27:54.249988 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-v8snv_8306e952-474c-42d2-b1a2-d99d7c0c3fef/operator/0.log"
Nov 27 10:27:54 crc kubenswrapper[4971]: I1127 10:27:54.285071 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-45hp5_0e006a4c-d74e-4f76-8310-2c017bd69a5d/perses-operator/0.log"
pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 10:29:26 crc kubenswrapper[4971]: I1127 10:29:26.413070 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 10:29:26 crc kubenswrapper[4971]: I1127 10:29:26.414137 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 10:29:56 crc kubenswrapper[4971]: I1127 10:29:56.413177 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 10:29:56 crc kubenswrapper[4971]: I1127 10:29:56.414112 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 10:29:56 crc kubenswrapper[4971]: I1127 10:29:56.414204 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 10:29:56 crc kubenswrapper[4971]: I1127 10:29:56.416037 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"40651520d9a16833ec5da7f3b31faf04fb3ad0c60cd1a3ec6e12318c1dc0944c"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 10:29:56 crc kubenswrapper[4971]: I1127 10:29:56.416165 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://40651520d9a16833ec5da7f3b31faf04fb3ad0c60cd1a3ec6e12318c1dc0944c" gracePeriod=600 Nov 27 10:29:56 crc kubenswrapper[4971]: I1127 10:29:56.610807 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="40651520d9a16833ec5da7f3b31faf04fb3ad0c60cd1a3ec6e12318c1dc0944c" exitCode=0 Nov 27 10:29:56 crc kubenswrapper[4971]: I1127 10:29:56.610867 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"40651520d9a16833ec5da7f3b31faf04fb3ad0c60cd1a3ec6e12318c1dc0944c"} Nov 27 10:29:56 crc kubenswrapper[4971]: I1127 10:29:56.610932 4971 scope.go:117] "RemoveContainer" 
containerID="116b63c77f50e60a9681ede491266fd11db22eaa347218c7c899b5f0ec9100aa" Nov 27 10:29:57 crc kubenswrapper[4971]: I1127 10:29:57.629114 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerStarted","Data":"69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c"} Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.215115 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft"] Nov 27 10:30:00 crc kubenswrapper[4971]: E1127 10:30:00.216722 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="157d6b21-f537-476c-8a56-f3ff290a4f2d" containerName="registry-server" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.216747 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="157d6b21-f537-476c-8a56-f3ff290a4f2d" containerName="registry-server" Nov 27 10:30:00 crc kubenswrapper[4971]: E1127 10:30:00.216765 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="157d6b21-f537-476c-8a56-f3ff290a4f2d" containerName="extract-utilities" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.216779 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="157d6b21-f537-476c-8a56-f3ff290a4f2d" containerName="extract-utilities" Nov 27 10:30:00 crc kubenswrapper[4971]: E1127 10:30:00.216817 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fecd00ab-7896-44cd-b47f-ec35f3574d12" containerName="extract-content" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.216832 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="fecd00ab-7896-44cd-b47f-ec35f3574d12" containerName="extract-content" Nov 27 10:30:00 crc kubenswrapper[4971]: E1127 10:30:00.216853 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="157d6b21-f537-476c-8a56-f3ff290a4f2d" containerName="extract-content" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.216866 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="157d6b21-f537-476c-8a56-f3ff290a4f2d" containerName="extract-content" Nov 27 10:30:00 crc kubenswrapper[4971]: E1127 10:30:00.216900 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fecd00ab-7896-44cd-b47f-ec35f3574d12" containerName="registry-server" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.216914 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="fecd00ab-7896-44cd-b47f-ec35f3574d12" containerName="registry-server" Nov 27 10:30:00 crc kubenswrapper[4971]: E1127 10:30:00.216946 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86f013ee-f7bd-4c78-87d7-01840e15985e" containerName="registry-server" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.216959 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="86f013ee-f7bd-4c78-87d7-01840e15985e" containerName="registry-server" Nov 27 10:30:00 crc kubenswrapper[4971]: E1127 10:30:00.216983 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fecd00ab-7896-44cd-b47f-ec35f3574d12" containerName="extract-utilities" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.216996 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="fecd00ab-7896-44cd-b47f-ec35f3574d12" containerName="extract-utilities" Nov 27 10:30:00 crc kubenswrapper[4971]: E1127 10:30:00.217029 4971 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="86f013ee-f7bd-4c78-87d7-01840e15985e" containerName="extract-utilities" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.217042 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="86f013ee-f7bd-4c78-87d7-01840e15985e" containerName="extract-utilities" Nov 27 10:30:00 crc kubenswrapper[4971]: E1127 10:30:00.217072 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86f013ee-f7bd-4c78-87d7-01840e15985e" containerName="extract-content" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.217085 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="86f013ee-f7bd-4c78-87d7-01840e15985e" containerName="extract-content" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.217522 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="157d6b21-f537-476c-8a56-f3ff290a4f2d" containerName="registry-server" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.217570 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="fecd00ab-7896-44cd-b47f-ec35f3574d12" containerName="registry-server" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.217591 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="86f013ee-f7bd-4c78-87d7-01840e15985e" containerName="registry-server" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.235484 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.241014 4971 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.241207 4971 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.252153 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft"] Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.315281 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/68914e27-a023-4f9f-8141-d0ec79522540-secret-volume\") pod \"collect-profiles-29403990-6d8ft\" (UID: \"68914e27-a023-4f9f-8141-d0ec79522540\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.315381 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/68914e27-a023-4f9f-8141-d0ec79522540-config-volume\") pod \"collect-profiles-29403990-6d8ft\" (UID: \"68914e27-a023-4f9f-8141-d0ec79522540\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.315452 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddq9f\" (UniqueName: \"kubernetes.io/projected/68914e27-a023-4f9f-8141-d0ec79522540-kube-api-access-ddq9f\") pod \"collect-profiles-29403990-6d8ft\" (UID: \"68914e27-a023-4f9f-8141-d0ec79522540\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.418453 4971 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/68914e27-a023-4f9f-8141-d0ec79522540-secret-volume\") pod \"collect-profiles-29403990-6d8ft\" (UID: \"68914e27-a023-4f9f-8141-d0ec79522540\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.418548 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/68914e27-a023-4f9f-8141-d0ec79522540-config-volume\") pod \"collect-profiles-29403990-6d8ft\" (UID: \"68914e27-a023-4f9f-8141-d0ec79522540\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.418593 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddq9f\" (UniqueName: \"kubernetes.io/projected/68914e27-a023-4f9f-8141-d0ec79522540-kube-api-access-ddq9f\") pod \"collect-profiles-29403990-6d8ft\" (UID: \"68914e27-a023-4f9f-8141-d0ec79522540\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.420718 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/68914e27-a023-4f9f-8141-d0ec79522540-config-volume\") pod \"collect-profiles-29403990-6d8ft\" (UID: \"68914e27-a023-4f9f-8141-d0ec79522540\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.456877 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/68914e27-a023-4f9f-8141-d0ec79522540-secret-volume\") pod \"collect-profiles-29403990-6d8ft\" (UID: \"68914e27-a023-4f9f-8141-d0ec79522540\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.462093 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddq9f\" (UniqueName: \"kubernetes.io/projected/68914e27-a023-4f9f-8141-d0ec79522540-kube-api-access-ddq9f\") pod \"collect-profiles-29403990-6d8ft\" (UID: \"68914e27-a023-4f9f-8141-d0ec79522540\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft" Nov 27 10:30:00 crc kubenswrapper[4971]: I1127 10:30:00.566412 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft" Nov 27 10:30:01 crc kubenswrapper[4971]: I1127 10:30:01.116712 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft"] Nov 27 10:30:01 crc kubenswrapper[4971]: I1127 10:30:01.694508 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft" event={"ID":"68914e27-a023-4f9f-8141-d0ec79522540","Type":"ContainerStarted","Data":"6b5804b0916f10a4bc943dda9e1215acbc5e71b2414cca3a0ca6cc49e7fbaec5"} Nov 27 10:30:01 crc kubenswrapper[4971]: I1127 10:30:01.709749 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft" event={"ID":"68914e27-a023-4f9f-8141-d0ec79522540","Type":"ContainerStarted","Data":"2525e6167422a4d0828b061567c31ccd234f99c622b3bef33ee0c9c871502d3f"} Nov 27 10:30:01 crc kubenswrapper[4971]: I1127 10:30:01.738311 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft" podStartSLOduration=1.738291454 podStartE2EDuration="1.738291454s" podCreationTimestamp="2025-11-27 10:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-27 10:30:01.727672451 +0000 UTC m=+13039.919716369" watchObservedRunningTime="2025-11-27 10:30:01.738291454 +0000 UTC m=+13039.930335362" Nov 27 10:30:02 crc kubenswrapper[4971]: I1127 10:30:02.718955 4971 generic.go:334] "Generic (PLEG): container finished" podID="68914e27-a023-4f9f-8141-d0ec79522540" containerID="6b5804b0916f10a4bc943dda9e1215acbc5e71b2414cca3a0ca6cc49e7fbaec5" exitCode=0 Nov 27 10:30:02 crc kubenswrapper[4971]: I1127 10:30:02.719774 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft" event={"ID":"68914e27-a023-4f9f-8141-d0ec79522540","Type":"ContainerDied","Data":"6b5804b0916f10a4bc943dda9e1215acbc5e71b2414cca3a0ca6cc49e7fbaec5"} Nov 27 10:30:04 crc kubenswrapper[4971]: I1127 10:30:04.245971 4971 util.go:48] "No ready sandbox for pod can be found. 
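[Editor's note] The pod_startup_latency_tracker entry above is simple arithmetic: with the zeroed firstStartedPulling/lastFinishedPulling sentinels (no image pull), podStartE2EDuration is just the watch-observed running time minus podCreationTimestamp. Checking it with the values copied from that entry (microsecond precision, so the nanosecond tail is truncated):

from datetime import datetime, timezone

created  = datetime(2025, 11, 27, 10, 30, 0, tzinfo=timezone.utc)
observed = datetime(2025, 11, 27, 10, 30, 1, 738291, tzinfo=timezone.utc)
print((observed - created).total_seconds())  # ~1.738291, logged as 1.738291454s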
Nov 27 10:30:04 crc kubenswrapper[4971]: I1127 10:30:04.245971 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft"
Nov 27 10:30:04 crc kubenswrapper[4971]: I1127 10:30:04.347441 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/68914e27-a023-4f9f-8141-d0ec79522540-config-volume\") pod \"68914e27-a023-4f9f-8141-d0ec79522540\" (UID: \"68914e27-a023-4f9f-8141-d0ec79522540\") "
Nov 27 10:30:04 crc kubenswrapper[4971]: I1127 10:30:04.347913 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ddq9f\" (UniqueName: \"kubernetes.io/projected/68914e27-a023-4f9f-8141-d0ec79522540-kube-api-access-ddq9f\") pod \"68914e27-a023-4f9f-8141-d0ec79522540\" (UID: \"68914e27-a023-4f9f-8141-d0ec79522540\") "
Nov 27 10:30:04 crc kubenswrapper[4971]: I1127 10:30:04.347982 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/68914e27-a023-4f9f-8141-d0ec79522540-secret-volume\") pod \"68914e27-a023-4f9f-8141-d0ec79522540\" (UID: \"68914e27-a023-4f9f-8141-d0ec79522540\") "
Nov 27 10:30:04 crc kubenswrapper[4971]: I1127 10:30:04.349377 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68914e27-a023-4f9f-8141-d0ec79522540-config-volume" (OuterVolumeSpecName: "config-volume") pod "68914e27-a023-4f9f-8141-d0ec79522540" (UID: "68914e27-a023-4f9f-8141-d0ec79522540"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 27 10:30:04 crc kubenswrapper[4971]: I1127 10:30:04.356347 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68914e27-a023-4f9f-8141-d0ec79522540-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "68914e27-a023-4f9f-8141-d0ec79522540" (UID: "68914e27-a023-4f9f-8141-d0ec79522540"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 27 10:30:04 crc kubenswrapper[4971]: I1127 10:30:04.371024 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68914e27-a023-4f9f-8141-d0ec79522540-kube-api-access-ddq9f" (OuterVolumeSpecName: "kube-api-access-ddq9f") pod "68914e27-a023-4f9f-8141-d0ec79522540" (UID: "68914e27-a023-4f9f-8141-d0ec79522540"). InnerVolumeSpecName "kube-api-access-ddq9f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 10:30:04 crc kubenswrapper[4971]: I1127 10:30:04.451326 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ddq9f\" (UniqueName: \"kubernetes.io/projected/68914e27-a023-4f9f-8141-d0ec79522540-kube-api-access-ddq9f\") on node \"crc\" DevicePath \"\""
Nov 27 10:30:04 crc kubenswrapper[4971]: I1127 10:30:04.451372 4971 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/68914e27-a023-4f9f-8141-d0ec79522540-secret-volume\") on node \"crc\" DevicePath \"\""
Nov 27 10:30:04 crc kubenswrapper[4971]: I1127 10:30:04.451386 4971 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/68914e27-a023-4f9f-8141-d0ec79522540-config-volume\") on node \"crc\" DevicePath \"\""
Nov 27 10:30:04 crc kubenswrapper[4971]: I1127 10:30:04.754930 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft" event={"ID":"68914e27-a023-4f9f-8141-d0ec79522540","Type":"ContainerDied","Data":"2525e6167422a4d0828b061567c31ccd234f99c622b3bef33ee0c9c871502d3f"}
Nov 27 10:30:04 crc kubenswrapper[4971]: I1127 10:30:04.755396 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2525e6167422a4d0828b061567c31ccd234f99c622b3bef33ee0c9c871502d3f"
Nov 27 10:30:04 crc kubenswrapper[4971]: I1127 10:30:04.755054 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29403990-6d8ft"
Nov 27 10:30:04 crc kubenswrapper[4971]: I1127 10:30:04.844381 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv"]
Nov 27 10:30:04 crc kubenswrapper[4971]: I1127 10:30:04.859950 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29403945-skfvv"]
Nov 27 10:30:06 crc kubenswrapper[4971]: I1127 10:30:06.567214 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c37fb49d-e681-42e1-a2c6-295824b2223c" path="/var/lib/kubelet/pods/c37fb49d-e681-42e1-a2c6-295824b2223c/volumes"
Nov 27 10:31:03 crc kubenswrapper[4971]: I1127 10:31:03.378274 4971 scope.go:117] "RemoveContainer" containerID="152b4dfeb501488a392f27a0ca5eefc764ad5dd668bbc8b62e6df992b8f3ccd5"
Nov 27 10:31:56 crc kubenswrapper[4971]: I1127 10:31:56.413629 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 27 10:31:56 crc kubenswrapper[4971]: I1127 10:31:56.414830 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
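[Editor's note] The reconciler entries above trace the full volume lifecycle of the finished collect-profiles pod: VerifyControllerAttachedVolume and MountVolume.SetUp at admission, UnmountVolume.TearDown once the pod exits, and "Volume detached" when cleanup completes. A quick way to pull that sequence out of the log for one pod UID (UID copied from the entries above; the local file name is an assumption):

import re

UID = "68914e27-a023-4f9f-8141-d0ec79522540"
events = re.compile(
    r"(VerifyControllerAttachedVolume|MountVolume\.SetUp succeeded"
    r"|UnmountVolume\.TearDown succeeded|Volume detached)"
)
with open("kubelet.log") as f:  # hypothetical local copy of this log
    for line in f:
        if UID in line:
            for m in events.finditer(line):
                print(m.group(1))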
containerName="collect-profiles" Nov 27 10:32:15 crc kubenswrapper[4971]: I1127 10:32:15.961894 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="68914e27-a023-4f9f-8141-d0ec79522540" containerName="collect-profiles" Nov 27 10:32:15 crc kubenswrapper[4971]: I1127 10:32:15.962199 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="68914e27-a023-4f9f-8141-d0ec79522540" containerName="collect-profiles" Nov 27 10:32:15 crc kubenswrapper[4971]: I1127 10:32:15.966979 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k99ph" Nov 27 10:32:15 crc kubenswrapper[4971]: I1127 10:32:15.990895 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k99ph"] Nov 27 10:32:16 crc kubenswrapper[4971]: I1127 10:32:16.032553 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zg46\" (UniqueName: \"kubernetes.io/projected/08375dda-bff0-43f7-bde4-1fc0771078ac-kube-api-access-9zg46\") pod \"redhat-marketplace-k99ph\" (UID: \"08375dda-bff0-43f7-bde4-1fc0771078ac\") " pod="openshift-marketplace/redhat-marketplace-k99ph" Nov 27 10:32:16 crc kubenswrapper[4971]: I1127 10:32:16.032624 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08375dda-bff0-43f7-bde4-1fc0771078ac-catalog-content\") pod \"redhat-marketplace-k99ph\" (UID: \"08375dda-bff0-43f7-bde4-1fc0771078ac\") " pod="openshift-marketplace/redhat-marketplace-k99ph" Nov 27 10:32:16 crc kubenswrapper[4971]: I1127 10:32:16.032703 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08375dda-bff0-43f7-bde4-1fc0771078ac-utilities\") pod \"redhat-marketplace-k99ph\" (UID: \"08375dda-bff0-43f7-bde4-1fc0771078ac\") " pod="openshift-marketplace/redhat-marketplace-k99ph" Nov 27 10:32:16 crc kubenswrapper[4971]: I1127 10:32:16.136834 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zg46\" (UniqueName: \"kubernetes.io/projected/08375dda-bff0-43f7-bde4-1fc0771078ac-kube-api-access-9zg46\") pod \"redhat-marketplace-k99ph\" (UID: \"08375dda-bff0-43f7-bde4-1fc0771078ac\") " pod="openshift-marketplace/redhat-marketplace-k99ph" Nov 27 10:32:16 crc kubenswrapper[4971]: I1127 10:32:16.136925 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08375dda-bff0-43f7-bde4-1fc0771078ac-catalog-content\") pod \"redhat-marketplace-k99ph\" (UID: \"08375dda-bff0-43f7-bde4-1fc0771078ac\") " pod="openshift-marketplace/redhat-marketplace-k99ph" Nov 27 10:32:16 crc kubenswrapper[4971]: I1127 10:32:16.137010 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08375dda-bff0-43f7-bde4-1fc0771078ac-utilities\") pod \"redhat-marketplace-k99ph\" (UID: \"08375dda-bff0-43f7-bde4-1fc0771078ac\") " pod="openshift-marketplace/redhat-marketplace-k99ph" Nov 27 10:32:16 crc kubenswrapper[4971]: I1127 10:32:16.137615 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08375dda-bff0-43f7-bde4-1fc0771078ac-catalog-content\") pod \"redhat-marketplace-k99ph\" (UID: \"08375dda-bff0-43f7-bde4-1fc0771078ac\") " 
pod="openshift-marketplace/redhat-marketplace-k99ph" Nov 27 10:32:16 crc kubenswrapper[4971]: I1127 10:32:16.137801 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08375dda-bff0-43f7-bde4-1fc0771078ac-utilities\") pod \"redhat-marketplace-k99ph\" (UID: \"08375dda-bff0-43f7-bde4-1fc0771078ac\") " pod="openshift-marketplace/redhat-marketplace-k99ph" Nov 27 10:32:16 crc kubenswrapper[4971]: I1127 10:32:16.165088 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zg46\" (UniqueName: \"kubernetes.io/projected/08375dda-bff0-43f7-bde4-1fc0771078ac-kube-api-access-9zg46\") pod \"redhat-marketplace-k99ph\" (UID: \"08375dda-bff0-43f7-bde4-1fc0771078ac\") " pod="openshift-marketplace/redhat-marketplace-k99ph" Nov 27 10:32:16 crc kubenswrapper[4971]: I1127 10:32:16.302630 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k99ph" Nov 27 10:32:16 crc kubenswrapper[4971]: I1127 10:32:16.826628 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k99ph"] Nov 27 10:32:16 crc kubenswrapper[4971]: I1127 10:32:16.955561 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k99ph" event={"ID":"08375dda-bff0-43f7-bde4-1fc0771078ac","Type":"ContainerStarted","Data":"6b51e584fad9c16983186589189282fde6583c715e7bb5cd72db75786405c27a"} Nov 27 10:32:17 crc kubenswrapper[4971]: I1127 10:32:17.971706 4971 generic.go:334] "Generic (PLEG): container finished" podID="08375dda-bff0-43f7-bde4-1fc0771078ac" containerID="2c668cc8048ca4588132c36f9bd380b1f7ae9dbec544a65f3a630f2456e681b2" exitCode=0 Nov 27 10:32:17 crc kubenswrapper[4971]: I1127 10:32:17.971833 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k99ph" event={"ID":"08375dda-bff0-43f7-bde4-1fc0771078ac","Type":"ContainerDied","Data":"2c668cc8048ca4588132c36f9bd380b1f7ae9dbec544a65f3a630f2456e681b2"} Nov 27 10:32:17 crc kubenswrapper[4971]: I1127 10:32:17.975782 4971 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 27 10:32:19 crc kubenswrapper[4971]: I1127 10:32:19.996047 4971 generic.go:334] "Generic (PLEG): container finished" podID="08375dda-bff0-43f7-bde4-1fc0771078ac" containerID="0a2c5a1071cb549686ab5f68faf6e9f9fb04e033bc3dde39e70f570eb2adc749" exitCode=0 Nov 27 10:32:19 crc kubenswrapper[4971]: I1127 10:32:19.996314 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k99ph" event={"ID":"08375dda-bff0-43f7-bde4-1fc0771078ac","Type":"ContainerDied","Data":"0a2c5a1071cb549686ab5f68faf6e9f9fb04e033bc3dde39e70f570eb2adc749"} Nov 27 10:32:21 crc kubenswrapper[4971]: I1127 10:32:21.015031 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k99ph" event={"ID":"08375dda-bff0-43f7-bde4-1fc0771078ac","Type":"ContainerStarted","Data":"b98b5b4024f6f8833d572d1ada0835909160eb9a199245ae229f2066f88f2933"} Nov 27 10:32:21 crc kubenswrapper[4971]: I1127 10:32:21.043937 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-k99ph" podStartSLOduration=3.533659739 podStartE2EDuration="6.043909418s" podCreationTimestamp="2025-11-27 10:32:15 +0000 UTC" firstStartedPulling="2025-11-27 10:32:17.975338182 +0000 UTC m=+13176.167382150" 
lastFinishedPulling="2025-11-27 10:32:20.485587861 +0000 UTC m=+13178.677631829" observedRunningTime="2025-11-27 10:32:21.035629171 +0000 UTC m=+13179.227673129" watchObservedRunningTime="2025-11-27 10:32:21.043909418 +0000 UTC m=+13179.235953376" Nov 27 10:32:23 crc kubenswrapper[4971]: I1127 10:32:23.045250 4971 generic.go:334] "Generic (PLEG): container finished" podID="478390e2-ef28-432b-a0b5-be4a1ebb75cf" containerID="0aed6c0d32929f552acdc69181c94f29768d5cd8c1c98ad7e3c9d0f6e46b2780" exitCode=0 Nov 27 10:32:23 crc kubenswrapper[4971]: I1127 10:32:23.045378 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8pdfd/must-gather-zl67k" event={"ID":"478390e2-ef28-432b-a0b5-be4a1ebb75cf","Type":"ContainerDied","Data":"0aed6c0d32929f552acdc69181c94f29768d5cd8c1c98ad7e3c9d0f6e46b2780"} Nov 27 10:32:23 crc kubenswrapper[4971]: I1127 10:32:23.046734 4971 scope.go:117] "RemoveContainer" containerID="0aed6c0d32929f552acdc69181c94f29768d5cd8c1c98ad7e3c9d0f6e46b2780" Nov 27 10:32:23 crc kubenswrapper[4971]: I1127 10:32:23.534741 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-8pdfd_must-gather-zl67k_478390e2-ef28-432b-a0b5-be4a1ebb75cf/gather/0.log" Nov 27 10:32:26 crc kubenswrapper[4971]: I1127 10:32:26.303037 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-k99ph" Nov 27 10:32:26 crc kubenswrapper[4971]: I1127 10:32:26.303506 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-k99ph" Nov 27 10:32:26 crc kubenswrapper[4971]: I1127 10:32:26.400766 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-k99ph" Nov 27 10:32:26 crc kubenswrapper[4971]: I1127 10:32:26.413348 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 10:32:26 crc kubenswrapper[4971]: I1127 10:32:26.413466 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 10:32:27 crc kubenswrapper[4971]: I1127 10:32:27.219846 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-k99ph" Nov 27 10:32:27 crc kubenswrapper[4971]: I1127 10:32:27.298092 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k99ph"] Nov 27 10:32:29 crc kubenswrapper[4971]: I1127 10:32:29.160393 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-k99ph" podUID="08375dda-bff0-43f7-bde4-1fc0771078ac" containerName="registry-server" containerID="cri-o://b98b5b4024f6f8833d572d1ada0835909160eb9a199245ae229f2066f88f2933" gracePeriod=2 Nov 27 10:32:29 crc kubenswrapper[4971]: I1127 10:32:29.722162 4971 util.go:48] "No ready sandbox for pod can be found. 
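[Editor's note] For redhat-marketplace-k99ph the startup tracker reports two different numbers: podStartE2EDuration (creation to observed running) and podStartSLOduration, which excludes the image-pull window between firstStartedPulling and lastFinishedPulling. Checking that with the values copied from the entry above:

e2e  = 6.043909418                      # podStartE2EDuration, seconds
pull = 20.485587861 - 17.975338182      # pull window within 10:32, seconds
print(round(e2e - pull, 9))             # 3.533659739 == podStartSLOduration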
Nov 27 10:32:29 crc kubenswrapper[4971]: I1127 10:32:29.722162 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k99ph"
Nov 27 10:32:29 crc kubenswrapper[4971]: I1127 10:32:29.887690 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08375dda-bff0-43f7-bde4-1fc0771078ac-utilities\") pod \"08375dda-bff0-43f7-bde4-1fc0771078ac\" (UID: \"08375dda-bff0-43f7-bde4-1fc0771078ac\") "
Nov 27 10:32:29 crc kubenswrapper[4971]: I1127 10:32:29.888084 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08375dda-bff0-43f7-bde4-1fc0771078ac-catalog-content\") pod \"08375dda-bff0-43f7-bde4-1fc0771078ac\" (UID: \"08375dda-bff0-43f7-bde4-1fc0771078ac\") "
Nov 27 10:32:29 crc kubenswrapper[4971]: I1127 10:32:29.888242 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zg46\" (UniqueName: \"kubernetes.io/projected/08375dda-bff0-43f7-bde4-1fc0771078ac-kube-api-access-9zg46\") pod \"08375dda-bff0-43f7-bde4-1fc0771078ac\" (UID: \"08375dda-bff0-43f7-bde4-1fc0771078ac\") "
Nov 27 10:32:29 crc kubenswrapper[4971]: I1127 10:32:29.889615 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08375dda-bff0-43f7-bde4-1fc0771078ac-utilities" (OuterVolumeSpecName: "utilities") pod "08375dda-bff0-43f7-bde4-1fc0771078ac" (UID: "08375dda-bff0-43f7-bde4-1fc0771078ac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 10:32:29 crc kubenswrapper[4971]: I1127 10:32:29.908967 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08375dda-bff0-43f7-bde4-1fc0771078ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "08375dda-bff0-43f7-bde4-1fc0771078ac" (UID: "08375dda-bff0-43f7-bde4-1fc0771078ac"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 27 10:32:29 crc kubenswrapper[4971]: I1127 10:32:29.910800 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08375dda-bff0-43f7-bde4-1fc0771078ac-kube-api-access-9zg46" (OuterVolumeSpecName: "kube-api-access-9zg46") pod "08375dda-bff0-43f7-bde4-1fc0771078ac" (UID: "08375dda-bff0-43f7-bde4-1fc0771078ac"). InnerVolumeSpecName "kube-api-access-9zg46". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 27 10:32:29 crc kubenswrapper[4971]: I1127 10:32:29.993419 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08375dda-bff0-43f7-bde4-1fc0771078ac-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 27 10:32:29 crc kubenswrapper[4971]: I1127 10:32:29.993520 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zg46\" (UniqueName: \"kubernetes.io/projected/08375dda-bff0-43f7-bde4-1fc0771078ac-kube-api-access-9zg46\") on node \"crc\" DevicePath \"\""
Nov 27 10:32:29 crc kubenswrapper[4971]: I1127 10:32:29.993570 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08375dda-bff0-43f7-bde4-1fc0771078ac-utilities\") on node \"crc\" DevicePath \"\""
Nov 27 10:32:30 crc kubenswrapper[4971]: I1127 10:32:30.178299 4971 generic.go:334] "Generic (PLEG): container finished" podID="08375dda-bff0-43f7-bde4-1fc0771078ac" containerID="b98b5b4024f6f8833d572d1ada0835909160eb9a199245ae229f2066f88f2933" exitCode=0
Nov 27 10:32:30 crc kubenswrapper[4971]: I1127 10:32:30.178361 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k99ph" event={"ID":"08375dda-bff0-43f7-bde4-1fc0771078ac","Type":"ContainerDied","Data":"b98b5b4024f6f8833d572d1ada0835909160eb9a199245ae229f2066f88f2933"}
Nov 27 10:32:30 crc kubenswrapper[4971]: I1127 10:32:30.178835 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k99ph" event={"ID":"08375dda-bff0-43f7-bde4-1fc0771078ac","Type":"ContainerDied","Data":"6b51e584fad9c16983186589189282fde6583c715e7bb5cd72db75786405c27a"}
Nov 27 10:32:30 crc kubenswrapper[4971]: I1127 10:32:30.178470 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k99ph"
Nov 27 10:32:30 crc kubenswrapper[4971]: I1127 10:32:30.178868 4971 scope.go:117] "RemoveContainer" containerID="b98b5b4024f6f8833d572d1ada0835909160eb9a199245ae229f2066f88f2933"
Nov 27 10:32:30 crc kubenswrapper[4971]: I1127 10:32:30.228064 4971 scope.go:117] "RemoveContainer" containerID="0a2c5a1071cb549686ab5f68faf6e9f9fb04e033bc3dde39e70f570eb2adc749"
Nov 27 10:32:30 crc kubenswrapper[4971]: I1127 10:32:30.229783 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k99ph"]
Nov 27 10:32:30 crc kubenswrapper[4971]: I1127 10:32:30.243312 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-k99ph"]
Nov 27 10:32:30 crc kubenswrapper[4971]: I1127 10:32:30.262101 4971 scope.go:117] "RemoveContainer" containerID="2c668cc8048ca4588132c36f9bd380b1f7ae9dbec544a65f3a630f2456e681b2"
Nov 27 10:32:30 crc kubenswrapper[4971]: I1127 10:32:30.318367 4971 scope.go:117] "RemoveContainer" containerID="b98b5b4024f6f8833d572d1ada0835909160eb9a199245ae229f2066f88f2933"
Nov 27 10:32:30 crc kubenswrapper[4971]: E1127 10:32:30.319028 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b98b5b4024f6f8833d572d1ada0835909160eb9a199245ae229f2066f88f2933\": container with ID starting with b98b5b4024f6f8833d572d1ada0835909160eb9a199245ae229f2066f88f2933 not found: ID does not exist" containerID="b98b5b4024f6f8833d572d1ada0835909160eb9a199245ae229f2066f88f2933"
Nov 27 10:32:30 crc kubenswrapper[4971]: I1127 10:32:30.319089 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b98b5b4024f6f8833d572d1ada0835909160eb9a199245ae229f2066f88f2933"} err="failed to get container status \"b98b5b4024f6f8833d572d1ada0835909160eb9a199245ae229f2066f88f2933\": rpc error: code = NotFound desc = could not find container \"b98b5b4024f6f8833d572d1ada0835909160eb9a199245ae229f2066f88f2933\": container with ID starting with b98b5b4024f6f8833d572d1ada0835909160eb9a199245ae229f2066f88f2933 not found: ID does not exist"
Nov 27 10:32:30 crc kubenswrapper[4971]: I1127 10:32:30.319134 4971 scope.go:117] "RemoveContainer" containerID="0a2c5a1071cb549686ab5f68faf6e9f9fb04e033bc3dde39e70f570eb2adc749"
Nov 27 10:32:30 crc kubenswrapper[4971]: E1127 10:32:30.319740 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a2c5a1071cb549686ab5f68faf6e9f9fb04e033bc3dde39e70f570eb2adc749\": container with ID starting with 0a2c5a1071cb549686ab5f68faf6e9f9fb04e033bc3dde39e70f570eb2adc749 not found: ID does not exist" containerID="0a2c5a1071cb549686ab5f68faf6e9f9fb04e033bc3dde39e70f570eb2adc749"
Nov 27 10:32:30 crc kubenswrapper[4971]: I1127 10:32:30.319836 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a2c5a1071cb549686ab5f68faf6e9f9fb04e033bc3dde39e70f570eb2adc749"} err="failed to get container status \"0a2c5a1071cb549686ab5f68faf6e9f9fb04e033bc3dde39e70f570eb2adc749\": rpc error: code = NotFound desc = could not find container \"0a2c5a1071cb549686ab5f68faf6e9f9fb04e033bc3dde39e70f570eb2adc749\": container with ID starting with 0a2c5a1071cb549686ab5f68faf6e9f9fb04e033bc3dde39e70f570eb2adc749 not found: ID does not exist"
Nov 27 10:32:30 crc kubenswrapper[4971]: I1127 10:32:30.319913 4971 scope.go:117] "RemoveContainer"
containerID="2c668cc8048ca4588132c36f9bd380b1f7ae9dbec544a65f3a630f2456e681b2" Nov 27 10:32:30 crc kubenswrapper[4971]: E1127 10:32:30.320247 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c668cc8048ca4588132c36f9bd380b1f7ae9dbec544a65f3a630f2456e681b2\": container with ID starting with 2c668cc8048ca4588132c36f9bd380b1f7ae9dbec544a65f3a630f2456e681b2 not found: ID does not exist" containerID="2c668cc8048ca4588132c36f9bd380b1f7ae9dbec544a65f3a630f2456e681b2" Nov 27 10:32:30 crc kubenswrapper[4971]: I1127 10:32:30.320333 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c668cc8048ca4588132c36f9bd380b1f7ae9dbec544a65f3a630f2456e681b2"} err="failed to get container status \"2c668cc8048ca4588132c36f9bd380b1f7ae9dbec544a65f3a630f2456e681b2\": rpc error: code = NotFound desc = could not find container \"2c668cc8048ca4588132c36f9bd380b1f7ae9dbec544a65f3a630f2456e681b2\": container with ID starting with 2c668cc8048ca4588132c36f9bd380b1f7ae9dbec544a65f3a630f2456e681b2 not found: ID does not exist" Nov 27 10:32:30 crc kubenswrapper[4971]: I1127 10:32:30.565128 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08375dda-bff0-43f7-bde4-1fc0771078ac" path="/var/lib/kubelet/pods/08375dda-bff0-43f7-bde4-1fc0771078ac/volumes" Nov 27 10:32:35 crc kubenswrapper[4971]: I1127 10:32:35.557378 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-8pdfd/must-gather-zl67k"] Nov 27 10:32:35 crc kubenswrapper[4971]: I1127 10:32:35.559619 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-8pdfd/must-gather-zl67k" podUID="478390e2-ef28-432b-a0b5-be4a1ebb75cf" containerName="copy" containerID="cri-o://fef07161adb2a515623f89e36e39c9a59b35063aca8155cedc092ba9f47dcaeb" gracePeriod=2 Nov 27 10:32:35 crc kubenswrapper[4971]: I1127 10:32:35.569401 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-8pdfd/must-gather-zl67k"] Nov 27 10:32:36 crc kubenswrapper[4971]: I1127 10:32:36.097107 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-8pdfd_must-gather-zl67k_478390e2-ef28-432b-a0b5-be4a1ebb75cf/copy/0.log" Nov 27 10:32:36 crc kubenswrapper[4971]: I1127 10:32:36.097586 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8pdfd/must-gather-zl67k" Nov 27 10:32:36 crc kubenswrapper[4971]: I1127 10:32:36.260303 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87bq8\" (UniqueName: \"kubernetes.io/projected/478390e2-ef28-432b-a0b5-be4a1ebb75cf-kube-api-access-87bq8\") pod \"478390e2-ef28-432b-a0b5-be4a1ebb75cf\" (UID: \"478390e2-ef28-432b-a0b5-be4a1ebb75cf\") " Nov 27 10:32:36 crc kubenswrapper[4971]: I1127 10:32:36.261231 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/478390e2-ef28-432b-a0b5-be4a1ebb75cf-must-gather-output\") pod \"478390e2-ef28-432b-a0b5-be4a1ebb75cf\" (UID: \"478390e2-ef28-432b-a0b5-be4a1ebb75cf\") " Nov 27 10:32:36 crc kubenswrapper[4971]: I1127 10:32:36.270993 4971 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-8pdfd_must-gather-zl67k_478390e2-ef28-432b-a0b5-be4a1ebb75cf/copy/0.log" Nov 27 10:32:36 crc kubenswrapper[4971]: I1127 10:32:36.271863 4971 generic.go:334] "Generic (PLEG): container finished" podID="478390e2-ef28-432b-a0b5-be4a1ebb75cf" containerID="fef07161adb2a515623f89e36e39c9a59b35063aca8155cedc092ba9f47dcaeb" exitCode=143 Nov 27 10:32:36 crc kubenswrapper[4971]: I1127 10:32:36.271935 4971 scope.go:117] "RemoveContainer" containerID="fef07161adb2a515623f89e36e39c9a59b35063aca8155cedc092ba9f47dcaeb" Nov 27 10:32:36 crc kubenswrapper[4971]: I1127 10:32:36.272120 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8pdfd/must-gather-zl67k" Nov 27 10:32:36 crc kubenswrapper[4971]: I1127 10:32:36.272705 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/478390e2-ef28-432b-a0b5-be4a1ebb75cf-kube-api-access-87bq8" (OuterVolumeSpecName: "kube-api-access-87bq8") pod "478390e2-ef28-432b-a0b5-be4a1ebb75cf" (UID: "478390e2-ef28-432b-a0b5-be4a1ebb75cf"). InnerVolumeSpecName "kube-api-access-87bq8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:32:36 crc kubenswrapper[4971]: I1127 10:32:36.365398 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87bq8\" (UniqueName: \"kubernetes.io/projected/478390e2-ef28-432b-a0b5-be4a1ebb75cf-kube-api-access-87bq8\") on node \"crc\" DevicePath \"\"" Nov 27 10:32:36 crc kubenswrapper[4971]: I1127 10:32:36.378644 4971 scope.go:117] "RemoveContainer" containerID="0aed6c0d32929f552acdc69181c94f29768d5cd8c1c98ad7e3c9d0f6e46b2780" Nov 27 10:32:36 crc kubenswrapper[4971]: I1127 10:32:36.432200 4971 scope.go:117] "RemoveContainer" containerID="fef07161adb2a515623f89e36e39c9a59b35063aca8155cedc092ba9f47dcaeb" Nov 27 10:32:36 crc kubenswrapper[4971]: E1127 10:32:36.433624 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fef07161adb2a515623f89e36e39c9a59b35063aca8155cedc092ba9f47dcaeb\": container with ID starting with fef07161adb2a515623f89e36e39c9a59b35063aca8155cedc092ba9f47dcaeb not found: ID does not exist" containerID="fef07161adb2a515623f89e36e39c9a59b35063aca8155cedc092ba9f47dcaeb" Nov 27 10:32:36 crc kubenswrapper[4971]: I1127 10:32:36.433685 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fef07161adb2a515623f89e36e39c9a59b35063aca8155cedc092ba9f47dcaeb"} err="failed to get container status \"fef07161adb2a515623f89e36e39c9a59b35063aca8155cedc092ba9f47dcaeb\": rpc error: code = NotFound desc = could not find container \"fef07161adb2a515623f89e36e39c9a59b35063aca8155cedc092ba9f47dcaeb\": container with ID starting with fef07161adb2a515623f89e36e39c9a59b35063aca8155cedc092ba9f47dcaeb not found: ID does not exist" Nov 27 10:32:36 crc kubenswrapper[4971]: I1127 10:32:36.433726 4971 scope.go:117] "RemoveContainer" containerID="0aed6c0d32929f552acdc69181c94f29768d5cd8c1c98ad7e3c9d0f6e46b2780" Nov 27 10:32:36 crc kubenswrapper[4971]: E1127 10:32:36.434276 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0aed6c0d32929f552acdc69181c94f29768d5cd8c1c98ad7e3c9d0f6e46b2780\": container with ID starting with 0aed6c0d32929f552acdc69181c94f29768d5cd8c1c98ad7e3c9d0f6e46b2780 not found: ID does not exist" containerID="0aed6c0d32929f552acdc69181c94f29768d5cd8c1c98ad7e3c9d0f6e46b2780" Nov 27 10:32:36 crc kubenswrapper[4971]: I1127 10:32:36.434343 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0aed6c0d32929f552acdc69181c94f29768d5cd8c1c98ad7e3c9d0f6e46b2780"} err="failed to get container status \"0aed6c0d32929f552acdc69181c94f29768d5cd8c1c98ad7e3c9d0f6e46b2780\": rpc error: code = NotFound desc = could not find container \"0aed6c0d32929f552acdc69181c94f29768d5cd8c1c98ad7e3c9d0f6e46b2780\": container with ID starting with 0aed6c0d32929f552acdc69181c94f29768d5cd8c1c98ad7e3c9d0f6e46b2780 not found: ID does not exist" Nov 27 10:32:36 crc kubenswrapper[4971]: I1127 10:32:36.691876 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/478390e2-ef28-432b-a0b5-be4a1ebb75cf-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "478390e2-ef28-432b-a0b5-be4a1ebb75cf" (UID: "478390e2-ef28-432b-a0b5-be4a1ebb75cf"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:32:36 crc kubenswrapper[4971]: I1127 10:32:36.780291 4971 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/478390e2-ef28-432b-a0b5-be4a1ebb75cf-must-gather-output\") on node \"crc\" DevicePath \"\"" Nov 27 10:32:38 crc kubenswrapper[4971]: I1127 10:32:38.604826 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="478390e2-ef28-432b-a0b5-be4a1ebb75cf" path="/var/lib/kubelet/pods/478390e2-ef28-432b-a0b5-be4a1ebb75cf/volumes" Nov 27 10:32:56 crc kubenswrapper[4971]: I1127 10:32:56.414137 4971 patch_prober.go:28] interesting pod/machine-config-daemon-zdz6h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 27 10:32:56 crc kubenswrapper[4971]: I1127 10:32:56.415272 4971 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 27 10:32:56 crc kubenswrapper[4971]: I1127 10:32:56.415346 4971 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" Nov 27 10:32:56 crc kubenswrapper[4971]: I1127 10:32:56.417031 4971 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c"} pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 27 10:32:56 crc kubenswrapper[4971]: I1127 10:32:56.417148 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerName="machine-config-daemon" containerID="cri-o://69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c" gracePeriod=600 Nov 27 10:32:56 crc kubenswrapper[4971]: E1127 10:32:56.578590 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:32:56 crc kubenswrapper[4971]: I1127 10:32:56.602447 4971 generic.go:334] "Generic (PLEG): container finished" podID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" containerID="69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c" exitCode=0 Nov 27 10:32:56 crc kubenswrapper[4971]: I1127 10:32:56.602517 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" event={"ID":"0ab8c2ef-d82b-4396-919d-8550cc2e24d7","Type":"ContainerDied","Data":"69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c"} Nov 27 10:32:56 crc kubenswrapper[4971]: I1127 10:32:56.602607 4971 scope.go:117] "RemoveContainer" 
containerID="40651520d9a16833ec5da7f3b31faf04fb3ad0c60cd1a3ec6e12318c1dc0944c" Nov 27 10:32:56 crc kubenswrapper[4971]: I1127 10:32:56.603692 4971 scope.go:117] "RemoveContainer" containerID="69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c" Nov 27 10:32:56 crc kubenswrapper[4971]: E1127 10:32:56.604973 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:33:11 crc kubenswrapper[4971]: I1127 10:33:11.551819 4971 scope.go:117] "RemoveContainer" containerID="69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c" Nov 27 10:33:11 crc kubenswrapper[4971]: E1127 10:33:11.553110 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:33:23 crc kubenswrapper[4971]: I1127 10:33:23.551408 4971 scope.go:117] "RemoveContainer" containerID="69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c" Nov 27 10:33:23 crc kubenswrapper[4971]: E1127 10:33:23.553932 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:33:34 crc kubenswrapper[4971]: I1127 10:33:34.601795 4971 scope.go:117] "RemoveContainer" containerID="69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c" Nov 27 10:33:34 crc kubenswrapper[4971]: E1127 10:33:34.604039 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.524135 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hxk8g"] Nov 27 10:33:46 crc kubenswrapper[4971]: E1127 10:33:46.526037 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08375dda-bff0-43f7-bde4-1fc0771078ac" containerName="extract-content" Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.526061 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="08375dda-bff0-43f7-bde4-1fc0771078ac" containerName="extract-content" Nov 27 10:33:46 crc kubenswrapper[4971]: E1127 10:33:46.526076 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08375dda-bff0-43f7-bde4-1fc0771078ac" containerName="extract-utilities" Nov 27 
10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.526086 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="08375dda-bff0-43f7-bde4-1fc0771078ac" containerName="extract-utilities" Nov 27 10:33:46 crc kubenswrapper[4971]: E1127 10:33:46.526113 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="478390e2-ef28-432b-a0b5-be4a1ebb75cf" containerName="copy" Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.526124 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="478390e2-ef28-432b-a0b5-be4a1ebb75cf" containerName="copy" Nov 27 10:33:46 crc kubenswrapper[4971]: E1127 10:33:46.526153 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="478390e2-ef28-432b-a0b5-be4a1ebb75cf" containerName="gather" Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.526166 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="478390e2-ef28-432b-a0b5-be4a1ebb75cf" containerName="gather" Nov 27 10:33:46 crc kubenswrapper[4971]: E1127 10:33:46.526190 4971 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08375dda-bff0-43f7-bde4-1fc0771078ac" containerName="registry-server" Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.526199 4971 state_mem.go:107] "Deleted CPUSet assignment" podUID="08375dda-bff0-43f7-bde4-1fc0771078ac" containerName="registry-server" Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.526610 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="08375dda-bff0-43f7-bde4-1fc0771078ac" containerName="registry-server" Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.526640 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="478390e2-ef28-432b-a0b5-be4a1ebb75cf" containerName="gather" Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.526653 4971 memory_manager.go:354] "RemoveStaleState removing state" podUID="478390e2-ef28-432b-a0b5-be4a1ebb75cf" containerName="copy" Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.531224 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hxk8g" Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.545294 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hxk8g"] Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.691971 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/780cd9cc-9b45-468e-a0cd-e97f25dca4ee-utilities\") pod \"community-operators-hxk8g\" (UID: \"780cd9cc-9b45-468e-a0cd-e97f25dca4ee\") " pod="openshift-marketplace/community-operators-hxk8g" Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.692143 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/780cd9cc-9b45-468e-a0cd-e97f25dca4ee-catalog-content\") pod \"community-operators-hxk8g\" (UID: \"780cd9cc-9b45-468e-a0cd-e97f25dca4ee\") " pod="openshift-marketplace/community-operators-hxk8g" Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.692272 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62wst\" (UniqueName: \"kubernetes.io/projected/780cd9cc-9b45-468e-a0cd-e97f25dca4ee-kube-api-access-62wst\") pod \"community-operators-hxk8g\" (UID: \"780cd9cc-9b45-468e-a0cd-e97f25dca4ee\") " pod="openshift-marketplace/community-operators-hxk8g" Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.794270 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/780cd9cc-9b45-468e-a0cd-e97f25dca4ee-catalog-content\") pod \"community-operators-hxk8g\" (UID: \"780cd9cc-9b45-468e-a0cd-e97f25dca4ee\") " pod="openshift-marketplace/community-operators-hxk8g" Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.794370 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62wst\" (UniqueName: \"kubernetes.io/projected/780cd9cc-9b45-468e-a0cd-e97f25dca4ee-kube-api-access-62wst\") pod \"community-operators-hxk8g\" (UID: \"780cd9cc-9b45-468e-a0cd-e97f25dca4ee\") " pod="openshift-marketplace/community-operators-hxk8g" Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.794426 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/780cd9cc-9b45-468e-a0cd-e97f25dca4ee-utilities\") pod \"community-operators-hxk8g\" (UID: \"780cd9cc-9b45-468e-a0cd-e97f25dca4ee\") " pod="openshift-marketplace/community-operators-hxk8g" Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.794922 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/780cd9cc-9b45-468e-a0cd-e97f25dca4ee-utilities\") pod \"community-operators-hxk8g\" (UID: \"780cd9cc-9b45-468e-a0cd-e97f25dca4ee\") " pod="openshift-marketplace/community-operators-hxk8g" Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.795144 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/780cd9cc-9b45-468e-a0cd-e97f25dca4ee-catalog-content\") pod \"community-operators-hxk8g\" (UID: \"780cd9cc-9b45-468e-a0cd-e97f25dca4ee\") " pod="openshift-marketplace/community-operators-hxk8g" Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.818188 4971 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-62wst\" (UniqueName: \"kubernetes.io/projected/780cd9cc-9b45-468e-a0cd-e97f25dca4ee-kube-api-access-62wst\") pod \"community-operators-hxk8g\" (UID: \"780cd9cc-9b45-468e-a0cd-e97f25dca4ee\") " pod="openshift-marketplace/community-operators-hxk8g" Nov 27 10:33:46 crc kubenswrapper[4971]: I1127 10:33:46.875387 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hxk8g" Nov 27 10:33:47 crc kubenswrapper[4971]: I1127 10:33:47.522631 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hxk8g"] Nov 27 10:33:48 crc kubenswrapper[4971]: I1127 10:33:48.475330 4971 generic.go:334] "Generic (PLEG): container finished" podID="780cd9cc-9b45-468e-a0cd-e97f25dca4ee" containerID="ab7e4ed1c8ec585a1cdce0151ee6dec1af604584b4cafcbb60354fb41cb518f6" exitCode=0 Nov 27 10:33:48 crc kubenswrapper[4971]: I1127 10:33:48.475409 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hxk8g" event={"ID":"780cd9cc-9b45-468e-a0cd-e97f25dca4ee","Type":"ContainerDied","Data":"ab7e4ed1c8ec585a1cdce0151ee6dec1af604584b4cafcbb60354fb41cb518f6"} Nov 27 10:33:48 crc kubenswrapper[4971]: I1127 10:33:48.476251 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hxk8g" event={"ID":"780cd9cc-9b45-468e-a0cd-e97f25dca4ee","Type":"ContainerStarted","Data":"68d06c1390b330ad187b40461735dcf181ecd7dfe123a7d58818fcd918815d26"} Nov 27 10:33:48 crc kubenswrapper[4971]: I1127 10:33:48.930787 4971 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-59d5b"] Nov 27 10:33:48 crc kubenswrapper[4971]: I1127 10:33:48.935919 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-59d5b" Nov 27 10:33:48 crc kubenswrapper[4971]: I1127 10:33:48.962076 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-59d5b"] Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.011098 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fbde700-439b-431c-be64-d1d75af4b74a-utilities\") pod \"redhat-operators-59d5b\" (UID: \"4fbde700-439b-431c-be64-d1d75af4b74a\") " pod="openshift-marketplace/redhat-operators-59d5b" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.011220 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fbde700-439b-431c-be64-d1d75af4b74a-catalog-content\") pod \"redhat-operators-59d5b\" (UID: \"4fbde700-439b-431c-be64-d1d75af4b74a\") " pod="openshift-marketplace/redhat-operators-59d5b" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.011350 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbkpn\" (UniqueName: \"kubernetes.io/projected/4fbde700-439b-431c-be64-d1d75af4b74a-kube-api-access-qbkpn\") pod \"redhat-operators-59d5b\" (UID: \"4fbde700-439b-431c-be64-d1d75af4b74a\") " pod="openshift-marketplace/redhat-operators-59d5b" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.114569 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbkpn\" (UniqueName: \"kubernetes.io/projected/4fbde700-439b-431c-be64-d1d75af4b74a-kube-api-access-qbkpn\") pod \"redhat-operators-59d5b\" (UID: \"4fbde700-439b-431c-be64-d1d75af4b74a\") " pod="openshift-marketplace/redhat-operators-59d5b" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.114692 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fbde700-439b-431c-be64-d1d75af4b74a-utilities\") pod \"redhat-operators-59d5b\" (UID: \"4fbde700-439b-431c-be64-d1d75af4b74a\") " pod="openshift-marketplace/redhat-operators-59d5b" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.114827 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fbde700-439b-431c-be64-d1d75af4b74a-catalog-content\") pod \"redhat-operators-59d5b\" (UID: \"4fbde700-439b-431c-be64-d1d75af4b74a\") " pod="openshift-marketplace/redhat-operators-59d5b" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.115437 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fbde700-439b-431c-be64-d1d75af4b74a-utilities\") pod \"redhat-operators-59d5b\" (UID: \"4fbde700-439b-431c-be64-d1d75af4b74a\") " pod="openshift-marketplace/redhat-operators-59d5b" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.115525 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fbde700-439b-431c-be64-d1d75af4b74a-catalog-content\") pod \"redhat-operators-59d5b\" (UID: \"4fbde700-439b-431c-be64-d1d75af4b74a\") " pod="openshift-marketplace/redhat-operators-59d5b" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.139831 4971 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/certified-operators-899ph"] Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.143327 4971 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-899ph" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.149605 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbkpn\" (UniqueName: \"kubernetes.io/projected/4fbde700-439b-431c-be64-d1d75af4b74a-kube-api-access-qbkpn\") pod \"redhat-operators-59d5b\" (UID: \"4fbde700-439b-431c-be64-d1d75af4b74a\") " pod="openshift-marketplace/redhat-operators-59d5b" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.172302 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-899ph"] Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.216481 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgxcp\" (UniqueName: \"kubernetes.io/projected/0de6d2da-4809-4889-b8f1-5ebc0d77b575-kube-api-access-fgxcp\") pod \"certified-operators-899ph\" (UID: \"0de6d2da-4809-4889-b8f1-5ebc0d77b575\") " pod="openshift-marketplace/certified-operators-899ph" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.216595 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0de6d2da-4809-4889-b8f1-5ebc0d77b575-catalog-content\") pod \"certified-operators-899ph\" (UID: \"0de6d2da-4809-4889-b8f1-5ebc0d77b575\") " pod="openshift-marketplace/certified-operators-899ph" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.216673 4971 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0de6d2da-4809-4889-b8f1-5ebc0d77b575-utilities\") pod \"certified-operators-899ph\" (UID: \"0de6d2da-4809-4889-b8f1-5ebc0d77b575\") " pod="openshift-marketplace/certified-operators-899ph" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.266565 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-59d5b" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.318956 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0de6d2da-4809-4889-b8f1-5ebc0d77b575-utilities\") pod \"certified-operators-899ph\" (UID: \"0de6d2da-4809-4889-b8f1-5ebc0d77b575\") " pod="openshift-marketplace/certified-operators-899ph" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.319103 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgxcp\" (UniqueName: \"kubernetes.io/projected/0de6d2da-4809-4889-b8f1-5ebc0d77b575-kube-api-access-fgxcp\") pod \"certified-operators-899ph\" (UID: \"0de6d2da-4809-4889-b8f1-5ebc0d77b575\") " pod="openshift-marketplace/certified-operators-899ph" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.319159 4971 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0de6d2da-4809-4889-b8f1-5ebc0d77b575-catalog-content\") pod \"certified-operators-899ph\" (UID: \"0de6d2da-4809-4889-b8f1-5ebc0d77b575\") " pod="openshift-marketplace/certified-operators-899ph" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.319818 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0de6d2da-4809-4889-b8f1-5ebc0d77b575-catalog-content\") pod \"certified-operators-899ph\" (UID: \"0de6d2da-4809-4889-b8f1-5ebc0d77b575\") " pod="openshift-marketplace/certified-operators-899ph" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.320054 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0de6d2da-4809-4889-b8f1-5ebc0d77b575-utilities\") pod \"certified-operators-899ph\" (UID: \"0de6d2da-4809-4889-b8f1-5ebc0d77b575\") " pod="openshift-marketplace/certified-operators-899ph" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.338259 4971 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgxcp\" (UniqueName: \"kubernetes.io/projected/0de6d2da-4809-4889-b8f1-5ebc0d77b575-kube-api-access-fgxcp\") pod \"certified-operators-899ph\" (UID: \"0de6d2da-4809-4889-b8f1-5ebc0d77b575\") " pod="openshift-marketplace/certified-operators-899ph" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.508593 4971 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-899ph" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.550516 4971 scope.go:117] "RemoveContainer" containerID="69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c" Nov 27 10:33:49 crc kubenswrapper[4971]: E1127 10:33:49.550808 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:33:49 crc kubenswrapper[4971]: I1127 10:33:49.812056 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-59d5b"] Nov 27 10:33:50 crc kubenswrapper[4971]: I1127 10:33:50.149768 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-899ph"] Nov 27 10:33:50 crc kubenswrapper[4971]: I1127 10:33:50.511737 4971 generic.go:334] "Generic (PLEG): container finished" podID="0de6d2da-4809-4889-b8f1-5ebc0d77b575" containerID="81c7cc8c3fbc90828e9a9fc79be83441cb4c42515fc3272f736cac11b88619b4" exitCode=0 Nov 27 10:33:50 crc kubenswrapper[4971]: I1127 10:33:50.511928 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-899ph" event={"ID":"0de6d2da-4809-4889-b8f1-5ebc0d77b575","Type":"ContainerDied","Data":"81c7cc8c3fbc90828e9a9fc79be83441cb4c42515fc3272f736cac11b88619b4"} Nov 27 10:33:50 crc kubenswrapper[4971]: I1127 10:33:50.511967 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-899ph" event={"ID":"0de6d2da-4809-4889-b8f1-5ebc0d77b575","Type":"ContainerStarted","Data":"57dc4b88d2fed5680067c0895d2b84312c8ca89067ac46a430e30406225bbca8"} Nov 27 10:33:50 crc kubenswrapper[4971]: I1127 10:33:50.517836 4971 generic.go:334] "Generic (PLEG): container finished" podID="4fbde700-439b-431c-be64-d1d75af4b74a" containerID="0e47ff58ed02296535ea54fb6871e754202441ef716867b19ace387f80f07088" exitCode=0 Nov 27 10:33:50 crc kubenswrapper[4971]: I1127 10:33:50.517868 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59d5b" event={"ID":"4fbde700-439b-431c-be64-d1d75af4b74a","Type":"ContainerDied","Data":"0e47ff58ed02296535ea54fb6871e754202441ef716867b19ace387f80f07088"} Nov 27 10:33:50 crc kubenswrapper[4971]: I1127 10:33:50.517888 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59d5b" event={"ID":"4fbde700-439b-431c-be64-d1d75af4b74a","Type":"ContainerStarted","Data":"07c922ef5ab2091f8100b6f6307ee5285be86d960472676a89812b34987af89c"} Nov 27 10:33:52 crc kubenswrapper[4971]: I1127 10:33:52.576092 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-899ph" event={"ID":"0de6d2da-4809-4889-b8f1-5ebc0d77b575","Type":"ContainerStarted","Data":"5b4c14ece2e5f564b03f17c60b484c48b5d39663ce375b59fef3aab4e84b981a"} Nov 27 10:33:52 crc kubenswrapper[4971]: I1127 10:33:52.577126 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59d5b" event={"ID":"4fbde700-439b-431c-be64-d1d75af4b74a","Type":"ContainerStarted","Data":"c1fa32aedd806f9fc5bbc72fc43a6285ff0e76a7d7ab73ad2f8366a3431c271b"} Nov 27 10:33:53 crc 
kubenswrapper[4971]: I1127 10:33:53.587314 4971 generic.go:334] "Generic (PLEG): container finished" podID="0de6d2da-4809-4889-b8f1-5ebc0d77b575" containerID="5b4c14ece2e5f564b03f17c60b484c48b5d39663ce375b59fef3aab4e84b981a" exitCode=0 Nov 27 10:33:53 crc kubenswrapper[4971]: I1127 10:33:53.587431 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-899ph" event={"ID":"0de6d2da-4809-4889-b8f1-5ebc0d77b575","Type":"ContainerDied","Data":"5b4c14ece2e5f564b03f17c60b484c48b5d39663ce375b59fef3aab4e84b981a"} Nov 27 10:33:55 crc kubenswrapper[4971]: I1127 10:33:55.634280 4971 generic.go:334] "Generic (PLEG): container finished" podID="780cd9cc-9b45-468e-a0cd-e97f25dca4ee" containerID="fcfa75a76afdfa19dabba1bd8c0f1f6797aba941804901bc0288abd8acbc673a" exitCode=0 Nov 27 10:33:55 crc kubenswrapper[4971]: I1127 10:33:55.634402 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hxk8g" event={"ID":"780cd9cc-9b45-468e-a0cd-e97f25dca4ee","Type":"ContainerDied","Data":"fcfa75a76afdfa19dabba1bd8c0f1f6797aba941804901bc0288abd8acbc673a"} Nov 27 10:33:56 crc kubenswrapper[4971]: I1127 10:33:56.658823 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-899ph" event={"ID":"0de6d2da-4809-4889-b8f1-5ebc0d77b575","Type":"ContainerStarted","Data":"6df25e5720efc4d965e2efcdd4eac9e29a5669a686f6a598c367ec50a10602a1"} Nov 27 10:33:57 crc kubenswrapper[4971]: I1127 10:33:57.684449 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hxk8g" event={"ID":"780cd9cc-9b45-468e-a0cd-e97f25dca4ee","Type":"ContainerStarted","Data":"05685ccc3965d16e92f2e201134ede724c224e5e030c71c041ed3dfa1d6f3298"} Nov 27 10:33:57 crc kubenswrapper[4971]: I1127 10:33:57.695958 4971 generic.go:334] "Generic (PLEG): container finished" podID="4fbde700-439b-431c-be64-d1d75af4b74a" containerID="c1fa32aedd806f9fc5bbc72fc43a6285ff0e76a7d7ab73ad2f8366a3431c271b" exitCode=0 Nov 27 10:33:57 crc kubenswrapper[4971]: I1127 10:33:57.697208 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59d5b" event={"ID":"4fbde700-439b-431c-be64-d1d75af4b74a","Type":"ContainerDied","Data":"c1fa32aedd806f9fc5bbc72fc43a6285ff0e76a7d7ab73ad2f8366a3431c271b"} Nov 27 10:33:57 crc kubenswrapper[4971]: I1127 10:33:57.717282 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hxk8g" podStartSLOduration=3.460880257 podStartE2EDuration="11.717247779s" podCreationTimestamp="2025-11-27 10:33:46 +0000 UTC" firstStartedPulling="2025-11-27 10:33:48.481287306 +0000 UTC m=+13266.673331224" lastFinishedPulling="2025-11-27 10:33:56.737654788 +0000 UTC m=+13274.929698746" observedRunningTime="2025-11-27 10:33:57.712432991 +0000 UTC m=+13275.904476919" watchObservedRunningTime="2025-11-27 10:33:57.717247779 +0000 UTC m=+13275.909291737" Nov 27 10:33:57 crc kubenswrapper[4971]: I1127 10:33:57.753288 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-899ph" podStartSLOduration=3.286972031 podStartE2EDuration="8.753263837s" podCreationTimestamp="2025-11-27 10:33:49 +0000 UTC" firstStartedPulling="2025-11-27 10:33:50.517700211 +0000 UTC m=+13268.709744129" lastFinishedPulling="2025-11-27 10:33:55.983991967 +0000 UTC m=+13274.176035935" observedRunningTime="2025-11-27 10:33:57.735745287 +0000 UTC m=+13275.927789205" 
watchObservedRunningTime="2025-11-27 10:33:57.753263837 +0000 UTC m=+13275.945307765" Nov 27 10:33:59 crc kubenswrapper[4971]: I1127 10:33:59.511299 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-899ph" Nov 27 10:33:59 crc kubenswrapper[4971]: I1127 10:33:59.513077 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-899ph" Nov 27 10:33:59 crc kubenswrapper[4971]: I1127 10:33:59.720917 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59d5b" event={"ID":"4fbde700-439b-431c-be64-d1d75af4b74a","Type":"ContainerStarted","Data":"209c71c63607ed4b2ea751f0195a3f6a109d25fba6b2964d4e6ccdfd8b57aba5"} Nov 27 10:33:59 crc kubenswrapper[4971]: I1127 10:33:59.751142 4971 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-59d5b" podStartSLOduration=3.7866002500000002 podStartE2EDuration="11.751118432s" podCreationTimestamp="2025-11-27 10:33:48 +0000 UTC" firstStartedPulling="2025-11-27 10:33:50.529507388 +0000 UTC m=+13268.721551316" lastFinishedPulling="2025-11-27 10:33:58.49402554 +0000 UTC m=+13276.686069498" observedRunningTime="2025-11-27 10:33:59.744851783 +0000 UTC m=+13277.936895711" watchObservedRunningTime="2025-11-27 10:33:59.751118432 +0000 UTC m=+13277.943162350" Nov 27 10:34:00 crc kubenswrapper[4971]: I1127 10:34:00.582719 4971 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-899ph" podUID="0de6d2da-4809-4889-b8f1-5ebc0d77b575" containerName="registry-server" probeResult="failure" output=< Nov 27 10:34:00 crc kubenswrapper[4971]: timeout: failed to connect service ":50051" within 1s Nov 27 10:34:00 crc kubenswrapper[4971]: > Nov 27 10:34:02 crc kubenswrapper[4971]: I1127 10:34:02.567298 4971 scope.go:117] "RemoveContainer" containerID="69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c" Nov 27 10:34:02 crc kubenswrapper[4971]: E1127 10:34:02.568242 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:34:06 crc kubenswrapper[4971]: I1127 10:34:06.876252 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hxk8g" Nov 27 10:34:06 crc kubenswrapper[4971]: I1127 10:34:06.877383 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hxk8g" Nov 27 10:34:06 crc kubenswrapper[4971]: I1127 10:34:06.953498 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hxk8g" Nov 27 10:34:07 crc kubenswrapper[4971]: I1127 10:34:07.938609 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hxk8g" Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.055095 4971 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hxk8g"] Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.109380 4971 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openshift-marketplace/community-operators-8l8gh"] Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.109959 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8l8gh" podUID="2677e996-721b-4cff-8b1d-fb927f04e0e6" containerName="registry-server" containerID="cri-o://d5690977889e4e6c9fdea28582e602212cf5e82b6a236a69eed87be7d11fa832" gracePeriod=2 Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.729898 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8l8gh" Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.844438 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45c5z\" (UniqueName: \"kubernetes.io/projected/2677e996-721b-4cff-8b1d-fb927f04e0e6-kube-api-access-45c5z\") pod \"2677e996-721b-4cff-8b1d-fb927f04e0e6\" (UID: \"2677e996-721b-4cff-8b1d-fb927f04e0e6\") " Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.844635 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2677e996-721b-4cff-8b1d-fb927f04e0e6-catalog-content\") pod \"2677e996-721b-4cff-8b1d-fb927f04e0e6\" (UID: \"2677e996-721b-4cff-8b1d-fb927f04e0e6\") " Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.844813 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2677e996-721b-4cff-8b1d-fb927f04e0e6-utilities\") pod \"2677e996-721b-4cff-8b1d-fb927f04e0e6\" (UID: \"2677e996-721b-4cff-8b1d-fb927f04e0e6\") " Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.845758 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2677e996-721b-4cff-8b1d-fb927f04e0e6-utilities" (OuterVolumeSpecName: "utilities") pod "2677e996-721b-4cff-8b1d-fb927f04e0e6" (UID: "2677e996-721b-4cff-8b1d-fb927f04e0e6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.892702 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2677e996-721b-4cff-8b1d-fb927f04e0e6-kube-api-access-45c5z" (OuterVolumeSpecName: "kube-api-access-45c5z") pod "2677e996-721b-4cff-8b1d-fb927f04e0e6" (UID: "2677e996-721b-4cff-8b1d-fb927f04e0e6"). InnerVolumeSpecName "kube-api-access-45c5z". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.909927 4971 generic.go:334] "Generic (PLEG): container finished" podID="2677e996-721b-4cff-8b1d-fb927f04e0e6" containerID="d5690977889e4e6c9fdea28582e602212cf5e82b6a236a69eed87be7d11fa832" exitCode=0 Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.911135 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8l8gh" Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.911525 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8l8gh" event={"ID":"2677e996-721b-4cff-8b1d-fb927f04e0e6","Type":"ContainerDied","Data":"d5690977889e4e6c9fdea28582e602212cf5e82b6a236a69eed87be7d11fa832"} Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.911588 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8l8gh" event={"ID":"2677e996-721b-4cff-8b1d-fb927f04e0e6","Type":"ContainerDied","Data":"bd6e4d3990d6f559340f5147aa558fe2cc0264d61058b7b354da52d71cd4e969"} Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.911608 4971 scope.go:117] "RemoveContainer" containerID="d5690977889e4e6c9fdea28582e602212cf5e82b6a236a69eed87be7d11fa832" Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.951398 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2677e996-721b-4cff-8b1d-fb927f04e0e6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2677e996-721b-4cff-8b1d-fb927f04e0e6" (UID: "2677e996-721b-4cff-8b1d-fb927f04e0e6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.951436 4971 scope.go:117] "RemoveContainer" containerID="3e0b531794aa8b2befb75c357bb1e969cbbcda544fcdd74ccfee9d6c864914a9" Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.955168 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2677e996-721b-4cff-8b1d-fb927f04e0e6-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.955196 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2677e996-721b-4cff-8b1d-fb927f04e0e6-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.955207 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45c5z\" (UniqueName: \"kubernetes.io/projected/2677e996-721b-4cff-8b1d-fb927f04e0e6-kube-api-access-45c5z\") on node \"crc\" DevicePath \"\"" Nov 27 10:34:08 crc kubenswrapper[4971]: I1127 10:34:08.975123 4971 scope.go:117] "RemoveContainer" containerID="44b5c54883d9a38de4c097d2983e3bfb9550c0e84162a5078a5b68a6d74a5750" Nov 27 10:34:09 crc kubenswrapper[4971]: I1127 10:34:09.028818 4971 scope.go:117] "RemoveContainer" containerID="d5690977889e4e6c9fdea28582e602212cf5e82b6a236a69eed87be7d11fa832" Nov 27 10:34:09 crc kubenswrapper[4971]: E1127 10:34:09.029615 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5690977889e4e6c9fdea28582e602212cf5e82b6a236a69eed87be7d11fa832\": container with ID starting with d5690977889e4e6c9fdea28582e602212cf5e82b6a236a69eed87be7d11fa832 not found: ID does not exist" containerID="d5690977889e4e6c9fdea28582e602212cf5e82b6a236a69eed87be7d11fa832" Nov 27 10:34:09 crc kubenswrapper[4971]: I1127 10:34:09.029677 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5690977889e4e6c9fdea28582e602212cf5e82b6a236a69eed87be7d11fa832"} err="failed to get container status \"d5690977889e4e6c9fdea28582e602212cf5e82b6a236a69eed87be7d11fa832\": rpc error: code = NotFound desc = could not find container 
\"d5690977889e4e6c9fdea28582e602212cf5e82b6a236a69eed87be7d11fa832\": container with ID starting with d5690977889e4e6c9fdea28582e602212cf5e82b6a236a69eed87be7d11fa832 not found: ID does not exist" Nov 27 10:34:09 crc kubenswrapper[4971]: I1127 10:34:09.029716 4971 scope.go:117] "RemoveContainer" containerID="3e0b531794aa8b2befb75c357bb1e969cbbcda544fcdd74ccfee9d6c864914a9" Nov 27 10:34:09 crc kubenswrapper[4971]: E1127 10:34:09.030151 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e0b531794aa8b2befb75c357bb1e969cbbcda544fcdd74ccfee9d6c864914a9\": container with ID starting with 3e0b531794aa8b2befb75c357bb1e969cbbcda544fcdd74ccfee9d6c864914a9 not found: ID does not exist" containerID="3e0b531794aa8b2befb75c357bb1e969cbbcda544fcdd74ccfee9d6c864914a9" Nov 27 10:34:09 crc kubenswrapper[4971]: I1127 10:34:09.030208 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e0b531794aa8b2befb75c357bb1e969cbbcda544fcdd74ccfee9d6c864914a9"} err="failed to get container status \"3e0b531794aa8b2befb75c357bb1e969cbbcda544fcdd74ccfee9d6c864914a9\": rpc error: code = NotFound desc = could not find container \"3e0b531794aa8b2befb75c357bb1e969cbbcda544fcdd74ccfee9d6c864914a9\": container with ID starting with 3e0b531794aa8b2befb75c357bb1e969cbbcda544fcdd74ccfee9d6c864914a9 not found: ID does not exist" Nov 27 10:34:09 crc kubenswrapper[4971]: I1127 10:34:09.030250 4971 scope.go:117] "RemoveContainer" containerID="44b5c54883d9a38de4c097d2983e3bfb9550c0e84162a5078a5b68a6d74a5750" Nov 27 10:34:09 crc kubenswrapper[4971]: E1127 10:34:09.030656 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44b5c54883d9a38de4c097d2983e3bfb9550c0e84162a5078a5b68a6d74a5750\": container with ID starting with 44b5c54883d9a38de4c097d2983e3bfb9550c0e84162a5078a5b68a6d74a5750 not found: ID does not exist" containerID="44b5c54883d9a38de4c097d2983e3bfb9550c0e84162a5078a5b68a6d74a5750" Nov 27 10:34:09 crc kubenswrapper[4971]: I1127 10:34:09.030694 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44b5c54883d9a38de4c097d2983e3bfb9550c0e84162a5078a5b68a6d74a5750"} err="failed to get container status \"44b5c54883d9a38de4c097d2983e3bfb9550c0e84162a5078a5b68a6d74a5750\": rpc error: code = NotFound desc = could not find container \"44b5c54883d9a38de4c097d2983e3bfb9550c0e84162a5078a5b68a6d74a5750\": container with ID starting with 44b5c54883d9a38de4c097d2983e3bfb9550c0e84162a5078a5b68a6d74a5750 not found: ID does not exist" Nov 27 10:34:09 crc kubenswrapper[4971]: I1127 10:34:09.267850 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-59d5b" Nov 27 10:34:09 crc kubenswrapper[4971]: I1127 10:34:09.267907 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-59d5b" Nov 27 10:34:09 crc kubenswrapper[4971]: I1127 10:34:09.301386 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8l8gh"] Nov 27 10:34:09 crc kubenswrapper[4971]: I1127 10:34:09.314851 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8l8gh"] Nov 27 10:34:09 crc kubenswrapper[4971]: I1127 10:34:09.354642 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-operators-59d5b" Nov 27 10:34:09 crc kubenswrapper[4971]: I1127 10:34:09.571466 4971 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-899ph" Nov 27 10:34:09 crc kubenswrapper[4971]: I1127 10:34:09.625596 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-899ph" Nov 27 10:34:09 crc kubenswrapper[4971]: I1127 10:34:09.993312 4971 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-59d5b" Nov 27 10:34:10 crc kubenswrapper[4971]: I1127 10:34:10.570277 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2677e996-721b-4cff-8b1d-fb927f04e0e6" path="/var/lib/kubelet/pods/2677e996-721b-4cff-8b1d-fb927f04e0e6/volumes" Nov 27 10:34:11 crc kubenswrapper[4971]: I1127 10:34:11.618441 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-59d5b"] Nov 27 10:34:11 crc kubenswrapper[4971]: I1127 10:34:11.945743 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-59d5b" podUID="4fbde700-439b-431c-be64-d1d75af4b74a" containerName="registry-server" containerID="cri-o://209c71c63607ed4b2ea751f0195a3f6a109d25fba6b2964d4e6ccdfd8b57aba5" gracePeriod=2 Nov 27 10:34:12 crc kubenswrapper[4971]: I1127 10:34:12.494723 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-59d5b" Nov 27 10:34:12 crc kubenswrapper[4971]: I1127 10:34:12.558063 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fbde700-439b-431c-be64-d1d75af4b74a-utilities\") pod \"4fbde700-439b-431c-be64-d1d75af4b74a\" (UID: \"4fbde700-439b-431c-be64-d1d75af4b74a\") " Nov 27 10:34:12 crc kubenswrapper[4971]: I1127 10:34:12.558395 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbkpn\" (UniqueName: \"kubernetes.io/projected/4fbde700-439b-431c-be64-d1d75af4b74a-kube-api-access-qbkpn\") pod \"4fbde700-439b-431c-be64-d1d75af4b74a\" (UID: \"4fbde700-439b-431c-be64-d1d75af4b74a\") " Nov 27 10:34:12 crc kubenswrapper[4971]: I1127 10:34:12.558593 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fbde700-439b-431c-be64-d1d75af4b74a-catalog-content\") pod \"4fbde700-439b-431c-be64-d1d75af4b74a\" (UID: \"4fbde700-439b-431c-be64-d1d75af4b74a\") " Nov 27 10:34:12 crc kubenswrapper[4971]: I1127 10:34:12.560728 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fbde700-439b-431c-be64-d1d75af4b74a-utilities" (OuterVolumeSpecName: "utilities") pod "4fbde700-439b-431c-be64-d1d75af4b74a" (UID: "4fbde700-439b-431c-be64-d1d75af4b74a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:34:12 crc kubenswrapper[4971]: I1127 10:34:12.570628 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fbde700-439b-431c-be64-d1d75af4b74a-kube-api-access-qbkpn" (OuterVolumeSpecName: "kube-api-access-qbkpn") pod "4fbde700-439b-431c-be64-d1d75af4b74a" (UID: "4fbde700-439b-431c-be64-d1d75af4b74a"). InnerVolumeSpecName "kube-api-access-qbkpn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:34:12 crc kubenswrapper[4971]: I1127 10:34:12.663446 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fbde700-439b-431c-be64-d1d75af4b74a-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 10:34:12 crc kubenswrapper[4971]: I1127 10:34:12.663486 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbkpn\" (UniqueName: \"kubernetes.io/projected/4fbde700-439b-431c-be64-d1d75af4b74a-kube-api-access-qbkpn\") on node \"crc\" DevicePath \"\"" Nov 27 10:34:12 crc kubenswrapper[4971]: I1127 10:34:12.663650 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fbde700-439b-431c-be64-d1d75af4b74a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4fbde700-439b-431c-be64-d1d75af4b74a" (UID: "4fbde700-439b-431c-be64-d1d75af4b74a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:34:12 crc kubenswrapper[4971]: I1127 10:34:12.765696 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fbde700-439b-431c-be64-d1d75af4b74a-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 10:34:12 crc kubenswrapper[4971]: I1127 10:34:12.982979 4971 generic.go:334] "Generic (PLEG): container finished" podID="4fbde700-439b-431c-be64-d1d75af4b74a" containerID="209c71c63607ed4b2ea751f0195a3f6a109d25fba6b2964d4e6ccdfd8b57aba5" exitCode=0 Nov 27 10:34:12 crc kubenswrapper[4971]: I1127 10:34:12.983039 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59d5b" event={"ID":"4fbde700-439b-431c-be64-d1d75af4b74a","Type":"ContainerDied","Data":"209c71c63607ed4b2ea751f0195a3f6a109d25fba6b2964d4e6ccdfd8b57aba5"} Nov 27 10:34:12 crc kubenswrapper[4971]: I1127 10:34:12.983080 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-59d5b" Nov 27 10:34:12 crc kubenswrapper[4971]: I1127 10:34:12.983107 4971 scope.go:117] "RemoveContainer" containerID="209c71c63607ed4b2ea751f0195a3f6a109d25fba6b2964d4e6ccdfd8b57aba5" Nov 27 10:34:12 crc kubenswrapper[4971]: I1127 10:34:12.983081 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59d5b" event={"ID":"4fbde700-439b-431c-be64-d1d75af4b74a","Type":"ContainerDied","Data":"07c922ef5ab2091f8100b6f6307ee5285be86d960472676a89812b34987af89c"} Nov 27 10:34:13 crc kubenswrapper[4971]: I1127 10:34:13.015774 4971 scope.go:117] "RemoveContainer" containerID="c1fa32aedd806f9fc5bbc72fc43a6285ff0e76a7d7ab73ad2f8366a3431c271b" Nov 27 10:34:13 crc kubenswrapper[4971]: I1127 10:34:13.045609 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-59d5b"] Nov 27 10:34:13 crc kubenswrapper[4971]: I1127 10:34:13.053604 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-59d5b"] Nov 27 10:34:13 crc kubenswrapper[4971]: I1127 10:34:13.058946 4971 scope.go:117] "RemoveContainer" containerID="0e47ff58ed02296535ea54fb6871e754202441ef716867b19ace387f80f07088" Nov 27 10:34:13 crc kubenswrapper[4971]: I1127 10:34:13.101277 4971 scope.go:117] "RemoveContainer" containerID="209c71c63607ed4b2ea751f0195a3f6a109d25fba6b2964d4e6ccdfd8b57aba5" Nov 27 10:34:13 crc kubenswrapper[4971]: E1127 10:34:13.101849 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"209c71c63607ed4b2ea751f0195a3f6a109d25fba6b2964d4e6ccdfd8b57aba5\": container with ID starting with 209c71c63607ed4b2ea751f0195a3f6a109d25fba6b2964d4e6ccdfd8b57aba5 not found: ID does not exist" containerID="209c71c63607ed4b2ea751f0195a3f6a109d25fba6b2964d4e6ccdfd8b57aba5" Nov 27 10:34:13 crc kubenswrapper[4971]: I1127 10:34:13.101883 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"209c71c63607ed4b2ea751f0195a3f6a109d25fba6b2964d4e6ccdfd8b57aba5"} err="failed to get container status \"209c71c63607ed4b2ea751f0195a3f6a109d25fba6b2964d4e6ccdfd8b57aba5\": rpc error: code = NotFound desc = could not find container \"209c71c63607ed4b2ea751f0195a3f6a109d25fba6b2964d4e6ccdfd8b57aba5\": container with ID starting with 209c71c63607ed4b2ea751f0195a3f6a109d25fba6b2964d4e6ccdfd8b57aba5 not found: ID does not exist" Nov 27 10:34:13 crc kubenswrapper[4971]: I1127 10:34:13.101906 4971 scope.go:117] "RemoveContainer" containerID="c1fa32aedd806f9fc5bbc72fc43a6285ff0e76a7d7ab73ad2f8366a3431c271b" Nov 27 10:34:13 crc kubenswrapper[4971]: E1127 10:34:13.103304 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1fa32aedd806f9fc5bbc72fc43a6285ff0e76a7d7ab73ad2f8366a3431c271b\": container with ID starting with c1fa32aedd806f9fc5bbc72fc43a6285ff0e76a7d7ab73ad2f8366a3431c271b not found: ID does not exist" containerID="c1fa32aedd806f9fc5bbc72fc43a6285ff0e76a7d7ab73ad2f8366a3431c271b" Nov 27 10:34:13 crc kubenswrapper[4971]: I1127 10:34:13.103330 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1fa32aedd806f9fc5bbc72fc43a6285ff0e76a7d7ab73ad2f8366a3431c271b"} err="failed to get container status \"c1fa32aedd806f9fc5bbc72fc43a6285ff0e76a7d7ab73ad2f8366a3431c271b\": rpc error: code = NotFound desc = could not find container 
\"c1fa32aedd806f9fc5bbc72fc43a6285ff0e76a7d7ab73ad2f8366a3431c271b\": container with ID starting with c1fa32aedd806f9fc5bbc72fc43a6285ff0e76a7d7ab73ad2f8366a3431c271b not found: ID does not exist" Nov 27 10:34:13 crc kubenswrapper[4971]: I1127 10:34:13.103351 4971 scope.go:117] "RemoveContainer" containerID="0e47ff58ed02296535ea54fb6871e754202441ef716867b19ace387f80f07088" Nov 27 10:34:13 crc kubenswrapper[4971]: E1127 10:34:13.103677 4971 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e47ff58ed02296535ea54fb6871e754202441ef716867b19ace387f80f07088\": container with ID starting with 0e47ff58ed02296535ea54fb6871e754202441ef716867b19ace387f80f07088 not found: ID does not exist" containerID="0e47ff58ed02296535ea54fb6871e754202441ef716867b19ace387f80f07088" Nov 27 10:34:13 crc kubenswrapper[4971]: I1127 10:34:13.103701 4971 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e47ff58ed02296535ea54fb6871e754202441ef716867b19ace387f80f07088"} err="failed to get container status \"0e47ff58ed02296535ea54fb6871e754202441ef716867b19ace387f80f07088\": rpc error: code = NotFound desc = could not find container \"0e47ff58ed02296535ea54fb6871e754202441ef716867b19ace387f80f07088\": container with ID starting with 0e47ff58ed02296535ea54fb6871e754202441ef716867b19ace387f80f07088 not found: ID does not exist" Nov 27 10:34:13 crc kubenswrapper[4971]: I1127 10:34:13.400569 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-899ph"] Nov 27 10:34:13 crc kubenswrapper[4971]: I1127 10:34:13.401784 4971 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-899ph" podUID="0de6d2da-4809-4889-b8f1-5ebc0d77b575" containerName="registry-server" containerID="cri-o://6df25e5720efc4d965e2efcdd4eac9e29a5669a686f6a598c367ec50a10602a1" gracePeriod=2 Nov 27 10:34:13 crc kubenswrapper[4971]: I1127 10:34:13.996484 4971 generic.go:334] "Generic (PLEG): container finished" podID="0de6d2da-4809-4889-b8f1-5ebc0d77b575" containerID="6df25e5720efc4d965e2efcdd4eac9e29a5669a686f6a598c367ec50a10602a1" exitCode=0 Nov 27 10:34:13 crc kubenswrapper[4971]: I1127 10:34:13.996549 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-899ph" event={"ID":"0de6d2da-4809-4889-b8f1-5ebc0d77b575","Type":"ContainerDied","Data":"6df25e5720efc4d965e2efcdd4eac9e29a5669a686f6a598c367ec50a10602a1"} Nov 27 10:34:13 crc kubenswrapper[4971]: I1127 10:34:13.996606 4971 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-899ph" event={"ID":"0de6d2da-4809-4889-b8f1-5ebc0d77b575","Type":"ContainerDied","Data":"57dc4b88d2fed5680067c0895d2b84312c8ca89067ac46a430e30406225bbca8"} Nov 27 10:34:13 crc kubenswrapper[4971]: I1127 10:34:13.996619 4971 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="57dc4b88d2fed5680067c0895d2b84312c8ca89067ac46a430e30406225bbca8" Nov 27 10:34:14 crc kubenswrapper[4971]: I1127 10:34:14.034763 4971 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-899ph" Nov 27 10:34:14 crc kubenswrapper[4971]: I1127 10:34:14.108566 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0de6d2da-4809-4889-b8f1-5ebc0d77b575-utilities\") pod \"0de6d2da-4809-4889-b8f1-5ebc0d77b575\" (UID: \"0de6d2da-4809-4889-b8f1-5ebc0d77b575\") " Nov 27 10:34:14 crc kubenswrapper[4971]: I1127 10:34:14.108680 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0de6d2da-4809-4889-b8f1-5ebc0d77b575-catalog-content\") pod \"0de6d2da-4809-4889-b8f1-5ebc0d77b575\" (UID: \"0de6d2da-4809-4889-b8f1-5ebc0d77b575\") " Nov 27 10:34:14 crc kubenswrapper[4971]: I1127 10:34:14.108814 4971 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgxcp\" (UniqueName: \"kubernetes.io/projected/0de6d2da-4809-4889-b8f1-5ebc0d77b575-kube-api-access-fgxcp\") pod \"0de6d2da-4809-4889-b8f1-5ebc0d77b575\" (UID: \"0de6d2da-4809-4889-b8f1-5ebc0d77b575\") " Nov 27 10:34:14 crc kubenswrapper[4971]: I1127 10:34:14.109176 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0de6d2da-4809-4889-b8f1-5ebc0d77b575-utilities" (OuterVolumeSpecName: "utilities") pod "0de6d2da-4809-4889-b8f1-5ebc0d77b575" (UID: "0de6d2da-4809-4889-b8f1-5ebc0d77b575"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:34:14 crc kubenswrapper[4971]: I1127 10:34:14.109652 4971 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0de6d2da-4809-4889-b8f1-5ebc0d77b575-utilities\") on node \"crc\" DevicePath \"\"" Nov 27 10:34:14 crc kubenswrapper[4971]: I1127 10:34:14.118759 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0de6d2da-4809-4889-b8f1-5ebc0d77b575-kube-api-access-fgxcp" (OuterVolumeSpecName: "kube-api-access-fgxcp") pod "0de6d2da-4809-4889-b8f1-5ebc0d77b575" (UID: "0de6d2da-4809-4889-b8f1-5ebc0d77b575"). InnerVolumeSpecName "kube-api-access-fgxcp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 27 10:34:14 crc kubenswrapper[4971]: I1127 10:34:14.163180 4971 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0de6d2da-4809-4889-b8f1-5ebc0d77b575-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0de6d2da-4809-4889-b8f1-5ebc0d77b575" (UID: "0de6d2da-4809-4889-b8f1-5ebc0d77b575"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 27 10:34:14 crc kubenswrapper[4971]: I1127 10:34:14.210686 4971 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgxcp\" (UniqueName: \"kubernetes.io/projected/0de6d2da-4809-4889-b8f1-5ebc0d77b575-kube-api-access-fgxcp\") on node \"crc\" DevicePath \"\"" Nov 27 10:34:14 crc kubenswrapper[4971]: I1127 10:34:14.210724 4971 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0de6d2da-4809-4889-b8f1-5ebc0d77b575-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 27 10:34:14 crc kubenswrapper[4971]: I1127 10:34:14.551286 4971 scope.go:117] "RemoveContainer" containerID="69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c" Nov 27 10:34:14 crc kubenswrapper[4971]: E1127 10:34:14.552085 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:34:14 crc kubenswrapper[4971]: I1127 10:34:14.576096 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fbde700-439b-431c-be64-d1d75af4b74a" path="/var/lib/kubelet/pods/4fbde700-439b-431c-be64-d1d75af4b74a/volumes" Nov 27 10:34:15 crc kubenswrapper[4971]: I1127 10:34:15.013946 4971 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-899ph" Nov 27 10:34:15 crc kubenswrapper[4971]: I1127 10:34:15.063459 4971 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-899ph"] Nov 27 10:34:15 crc kubenswrapper[4971]: I1127 10:34:15.095950 4971 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-899ph"] Nov 27 10:34:16 crc kubenswrapper[4971]: I1127 10:34:16.567797 4971 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0de6d2da-4809-4889-b8f1-5ebc0d77b575" path="/var/lib/kubelet/pods/0de6d2da-4809-4889-b8f1-5ebc0d77b575/volumes" Nov 27 10:34:25 crc kubenswrapper[4971]: I1127 10:34:25.552279 4971 scope.go:117] "RemoveContainer" containerID="69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c" Nov 27 10:34:25 crc kubenswrapper[4971]: E1127 10:34:25.553959 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:34:39 crc kubenswrapper[4971]: I1127 10:34:39.550355 4971 scope.go:117] "RemoveContainer" containerID="69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c" Nov 27 10:34:39 crc kubenswrapper[4971]: E1127 10:34:39.552020 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:34:54 crc kubenswrapper[4971]: I1127 10:34:54.552011 4971 scope.go:117] "RemoveContainer" containerID="69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c" Nov 27 10:34:54 crc kubenswrapper[4971]: E1127 10:34:54.553136 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:35:06 crc kubenswrapper[4971]: I1127 10:35:06.552194 4971 scope.go:117] "RemoveContainer" containerID="69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c" Nov 27 10:35:06 crc kubenswrapper[4971]: E1127 10:35:06.553005 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:35:17 crc kubenswrapper[4971]: I1127 10:35:17.551529 4971 scope.go:117] "RemoveContainer" containerID="69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c" Nov 27 10:35:17 crc kubenswrapper[4971]: E1127 10:35:17.552815 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:35:28 crc kubenswrapper[4971]: I1127 10:35:28.552380 4971 scope.go:117] "RemoveContainer" containerID="69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c" Nov 27 10:35:28 crc kubenswrapper[4971]: E1127 10:35:28.554026 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:35:39 crc kubenswrapper[4971]: I1127 10:35:39.550981 4971 scope.go:117] "RemoveContainer" containerID="69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c" Nov 27 10:35:39 crc kubenswrapper[4971]: E1127 10:35:39.552505 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" Nov 27 10:35:53 crc kubenswrapper[4971]: I1127 10:35:53.551115 4971 
scope.go:117] "RemoveContainer" containerID="69e0880639ede65ab1f37011c12886df1228f7179ca05fbe2e45c9270018313c" Nov 27 10:35:53 crc kubenswrapper[4971]: E1127 10:35:53.552266 4971 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdz6h_openshift-machine-config-operator(0ab8c2ef-d82b-4396-919d-8550cc2e24d7)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdz6h" podUID="0ab8c2ef-d82b-4396-919d-8550cc2e24d7" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515112024630024437 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015112024630017354 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015111771524016511 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015111771524015461 5ustar corecore